Changeset 6bd4235 in sasview for src/sas/sascalc/dataloader
- Timestamp: Jan 23, 2018 10:35:39 AM (7 years ago)
- Branches: master, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, unittest-saveload
- Children: 0863065
- Parents: 18af6d2 (diff), e110cb0 (diff)
- Location: src/sas/sascalc/dataloader
- Files: 4 edited

Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
src/sas/sascalc/dataloader/file_reader_base_class.py
(r3053a4a → ra58b5a0)

- The unused `import re` is replaced with `import math`.
- Module-level tuples listing the per-point fields of 1D and 2D data objects are added for iteration:

      # Data 1D fields for iterative purposes
      FIELDS_1D = ('x', 'y', 'dx', 'dy', 'dxl', 'dxw')
      # Data 2D fields for iterative purposes
      FIELDS_2D = ('data', 'qx_data', 'qy_data', 'q_data', 'err_data',
                   'dqx_data', 'dqy_data', 'mask')

- The reader's state-reset code now also initializes `self.ind = None` alongside `self.current_dataset`, `self.filepath` and `self.output`.
- In `sort_one_d_data()`, the per-field list comprehensions such as
  `data.x = np.asarray([data.x[i] for i in ind]).astype(np.float64)`
  are replaced with calls to a new helper, e.g. `data.x = self._reorder_1d_array(data.x, ind)`,
  for x, y, dx, dxl, dxw, dy, lam and dlam. After reordering, `data = self._remove_nans_in_data(data)`
  is called before the x/y minima and maxima are recomputed.
- Two new static methods are added:

      @staticmethod
      def _reorder_1d_array(array, ind):
          """
          Reorders a 1D array based on the indices passed as ind
          :param array: Array to be reordered
          :param ind: Indices used to reorder array
          :return: reordered array
          """
          array = np.asarray(array, dtype=np.float64)
          return array[ind]

      @staticmethod
      def _remove_nans_in_data(data):
          """
          Remove data points where nan is loaded
          :param data: 1D or 2D data object
          :return: data with nan points removed
          """
          if isinstance(data, Data1D):
              fields = FIELDS_1D
          elif isinstance(data, Data2D):
              fields = FIELDS_2D
          else:
              return data
          # Make array of good points - all others will be removed
          good = np.isfinite(getattr(data, fields[0]))
          for name in fields[1:]:
              array = getattr(data, name)
              if array is not None:
                  # Update good points only if not already changed
                  good &= np.isfinite(array)
          if not np.all(good):
              for name in fields:
                  array = getattr(data, name)
                  if array is not None:
                      setattr(data, name, array[good])
          return data

- In `sort_two_d_data()`, `dataset = self._remove_nans_in_data(dataset)` is called after the data are
  flattened and before `dataset.xmin = np.min(dataset.qx_data)` and the related bounds are set.
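Taken together, the two helpers implement one reusable cleanup pattern: reorder every populated per-point array with the same index vector, then drop any point that is non-finite in any field. Below is a minimal, self-contained sketch of that pattern; `SimpleData1D`, `FIELDS` and `sort_and_clean` are illustrative names only, not the SasView `Data1D` API.

      import numpy as np

      class SimpleData1D(object):
          """Toy stand-in for a 1D data object (illustration only)."""
          def __init__(self, x, y, dy=None):
              self.x = np.asarray(x, dtype=np.float64)
              self.y = np.asarray(y, dtype=np.float64)
              self.dy = None if dy is None else np.asarray(dy, dtype=np.float64)

      FIELDS = ('x', 'y', 'dy')

      def sort_and_clean(data):
          # Sort by increasing x (y breaks ties), mirroring np.lexsort((data.y, data.x))
          ind = np.lexsort((data.y, data.x))
          for name in FIELDS:
              arr = getattr(data, name)
              if arr is not None:
                  setattr(data, name, arr[ind])
          # Build a mask of points that are finite in every populated field
          good = np.isfinite(data.x)
          for name in FIELDS[1:]:
              arr = getattr(data, name)
              if arr is not None:
                  good &= np.isfinite(arr)
          # Drop the bad points from every populated field at once
          if not np.all(good):
              for name in FIELDS:
                  arr = getattr(data, name)
                  if arr is not None:
                      setattr(data, name, arr[good])
          return data

      data = sort_and_clean(SimpleData1D([0.3, 0.1, 0.2], [3.0, np.nan, 2.0]))
      print(data.x, data.y)   # -> [0.2 0.3] [2. 3.]  (the NaN point at x=0.1 is dropped)

Applying one boolean mask across all fields keeps the arrays the same length, which is what the xmin/xmax/ymin/ymax bookkeeping after the call relies on.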
src/sas/sascalc/dataloader/readers/abs_reader.py
(r1efbc190 → re3775c6)

- The reader now also accepts IGOR 1D USANS `.cor` files:

      type_name = "IGOR 1D"
      # Wildcards
      type = ["IGOR 1D files (*.abs)|*.abs", "IGOR 1D USANS files (*.cor)|*.cor"]
      # List of allowed extensions
      ext = ['.abs', '.cor']

- In `get_file_contents()`, the call to `self.reset_data_list(len(lines))` after the filename is set is removed.
- Resolution values that are not positive are now treated as slit-smearing: a positive `_dx` is still stored in `dx`, but on the first such data line `dx` is dropped in favour of `dxl`/`dxw` arrays, and `abs(_dx)` is stored as the slit length:

      self.current_dataset.y[data_line] = _y
      self.current_dataset.dy[data_line] = _dy
      if _dx > 0:
          self.current_dataset.dx[data_line] = _dx
      else:
          if data_line == 0:
              self.current_dataset.dx = None
              self.current_dataset.dxl = np.zeros(len(lines))
              self.current_dataset.dxw = np.zeros(len(lines))
          self.current_dataset.dxl[data_line] = abs(_dx)
          self.current_dataset.dxw[data_line] = 0
      data_line += 1

- Detection of the start of the data block now recognizes both the SANS and the USANS header lines:

      # SANS Data:
      # The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
      # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
      # USANS Data:
      # EMP LEVEL: <value> ; BKG LEVEL: <value>
      if line.startswith("The 6 columns") or line.startswith("EMP LEVEL"):
          is_data_started = True
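USANS reductions commonly encode the slit length as a negative value in the resolution column, which is what the new branch above maps onto `dxl`/`dxw`. A minimal sketch of the same decision applied to a whole column at once; `parse_resolution_column` is a hypothetical helper, not part of the SasView reader:

      import numpy as np

      def parse_resolution_column(dq):
          """Split a resolution column into pinhole (dx) and slit (dxl, dxw) parts.

          Illustrative only. Positive values are treated as ordinary pinhole dQ;
          non-positive values are interpreted as a slit length stored by its
          absolute value, with the slit width taken as zero.
          """
          dq = np.asarray(dq, dtype=np.float64)
          if np.all(dq > 0):
              return dq, None, None          # pinhole resolution
          dxl = np.abs(dq)                   # slit length
          dxw = np.zeros_like(dq)            # slit width assumed negligible
          return None, dxl, dxw

      dx, dxl, dxw = parse_resolution_column([-2.5e-5, -2.5e-5, -2.5e-5])
      print(dx, dxl, dxw)   # None [2.5e-05 2.5e-05 2.5e-05] [0. 0. 0.]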
src/sas/sascalc/dataloader/readers/associations.py
(r574adc7 → ra32c19c)

The `.cor` extension is registered with the same reader as `.abs` in the extension-to-reader mapping:

      ".dat": "red2d_reader",
      ".abs": "abs_reader",
      ".cor": "abs_reader",
      ".sans": "danse_reader",
      ".pdh": "anton_paar_saxs_reader"
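The association table is a plain extension-to-module-name dictionary, so a loader can pick a reader by file suffix. A small sketch under that assumption; `get_reader_for` and the fallback value are hypothetical, not the SasView loader API:

      import os

      # Hypothetical excerpt of the association table, for illustration only.
      ASSOCIATIONS = {
          ".abs": "abs_reader",
          ".cor": "abs_reader",
          ".sans": "danse_reader",
      }

      def get_reader_for(path, default="ascii_reader"):
          """Return the reader module name associated with a file's extension."""
          ext = os.path.splitext(path)[1].lower()
          return ASSOCIATIONS.get(ext, default)

      print(get_reader_for("run123.COR"))   # -> abs_reader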
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
(r61f329f0 → r18af6d2)

- The class-level attribute declarations for `logging`, `errors`, `raw_data`, `data1d` and `data2d` (and their comments) are removed; `cansas_version` and `type_name` remain.
- The per-file state now also tracks which HDF5 datasets hold the plottable data. Alongside `self.errors = set()` and `self.logging = []`, the reader initializes:

      self.q_name = []
      self.mask_name = u''
      self.i_name = u''
      self.i_node = u''
      self.q_uncertainties = u''
      self.q_resolutions = u''
      self.i_uncertainties = u''

- When `read_children()` encounters a `SASdata` group, it now calls `self._find_data_attributes(value)` before `self._initialize_new_data_set(parent_list)`.
- The long inline `if/elif` chains in `read_children()` that matched hard-coded dataset names (`I`, `Idev`, `Q`, `Qdev`, `dQw`, `dQl`, `Qx`, `Qy`, `Qxdev`, `Qydev`, `Mask`, the transmission-spectrum entries `T`, `Tdev` and `lambda`, and the per-class metadata for sample, detector, collimation, aperture, process and source) are removed. The per-point loop now dispatches on `self.parent_class` instead:

      # Sample Information
      elif self.parent_class == u'SASsample':
          self.process_sample(data_point, key)
      # Instrumental Information
      elif (key == u'name'
            and self.parent_class == u'SASinstrument'):
          self.current_datainfo.instrument = data_point
      # Detector
      elif self.parent_class == u'SASdetector':
          self.process_detector(data_point, key, unit)
      # Collimation
      elif self.parent_class == u'SAScollimation':
          self.process_collimation(data_point, key, unit)
      # Aperture
      elif self.parent_class == u'SASaperture':
          self.process_aperture(data_point, key)
      # Process Information
      elif self.parent_class == u'SASprocess':  # CanSAS 2.0
          self.process_process(data_point, key)
      # Source
      elif self.parent_class == u'SASsource':
          self.process_source(data_point, key, unit)
      # Everything else goes in meta_data
      elif self.parent_class == u'SASdata':
          self.process_data_object(data_set, key, unit)
          break
      elif self.parent_class == u'SAStransmission_spectrum':
          self.process_trans_spectrum(data_set, key)
          break

- The removed branches reappear as dedicated processor methods: `process_data_object()` assigns the I, Q, resolution and mask arrays to the current plottable, using the names discovered by `_find_data_attributes()` rather than hard-coded keys, routing `dQw`/`dQl` to `dxw`/`dxl`, and picking up a SASdata-level `wavelength` value; `process_trans_spectrum()` handles `T`, `Tdev` and `lambda`; and `process_sample()`, `process_detector()`, `process_collimation()`, `process_aperture()`, `process_source()` and `process_process()` copy the corresponding metadata into `current_datainfo`, with the same behaviour as the old inline code.
- `_initialize_new_data_set()` now resets `mask_name`, `i_name`, `i_node`, `q_name`, `q_uncertainties`, `q_resolutions` and `i_uncertainties` for each new data set.
- A new `_find_data_attributes()` method reads the NXcanSAS attributes of the SASdata group to decide which datasets carry Q, I, the mask and their uncertainties and resolutions:

      def _find_data_attributes(self, value):
          """
          A class to find the indices for Q, the name of the Qdev and Idev, and
          the name of the mask.
          :param value: SASdata/NXdata HDF5 Group
          """
          attrs = value.attrs
          signal = attrs.get("signal")
          i_axes = np.array(str(attrs.get("I_axes")).split(","))
          q_indices = np.int_(attrs.get("Q_indices").split(","))
          keys = value.keys()
          self.mask_name = attrs.get("mask")
          for val in q_indices:
              self.q_name.append(i_axes[val])
          self.i_name = signal
          self.i_node = value.get(self.i_name)
          for item in self.q_name:
              if item in keys:
                  q_vals = value.get(item)
                  self.q_uncertainties = q_vals.attrs.get("uncertainty")
                  self.q_resolutions = q_vals.attrs.get("resolution")
          if self.i_name in keys:
              i_vals = value.get(self.i_name)
              self.i_uncertainties = i_vals.attrs.get("uncertainty")
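The attribute-driven lookup follows the NXcanSAS convention that the SASdata group's `signal` attribute names the intensity dataset, `I_axes` lists its axes, and `Q_indices` selects which of those axes are Q. A standalone sketch of the same lookup with h5py; the file name, group path and the comma-separated string form of the attributes are assumptions for illustration:

      import h5py

      def as_str(value):
          """Decode bytes attributes to str; pass other values through str()."""
          return value.decode() if isinstance(value, bytes) else str(value)

      # Sketch only: example.h5 and the group path are hypothetical.
      with h5py.File("example.h5", "r") as f:
          sasdata = f["sasentry01/sasdata01"]
          attrs = sasdata.attrs
          i_name = as_str(attrs.get("signal"))                     # e.g. "I"
          i_axes = as_str(attrs.get("I_axes")).split(",")          # e.g. ["Q"]
          # Assumes Q_indices is stored as a comma-separated string such as "0"
          q_indices = [int(i) for i in as_str(attrs.get("Q_indices")).split(",")]
          q_names = [i_axes[i] for i in q_indices]                 # which axes are Q
          mask_name = as_str(attrs.get("mask"))                    # e.g. "Mask"
          # Per-dataset attributes name the uncertainty/resolution datasets
          q_dset = sasdata.get(q_names[0])
          q_uncertainty = q_dset.attrs.get("uncertainty") if q_dset is not None else None
          print(i_name, q_names, mask_name, q_uncertainty)

Resolving the dataset names from attributes rather than hard-coded keys is what lets the reader accept files whose Q and I datasets use non-default names.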