Changes in / [29adb33:26c9b85] in sasview
- Files: 1 edited
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
Diff of file revisions rc94280c (old) and rbbd0f37 (new). Changes, old to new:

- Imports: the single backslash-continued import of plottable_1D, plottable_2D, Data1D, Data2D, DataInfo, Process, Aperture, Collimation, TransmissionSpectrum, and Detector from sas.sascalc.dataloader.data_info is split into two unwrapped import statements; the import of combine_data_info_with_plottable is unchanged.
- The Reader class docstring (a reader for CanSAS v2.0 data files: opens Mantid-generated HDF5 files with extension .h5/.H5, accepts any number of data sets per file, currently supports 1D I(Q) and 2D I(Qx, Qy) SAS data with 1D and 2D SESANS planned, and also reads NXcanSAS-formatted HDF5 files) is reflowed onto longer lines with no change in content.
- The comments on the class attributes (cansas_version = 2.0, logging, errors, raw_data, current_datainfo, current_dataset, data1d, data2d, type_name = "CanSAS 2.0", type = ["CanSAS 2.0 HDF5 Files (*.h5)|*.h5"], ext = ['.h5', '.H5'], output) change from single # to doubled ## markers.
- The flag that bypasses the extension check changes from allow_all = True to allow_all = False.
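The allow_all flag feeds the file-type gate that appears later in read() (if extension in self.ext or self.allow_all). A minimal standalone sketch of that gate, using hypothetical module-level names rather than the Reader class itself, shows the effect of flipping the flag to False:

```python
import os

# Assumed values, mirroring the class attributes shown in the diff.
ALLOWED_EXTENSIONS = ['.h5', '.H5']
ALLOW_ALL = False  # rbbd0f37 value; rc94280c had True

def is_loadable(filename):
    """Return True if a reader with these settings would try to open the file."""
    _, extension = os.path.splitext(os.path.basename(filename))
    # With ALLOW_ALL True every extension passes; with False only .h5/.H5 do.
    return extension in ALLOWED_EXTENSIONS or ALLOW_ALL

print(is_loadable("data.h5"))   # True
print(is_loadable("data.xml"))  # False with ALLOW_ALL = False
```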
- In read(): the step comments (reinitialize the class variables, check that the file exists, open the file with h5py.File(filename, 'r'), read all children of the top-level SASroot via read_children(), add the last data set with add_data_set(), close the file, return self.output) change from # to ## markers with minor rewording; the logic, including the gate "if extension in self.ext or self.allow_all", is unchanged.
- In read_children(): the loop and recursion comments likewise change from # to ##, and a two-line comment about starting a fresh Data1D/2D object for a new SASentry is joined onto one line; the group handling (add_data_set(key) on SASentry, _initialize_new_data_set(parent_list) on SASdata, recursive read_children(value, parent_list), then add_intermediate()) is unchanged.
- In the h5py.Dataset branch, the isinstance(self.current_dataset, plottable_2D) checks in the I, Idev, and Q handlers become type(self.current_dataset) is plottable_2D.
- The dQw and dQl handlers, which stored flattened resolution arrays in current_dataset.dxw and current_dataset.dxl, are removed.
- The dataset-level transmission-spectrum handlers (keys T, Tdev, and lambda under a SAStransmission_spectrum parent, which assigned whole flattened arrays to trans_spectrum) are removed; the new revision instead handles these keys per point inside the data loop, as noted further down.
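read_children() is the recursive walk over the HDF5 tree, and this diff leaves its structure intact. A self-contained sketch of the same traversal pattern, independent of the Reader class (only the canSAS_class attribute name and the h5py calls are taken from the diff; the rest is illustrative):

```python
import h5py

def walk(node, parent_list):
    """Recursively visit an h5py group, dispatching on sub-groups vs datasets."""
    for key in node.keys():
        value = node.get(key)
        if isinstance(value, h5py.Group):
            # Groups carry a canSAS_class attribute (e.g. SASentry, SASdata).
            class_name = value.attrs.get(u'canSAS_class')
            print("group", "/".join(parent_list + [key]), class_name)
            walk(value, parent_list + [key])
        elif isinstance(value, h5py.Dataset):
            # Datasets hold the numeric arrays; [:] reads them into memory.
            data_set = value[:]
            print("dataset", key, getattr(data_set, "shape", None))

# Usage (file name is hypothetical):
# with h5py.File("example.h5", "r") as data_file:
#     walk(data_file, [])
```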
- In the per-point loop (for data_point in data_set), the Top Level Meta Data comment changes from # to ##; the handlers for definition, run, title, and note are unchanged.
- Sample information: the standalone "# CanSAS 2.0 format" and "# NXcanSAS format" comment lines become trailing comments on the elif lines; the old branch that also mapped the key name under SASsample to sample.name is dropped, leaving Title (CanSAS 2.0) and ID (NXcanSAS) as the keys that set sample.name; the wrapped conditions for thickness, temperature, transmission, x_position, and y_position are joined onto single lines.
- Sample orientation: the key pitch (sample.orientation.x) becomes polar_angle, roll (sample.orientation.z) becomes azimuthal_angle, and the yaw handler (sample.orientation.y) is removed; the details handler is unchanged.
- Instrumental information: the section comment changes from # to ##; name under SASinstrument and SASdetector, the detector distance, and slit_length are unchanged apart from unwrapping the conditions.
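The sample and detector branches above form a long elif chain keyed on (key, parent_class). As an illustrative design sketch only (not code from this changeset), the same dispatch could be written as a lookup table, which also makes renames such as pitch to polar_angle a one-line change:

```python
from types import SimpleNamespace

# Hypothetical dispatch table, (HDF5 key, parent canSAS class) -> setter.
# The key names mirror those in the diff; the table itself is illustrative.
SAMPLE_SETTERS = {
    (u'Title', u'SASsample'): lambda s, v: setattr(s, 'name', v),           # CanSAS 2.0
    (u'ID', u'SASsample'): lambda s, v: setattr(s, 'name', v),              # NXcanSAS
    (u'thickness', u'SASsample'): lambda s, v: setattr(s, 'thickness', v),
    (u'polar_angle', u'SASsample'): lambda s, v: setattr(s.orientation, 'x', v),
    (u'azimuthal_angle', u'SASsample'): lambda s, v: setattr(s.orientation, 'z', v),
}

def apply_sample_value(sample, key, parent_class, value):
    """Apply one metadata value if a setter is registered for (key, parent_class)."""
    setter = SAMPLE_SETTERS.get((key, parent_class))
    if setter is None:
        return False
    setter(sample, value)
    return True

# Demo with a stand-in sample object.
sample = SimpleNamespace(name=None, thickness=None,
                         orientation=SimpleNamespace(x=None, z=None))
apply_sample_value(sample, u'polar_angle', u'SASsample', 45.0)
print(sample.orientation.x)  # 45.0
```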
- Detector geometry: the wrapped conditions for slit_length, x_position (offset.x), y_position (offset.y), beam_center_x, beam_center_y, x_pixel_size, and y_pixel_size are joined onto single lines; the orientation keys change from pitch (orientation.x) and roll (orientation.z) to polar_angle and azimuthal_angle, and the yaw handler (orientation.y) is removed.
- Collimation: the length key changes from distance to SSD; the name handler is unchanged.
- The SASaperture handlers (shape, x_gap into aperture.size.x, y_gap into aperture.size.y) are removed.
- Process information: the name handling is reordered, with a plain name branch added ahead of the Title (CanSAS 2.0) and name (NXcanSAS) branches, all of which set process.name.
- Process: the description and date handlers are unchanged apart from unwrapping; the term handler (process.term) is removed; other SASprocess values still fall through to process.notes.append(data_point).
- Transmission spectrum: a new per-point block is added, in which T, Tdev, and lambda under SAStransmission_spectrum append each data_point to trans_spectrum.transmission, trans_spectrum.transmission_deviation, and trans_spectrum.wavelength respectively.
- Source: the wrapped conditions for wavelength (under SASdata), incident_wavelength, wavelength_max, wavelength_min, beam_size_x, beam_size_y, beam_shape, and radiation (under SASsource) are joined onto single lines, and the bare 'SASsource' string in the incident_wavelength branch gains the u prefix; the key incident_wavelength_spread becomes wavelength_spread; the transmission key under SASdata still sets sample.transmission.
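With the new revision the transmission spectrum is accumulated one value at a time and only converted to a float64 array later (the cleanup step summarized below deletes a leading placeholder element before casting). A rough standalone sketch of that accumulate-then-cast pattern, with all names hypothetical:

```python
import numpy as np

# Hypothetical accumulation buffers, seeded with one placeholder entry,
# mirroring the delete-first-element step in final_data_cleanup.
transmission = np.zeros(1)
wavelength = np.zeros(1)

# Per-point accumulation, as each HDF5 value is visited.
for t, lam in [(0.91, 5.0), (0.93, 6.0), (0.95, 7.0)]:
    transmission = np.append(transmission, t)
    wavelength = np.append(wavelength, lam)

# Cleanup: drop the placeholder and cast to float64.
transmission = np.delete(transmission, [0]).astype(np.float64)
wavelength = np.delete(wavelength, [0]).astype(np.float64)
print(transmission, wavelength)
```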
- The fallback that stores everything else in meta_data via _create_unique_key() is unchanged apart from unwrapping and the ## comment marker, as is the final else branch that records "ShouldNeverHappenException".
- add_intermediate(): the docstring is reflowed onto longer lines; in the SASdata branch, the isinstance checks for plottable_2D and plottable_1D become type(...) is checks before appending to data2d or data1d.
- final_data_cleanup(): the docstring is reflowed; comments change from # to ##; the transmission-spectrum cleanup (np.delete of the leading element and astype(np.float64) for transmission, transmission_deviation, and wavelength) is joined onto single lines; appending self.errors to current_datainfo.errors and clearing them is unchanged.
- The 2D-dataset loop (cast data to float64, build a boolean array with np.ones(dataset.data.size, dtype=bool), copy dataset.mask into it element by element inside a try/except that records sys.exc_value, then assign it back to dataset.mask) is unchanged apart from whitespace.
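The element-by-element mask copy kept by this diff can be expressed as one vectorized numpy assignment; a hedged sketch (not part of the changeset) of the equivalent is below. Note that the loop's range(0, mask.size - 1) stops one element short of the end, so the faithful vectorized form also copies only the first size - 1 elements.

```python
import numpy as np

# Stand-in arrays; in the reader these come from the 2D dataset being cleaned up.
data = np.arange(6, dtype=np.float64)
mask = np.array([1, 0, 1, 1, 0, 1])

# Loop form kept by the diff (range(0, mask.size - 1) skips the last element).
zeros = np.ones(data.size, dtype=bool)
for i in range(0, mask.size - 1):
    zeros[i] = mask[i]

# Vectorized equivalent of that loop.
zeros_vec = np.ones(data.size, dtype=bool)
zeros_vec[:mask.size - 1] = mask[:-1].astype(bool)

print(np.array_equal(zeros, zeros_vec))  # True
```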
- Q matrix: the wrapped np.sqrt(qx_data * qx_data + qy_data * qy_data) expression that fills dataset.q_data when it is empty is joined onto a single line, and the except branch still sets q_data to None; the data flattening, the combine_data_info_with_plottable(dataset, self.current_datainfo) calls for both 2D and 1D data sets, and the appends to self.output are unchanged apart from unwrapping.
- 1D datasets still cast dy to float64 when present before being combined and appended.
- add_data_set(): the docstring is reflowed onto longer lines.
- _initialize_new_data_set(): the signature changes from parent_list=None to parent_list = None and the docstring is reflowed.
- _find_intermediate(): the :param parent_list: description changes from "List of parents nodes in the HDF5 file" to "List of parents to the current level in the HDF5 file", and the match test gains redundant parentheses, if key_prog.match(key): becoming if (key_prog.match(key)):.
- _get_unit(): if unit is None: becomes if unit == None:, the comment changes from # to ##, and the conversion of the unit string "1/A" to "A^{-1}" is unchanged.
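Two small numerical details from the tail of the diff, shown as a standalone hedged sketch: the magnitude of Q computed from the Qx and Qy components, and the unit-string normalization that rewrites "1/A" as "A^{-1}".

```python
import numpy as np

def q_magnitude(qx_data, qy_data):
    """|Q| from the Qx and Qy components, as in the 2D cleanup step."""
    return np.sqrt(qx_data * qx_data + qy_data * qy_data)

def normalize_unit(unit):
    """Illustrative unit normalization matching the _get_unit conversion."""
    if unit == "1/A":
        return "A^{-1}"
    return unit

qx = np.array([0.0, 0.01, 0.02])
qy = np.array([0.01, 0.0, 0.02])
print(q_magnitude(qx, qy))    # [0.01, 0.01, ~0.0283]
print(normalize_unit("1/A"))  # A^{-1}
```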