Changeset c416a17 in sasview for src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
- Timestamp:
- May 26, 2017 5:41:44 AM (7 years ago)
- Branches:
- ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc
- Children:
- c1e380e
- Parents:
- 6964d44 (diff), 7132e49 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Files:
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
r9687d58 rc416a17 9 9 import sys 10 10 11 from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, DataInfo, Process, Aperture 12 from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector 11 from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\ 12 Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \ 13 TransmissionSpectrum, Detector 13 14 from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable 14 15 15 16 16 17 17 class Reader(): 18 18 """ 19 A class for reading in CanSAS v2.0 data files. The existing iteration opens Mantid generated HDF5 formatted files 20 with file extension .h5/.H5. Any number of data sets may be present within the file and any dimensionality of data 21 may be used. Currently 1D and 2D SAS data sets are supported, but future implementations will include 1D and 2D 22 SESANS data. 23 24 Any number of SASdata sets may be present in a SASentry and the data within can be either 1D I(Q) or 2D I(Qx, Qy). 25 <<<<<<< HEAD 26 ======= 27 19 A class for reading in CanSAS v2.0 data files. The existing iteration opens 20 Mantid generated HDF5 formatted files with file extension .h5/.H5. Any 21 number of data sets may be present within the file and any dimensionality 22 of data may be used. Currently 1D and 2D SAS data sets are supported, but 23 future implementations will include 1D and 2D SESANS data. 24 25 Any number of SASdata sets may be present in a SASentry and the data within 26 can be either 1D I(Q) or 2D I(Qx, Qy). 
28 27 Also supports reading NXcanSAS formatted HDF5 files 29 >>>>>>> master30 28 31 29 :Dependencies: … … 33 31 """ 34 32 35 # #CanSAS version33 # CanSAS version 36 34 cansas_version = 2.0 37 # #Logged warnings or messages35 # Logged warnings or messages 38 36 logging = None 39 # #List of errors for the current data set37 # List of errors for the current data set 40 38 errors = None 41 # #Raw file contents to be processed39 # Raw file contents to be processed 42 40 raw_data = None 43 # #Data info currently being read in41 # Data info currently being read in 44 42 current_datainfo = None 45 # #SASdata set currently being read in43 # SASdata set currently being read in 46 44 current_dataset = None 47 # #List of plottable1D objects that should be linked to the current_datainfo45 # List of plottable1D objects that should be linked to the current_datainfo 48 46 data1d = None 49 # #List of plottable2D objects that should be linked to the current_datainfo47 # List of plottable2D objects that should be linked to the current_datainfo 50 48 data2d = None 51 # #Data type name49 # Data type name 52 50 type_name = "CanSAS 2.0" 53 # #Wildcards51 # Wildcards 54 52 type = ["CanSAS 2.0 HDF5 Files (*.h5)|*.h5"] 55 # #List of allowed extensions53 # List of allowed extensions 56 54 ext = ['.h5', '.H5'] 57 # #Flag to bypass extension check58 allow_all = False59 # #List of files to return55 # Flag to bypass extension check 56 allow_all = True 57 # List of files to return 60 58 output = None 61 59 … … 67 65 :return: List of Data1D/2D objects and/or a list of errors. 
68 66 """ 69 # # Reinitialize the classwhen loading a new data file to reset all class variables67 # Reinitialize when loading a new data file to reset all class variables 70 68 self.reset_class_variables() 71 # #Check that the file exists69 # Check that the file exists 72 70 if os.path.isfile(filename): 73 71 basename = os.path.basename(filename) … … 75 73 # If the file type is not allowed, return empty list 76 74 if extension in self.ext or self.allow_all: 77 # #Load the data file75 # Load the data file 78 76 self.raw_data = h5py.File(filename, 'r') 79 # #Read in all child elements of top level SASroot77 # Read in all child elements of top level SASroot 80 78 self.read_children(self.raw_data, []) 81 # #Add the last data set to the list of outputs79 # Add the last data set to the list of outputs 82 80 self.add_data_set() 83 # #Close the data file81 # Close the data file 84 82 self.raw_data.close() 85 # #Return data set(s)83 # Return data set(s) 86 84 return self.output 87 85 … … 113 111 """ 114 112 115 # #Loop through each element of the parent and process accordingly113 # Loop through each element of the parent and process accordingly 116 114 for key in data.keys(): 117 # #Get all information for the current key115 # Get all information for the current key 118 116 value = data.get(key) 119 117 if value.attrs.get(u'canSAS_class') is not None: … … 129 127 self.parent_class = class_name 130 128 parent_list.append(key) 131 ## If this is a new sasentry, store the current data sets and create a fresh Data1D/2D object 129 # If a new sasentry, store the current data sets and create 130 # a fresh Data1D/2D object 132 131 if class_prog.match(u'SASentry'): 133 132 self.add_data_set(key) 134 133 elif class_prog.match(u'SASdata'): 135 134 self._initialize_new_data_set(parent_list) 136 # #Recursion step to access data within the group135 # Recursion step to access data within the group 137 136 self.read_children(value, parent_list) 138 137 self.add_intermediate() … … 140 139 
141 140 elif isinstance(value, h5py.Dataset): 142 # #If this is a dataset, store the data appropriately141 # If this is a dataset, store the data appropriately 143 142 data_set = data[key][:] 144 143 unit = self._get_unit(value) 145 144 146 # #I and Q Data145 # I and Q Data 147 146 if key == u'I': 148 if type(self.current_dataset) is plottable_2D:147 if isinstance(self.current_dataset, plottable_2D): 149 148 self.current_dataset.data = data_set 150 149 self.current_dataset.zaxis("Intensity", unit) … … 154 153 continue 155 154 elif key == u'Idev': 156 if type(self.current_dataset) is plottable_2D:155 if isinstance(self.current_dataset, plottable_2D): 157 156 self.current_dataset.err_data = data_set.flatten() 158 157 else: … … 161 160 elif key == u'Q': 162 161 self.current_dataset.xaxis("Q", unit) 163 if type(self.current_dataset) is plottable_2D:162 if isinstance(self.current_dataset, plottable_2D): 164 163 self.current_dataset.q = data_set.flatten() 165 164 else: … … 183 182 self.current_dataset.mask = data_set.flatten() 184 183 continue 184 # Transmission Spectrum 185 elif (key == u'T' 186 and self.parent_class == u'SAStransmission_spectrum'): 187 self.trans_spectrum.transmission = data_set.flatten() 188 continue 189 elif (key == u'Tdev' 190 and self.parent_class == u'SAStransmission_spectrum'): 191 self.trans_spectrum.transmission_deviation = \ 192 data_set.flatten() 193 continue 194 elif (key == u'lambda' 195 and self.parent_class == u'SAStransmission_spectrum'): 196 self.trans_spectrum.wavelength = data_set.flatten() 197 continue 185 198 186 199 for data_point in data_set: 187 # #Top Level Meta Data200 # Top Level Meta Data 188 201 if key == u'definition': 189 202 self.current_datainfo.meta_data['reader'] = data_point … … 195 208 self.current_datainfo.notes.append(data_point) 196 209 197 ## Sample Information 198 elif key == u'Title' and self.parent_class == u'SASsample': # CanSAS 2.0 format 210 # Sample Information 211 # CanSAS 2.0 format 212 elif key == 
u'Title' and self.parent_class == u'SASsample': 199 213 self.current_datainfo.sample.name = data_point 200 elif key == u'ID' and self.parent_class == u'SASsample': # NXcanSAS format 214 # NXcanSAS format 215 elif key == u'name' and self.parent_class == u'SASsample': 201 216 self.current_datainfo.sample.name = data_point 202 elif key == u'thickness' and self.parent_class == u'SASsample': 217 # NXcanSAS format 218 elif key == u'ID' and self.parent_class == u'SASsample': 219 self.current_datainfo.sample.name = data_point 220 elif (key == u'thickness' 221 and self.parent_class == u'SASsample'): 203 222 self.current_datainfo.sample.thickness = data_point 204 elif key == u'temperature' and self.parent_class == u'SASsample': 223 elif (key == u'temperature' 224 and self.parent_class == u'SASsample'): 205 225 self.current_datainfo.sample.temperature = data_point 206 207 ## Instrumental Information 208 elif key == u'name' and self.parent_class == u'SASinstrument': 226 elif (key == u'transmission' 227 and self.parent_class == u'SASsample'): 228 self.current_datainfo.sample.transmission = data_point 229 elif (key == u'x_position' 230 and self.parent_class == u'SASsample'): 231 self.current_datainfo.sample.position.x = data_point 232 elif (key == u'y_position' 233 and self.parent_class == u'SASsample'): 234 self.current_datainfo.sample.position.y = data_point 235 elif key == u'pitch' and self.parent_class == u'SASsample': 236 self.current_datainfo.sample.orientation.x = data_point 237 elif key == u'yaw' and self.parent_class == u'SASsample': 238 self.current_datainfo.sample.orientation.y = data_point 239 elif key == u'roll' and self.parent_class == u'SASsample': 240 self.current_datainfo.sample.orientation.z = data_point 241 elif (key == u'details' 242 and self.parent_class == u'SASsample'): 243 self.current_datainfo.sample.details.append(data_point) 244 245 # Instrumental Information 246 elif (key == u'name' 247 and self.parent_class == u'SASinstrument'): 209 248 
self.current_datainfo.instrument = data_point 210 249 elif key == u'name' and self.parent_class == u'SASdetector': … … 213 252 self.detector.distance = float(data_point) 214 253 self.detector.distance_unit = unit 215 elif key == u'SSD' and self.parent_class == u'SAScollimation': 254 elif (key == u'slit_length' 255 and self.parent_class == u'SASdetector'): 256 self.detector.slit_length = float(data_point) 257 self.detector.slit_length_unit = unit 258 elif (key == u'x_position' 259 and self.parent_class == u'SASdetector'): 260 self.detector.offset.x = float(data_point) 261 self.detector.offset_unit = unit 262 elif (key == u'y_position' 263 and self.parent_class == u'SASdetector'): 264 self.detector.offset.y = float(data_point) 265 self.detector.offset_unit = unit 266 elif (key == u'pitch' 267 and self.parent_class == u'SASdetector'): 268 self.detector.orientation.x = float(data_point) 269 self.detector.orientation_unit = unit 270 elif key == u'roll' and self.parent_class == u'SASdetector': 271 self.detector.orientation.z = float(data_point) 272 self.detector.orientation_unit = unit 273 elif key == u'yaw' and self.parent_class == u'SASdetector': 274 self.detector.orientation.y = float(data_point) 275 self.detector.orientation_unit = unit 276 elif (key == u'beam_center_x' 277 and self.parent_class == u'SASdetector'): 278 self.detector.beam_center.x = float(data_point) 279 self.detector.beam_center_unit = unit 280 elif (key == u'beam_center_y' 281 and self.parent_class == u'SASdetector'): 282 self.detector.beam_center.y = float(data_point) 283 self.detector.beam_center_unit = unit 284 elif (key == u'x_pixel_size' 285 and self.parent_class == u'SASdetector'): 286 self.detector.pixel_size.x = float(data_point) 287 self.detector.pixel_size_unit = unit 288 elif (key == u'y_pixel_size' 289 and self.parent_class == u'SASdetector'): 290 self.detector.pixel_size.y = float(data_point) 291 self.detector.pixel_size_unit = unit 292 elif (key == u'distance' 293 and self.parent_class 
== u'SAScollimation'): 216 294 self.collimation.length = data_point 217 295 self.collimation.length_unit = unit 218 elif key == u'name' and self.parent_class == u'SAScollimation': 296 elif (key == u'name' 297 and self.parent_class == u'SAScollimation'): 219 298 self.collimation.name = data_point 220 221 ## Process Information 222 elif key == u'name' and self.parent_class == u'SASprocess': 299 elif (key == u'shape' 300 and self.parent_class == u'SASaperture'): 301 self.aperture.shape = data_point 302 elif (key == u'x_gap' 303 and self.parent_class == u'SASaperture'): 304 self.aperture.size.x = data_point 305 elif (key == u'y_gap' 306 and self.parent_class == u'SASaperture'): 307 self.aperture.size.y = data_point 308 309 # Process Information 310 elif (key == u'Title' 311 and self.parent_class == u'SASprocess'): # CanSAS 2.0 223 312 self.process.name = data_point 224 elif key == u'description' and self.parent_class == u'SASprocess': 313 elif (key == u'name' 314 and self.parent_class == u'SASprocess'): # NXcanSAS 315 self.process.name = data_point 316 elif (key == u'description' 317 and self.parent_class == u'SASprocess'): 225 318 self.process.description = data_point 226 319 elif key == u'date' and self.parent_class == u'SASprocess': 227 320 self.process.date = data_point 321 elif key == u'term' and self.parent_class == u'SASprocess': 322 self.process.term = data_point 228 323 elif self.parent_class == u'SASprocess': 229 324 self.process.notes.append(data_point) 230 325 231 ## Transmission Spectrum 232 elif key == u'T' and self.parent_class == u'SAStransmission_spectrum': 233 self.trans_spectrum.transmission.append(data_point) 234 elif key == u'Tdev' and self.parent_class == u'SAStransmission_spectrum': 235 self.trans_spectrum.transmission_deviation.append(data_point) 236 elif key == u'lambda' and self.parent_class == u'SAStransmission_spectrum': 237 self.trans_spectrum.wavelength.append(data_point) 238 239 ## Source 240 elif key == u'wavelength' and 
self.parent_class == u'SASdata': 326 # Source 327 elif (key == u'wavelength' 328 and self.parent_class == u'SASdata'): 241 329 self.current_datainfo.source.wavelength = data_point 242 330 self.current_datainfo.source.wavelength_unit = unit 243 elif key == u'incident_wavelength' and self.parent_class == u'SASsource': 331 elif (key == u'incident_wavelength' 332 and self.parent_class == 'SASsource'): 244 333 self.current_datainfo.source.wavelength = data_point 245 334 self.current_datainfo.source.wavelength_unit = unit 246 elif key == u'wavelength_max' and self.parent_class == u'SASsource': 335 elif (key == u'wavelength_max' 336 and self.parent_class == u'SASsource'): 247 337 self.current_datainfo.source.wavelength_max = data_point 248 338 self.current_datainfo.source.wavelength_max_unit = unit 249 elif key == u'wavelength_min' and self.parent_class == u'SASsource': 339 elif (key == u'wavelength_min' 340 and self.parent_class == u'SASsource'): 250 341 self.current_datainfo.source.wavelength_min = data_point 251 342 self.current_datainfo.source.wavelength_min_unit = unit 252 elif key == u'wavelength_spread' and self.parent_class == u'SASsource': 253 self.current_datainfo.source.wavelength_spread = data_point 254 self.current_datainfo.source.wavelength_spread_unit = unit 255 elif key == u'beam_size_x' and self.parent_class == u'SASsource': 343 elif (key == u'incident_wavelength_spread' 344 and self.parent_class == u'SASsource'): 345 self.current_datainfo.source.wavelength_spread = \ 346 data_point 347 self.current_datainfo.source.wavelength_spread_unit = \ 348 unit 349 elif (key == u'beam_size_x' 350 and self.parent_class == u'SASsource'): 256 351 self.current_datainfo.source.beam_size.x = data_point 257 352 self.current_datainfo.source.beam_size_unit = unit 258 elif key == u'beam_size_y' and self.parent_class == u'SASsource': 353 elif (key == u'beam_size_y' 354 and self.parent_class == u'SASsource'): 259 355 self.current_datainfo.source.beam_size.y = data_point 260 356 
self.current_datainfo.source.beam_size_unit = unit 261 elif key == u'beam_shape' and self.parent_class == u'SASsource': 357 elif (key == u'beam_shape' 358 and self.parent_class == u'SASsource'): 262 359 self.current_datainfo.source.beam_shape = data_point 263 elif key == u'radiation' and self.parent_class == u'SASsource': 360 elif (key == u'radiation' 361 and self.parent_class == u'SASsource'): 264 362 self.current_datainfo.source.radiation = data_point 265 elif key == u'transmission' and self.parent_class == u'SASdata': 363 elif (key == u'transmission' 364 and self.parent_class == u'SASdata'): 266 365 self.current_datainfo.sample.transmission = data_point 267 366 268 # #Everything else goes in meta_data367 # Everything else goes in meta_data 269 368 else: 270 new_key = self._create_unique_key(self.current_datainfo.meta_data, key) 369 new_key = self._create_unique_key( 370 self.current_datainfo.meta_data, key) 271 371 self.current_datainfo.meta_data[new_key] = data_point 272 372 273 373 else: 274 # #I don't know if this reachable code374 # I don't know if this reachable code 275 375 self.errors.add("ShouldNeverHappenException") 276 376 277 377 def add_intermediate(self): 278 378 """ 279 This method stores any intermediate objects within the final data set after fully reading the set. 280 281 :param parent: The NXclass name for the h5py Group object that just finished being processed 379 This method stores any intermediate objects within the final data set 380 after fully reading the set. 
381 382 :param parent: The NXclass name for the h5py Group object that just 383 finished being processed 282 384 """ 283 385 … … 298 400 self.aperture = Aperture() 299 401 elif self.parent_class == u'SASdata': 300 if type(self.current_dataset) is plottable_2D:402 if isinstance(self.current_dataset, plottable_2D): 301 403 self.data2d.append(self.current_dataset) 302 elif type(self.current_dataset) is plottable_1D:404 elif isinstance(self.current_dataset, plottable_1D): 303 405 self.data1d.append(self.current_dataset) 304 406 305 407 def final_data_cleanup(self): 306 408 """ 307 Does some final cleanup and formatting on self.current_datainfo and all data1D and data2D objects and then 308 combines the data and info into Data1D and Data2D objects 309 """ 310 311 ## Type cast data arrays to float64 409 Does some final cleanup and formatting on self.current_datainfo and 410 all data1D and data2D objects and then combines the data and info into 411 Data1D and Data2D objects 412 """ 413 414 # Type cast data arrays to float64 312 415 if len(self.current_datainfo.trans_spectrum) > 0: 313 416 spectrum_list = [] … … 315 418 spectrum.transmission = np.delete(spectrum.transmission, [0]) 316 419 spectrum.transmission = spectrum.transmission.astype(np.float64) 317 spectrum.transmission_deviation = np.delete(spectrum.transmission_deviation, [0]) 318 spectrum.transmission_deviation = spectrum.transmission_deviation.astype(np.float64) 420 spectrum.transmission_deviation = np.delete( 421 spectrum.transmission_deviation, [0]) 422 spectrum.transmission_deviation = \ 423 spectrum.transmission_deviation.astype(np.float64) 319 424 spectrum.wavelength = np.delete(spectrum.wavelength, [0]) 320 425 spectrum.wavelength = spectrum.wavelength.astype(np.float64) … … 323 428 self.current_datainfo.trans_spectrum = spectrum_list 324 429 325 # #Append errors to dataset and reset class errors430 # Append errors to dataset and reset class errors 326 431 self.current_datainfo.errors = self.errors 327 
432 self.errors.clear() 328 433 329 # #Combine all plottables with datainfo and append each to output330 # #Type cast data arrays to float64 and find min/max as appropriate434 # Combine all plottables with datainfo and append each to output 435 # Type cast data arrays to float64 and find min/max as appropriate 331 436 for dataset in self.data2d: 332 437 dataset.data = dataset.data.astype(np.float64) … … 348 453 zeros = np.ones(dataset.data.size, dtype=bool) 349 454 try: 350 for i in range 455 for i in range(0, dataset.mask.size - 1): 351 456 zeros[i] = dataset.mask[i] 352 457 except: 353 458 self.errors.add(sys.exc_value) 354 459 dataset.mask = zeros 355 # #Calculate the actual Q matrix460 # Calculate the actual Q matrix 356 461 try: 357 462 if dataset.q_data.size <= 1: 358 dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data + dataset.qy_data * dataset.qy_data) 463 dataset.q_data = np.sqrt(dataset.qx_data 464 * dataset.qx_data 465 + dataset.qy_data 466 * dataset.qy_data) 359 467 except: 360 468 dataset.q_data = None … … 366 474 dataset.data = dataset.data.flatten() 367 475 368 final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo) 476 final_dataset = combine_data_info_with_plottable( 477 dataset, self.current_datainfo) 369 478 self.output.append(final_dataset) 370 479 … … 386 495 if dataset.dy is not None: 387 496 dataset.dy = dataset.dy.astype(np.float64) 388 final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo) 497 final_dataset = combine_data_info_with_plottable( 498 dataset, self.current_datainfo) 389 499 self.output.append(final_dataset) 390 500 391 501 def add_data_set(self, key=""): 392 502 """ 393 Adds the current_dataset to the list of outputs after preforming final processing on the data and then calls a 394 private method to generate a new data set. 
503 Adds the current_dataset to the list of outputs after preforming final 504 processing on the data and then calls a private method to generate a 505 new data set. 395 506 396 507 :param key: NeXus group name for current tree level … … 403 514 self.current_datainfo = DataInfo() 404 515 405 def _initialize_new_data_set(self, parent_list = None): 406 """ 407 A private class method to generate a new 1D or 2D data object based on the type of data within the set. 408 Outside methods should call add_data_set() to be sure any existing data is stored properly. 516 517 def _initialize_new_data_set(self, parent_list=None): 518 """ 519 A private class method to generate a new 1D or 2D data object based on 520 the type of data within the set. Outside methods should call 521 add_data_set() to be sure any existing data is stored properly. 409 522 410 523 :param parent_list: List of names of parent elements … … 423 536 def _find_intermediate(self, parent_list, basename=""): 424 537 """ 425 A private class used to find an entry by either using a direct key or knowing the approximate basename. 426 427 :param parent_list: List of parents to the current level in the HDF5 file 538 A private class used to find an entry by either using a direct key or 539 knowing the approximate basename. 540 541 :param parent_list: List of parents nodes in the HDF5 file 428 542 :param basename: Approximate name of an entry to search for 429 543 :return: … … 436 550 top = top.get(parent) 437 551 for key in top.keys(): 438 if (key_prog.match(key)):552 if key_prog.match(key): 439 553 entry = True 440 554 break … … 466 580 """ 467 581 unit = value.attrs.get(u'units') 468 if unit ==None:582 if unit is None: 469 583 unit = value.attrs.get(u'unit') 470 # #Convert the unit formats584 # Convert the unit formats 471 585 if unit == "1/A": 472 586 unit = "A^{-1}"
Note: See TracChangeset for help on using the changeset viewer.