Changes in / [26c9b85:b2ff1b2] in sasview
File:
- 1 edited
Legend:
- Unmodified context lines carry no marker
- Added lines are prefixed with +
- Removed lines are prefixed with -
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
--- src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py (rbbd0f37)
+++ src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py (rc94280c)

 import sys

-from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, DataInfo, Process, Aperture
-from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector
+from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\
+    Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
+    TransmissionSpectrum, Detector
 from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable


 class Reader():
     """
-    A class for reading in CanSAS v2.0 data files. The existing iteration opens Mantid generated HDF5 formatted files
-    with file extension .h5/.H5. Any number of data sets may be present within the file and any dimensionality of data
-    may be used. Currently 1D and 2D SAS data sets are supported, but future implementations will include 1D and 2D
-    SESANS data.
-
-    Any number of SASdata sets may be present in a SASentry and the data within can be either 1D I(Q) or 2D I(Qx, Qy).
+    A class for reading in CanSAS v2.0 data files. The existing iteration opens
+    Mantid generated HDF5 formatted files with file extension .h5/.H5. Any
+    number of data sets may be present within the file and any dimensionality
+    of data may be used. Currently 1D and 2D SAS data sets are supported, but
+    future implementations will include 1D and 2D SESANS data.
+
+    Any number of SASdata sets may be present in a SASentry and the data within
+    can be either 1D I(Q) or 2D I(Qx, Qy).

     Also supports reading NXcanSAS formatted HDF5 files
…
     """

-    ## CanSAS version
+    # CanSAS version
     cansas_version = 2.0
-    ## Logged warnings or messages
+    # Logged warnings or messages
     logging = None
-    ## List of errors for the current data set
+    # List of errors for the current data set
     errors = None
-    ## Raw file contents to be processed
+    # Raw file contents to be processed
     raw_data = None
-    ## Data info currently being read in
+    # Data info currently being read in
     current_datainfo = None
-    ## SASdata set currently being read in
+    # SASdata set currently being read in
     current_dataset = None
-    ## List of plottable1D objects that should be linked to the current_datainfo
+    # List of plottable1D objects that should be linked to the current_datainfo
     data1d = None
-    ## List of plottable2D objects that should be linked to the current_datainfo
+    # List of plottable2D objects that should be linked to the current_datainfo
     data2d = None
-    ## Data type name
+    # Data type name
     type_name = "CanSAS 2.0"
-    ## Wildcards
+    # Wildcards
     type = ["CanSAS 2.0 HDF5 Files (*.h5)|*.h5"]
-    ## List of allowed extensions
+    # List of allowed extensions
     ext = ['.h5', '.H5']
-    ## Flag to bypass extension check
-    allow_all = False
-    ## List of files to return
+    # Flag to bypass extension check
+    allow_all = True
+    # List of files to return
     output = None
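For orientation, this is roughly how the reader is driven once the class attributes above are in place. A minimal usage sketch, assuming a SasView environment where this module is importable, the standard reader entry point read(), and a CanSAS 2.0 / NXcanSAS file whose name is invented here:

# Minimal usage sketch; "example_sans_data.h5" is a made-up file name.
from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader

reader = Reader()
data_sets = reader.read("example_sans_data.h5")  # list of Data1D / Data2D
for data in data_sets:
    # Each returned object combines the plottable arrays with its DataInfo
    # metadata via combine_data_info_with_plottable().
    print(type(data).__name__, getattr(data, "title", ""))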
…
         :return: List of Data1D/2D objects and/or a list of errors.
         """
-        ## Reinitialize the class when loading a new data file to reset all class variables
+        # Reinitialize when loading a new data file to reset all class variables
         self.reset_class_variables()
-        ## Check that the file exists
+        # Check that the file exists
         if os.path.isfile(filename):
             basename = os.path.basename(filename)
…
             # If the file type is not allowed, return empty list
             if extension in self.ext or self.allow_all:
-                ## Load the data file
+                # Load the data file
                 self.raw_data = h5py.File(filename, 'r')
-                ## Read in all child elements of top level SASroot
+                # Read in all child elements of top level SASroot
                 self.read_children(self.raw_data, [])
-                ## Add the last data set to the list of outputs
+                # Add the last data set to the list of outputs
                 self.add_data_set()
-                ## Close the data file
+                # Close the data file
                 self.raw_data.close()
-                ## Return data set(s)
+                # Return data set(s)
                 return self.output

…
         """

-        ## Loop through each element of the parent and process accordingly
+        # Loop through each element of the parent and process accordingly
         for key in data.keys():
-            ## Get all information for the current key
+            # Get all information for the current key
             value = data.get(key)
             if value.attrs.get(u'canSAS_class') is not None:
…
                 self.parent_class = class_name
                 parent_list.append(key)
-                ## If this is a new sasentry, store the current data sets and create a fresh Data1D/2D object
+                # If a new sasentry, store the current data sets and create
+                # a fresh Data1D/2D object
                 if class_prog.match(u'SASentry'):
                     self.add_data_set(key)
                 elif class_prog.match(u'SASdata'):
                     self._initialize_new_data_set(parent_list)
-                ## Recursion step to access data within the group
+                # Recursion step to access data within the group
                 self.read_children(value, parent_list)
                 self.add_intermediate()
…

             elif isinstance(value, h5py.Dataset):
-                ## If this is a dataset, store the data appropriately
+                # If this is a dataset, store the data appropriately
                 data_set = data[key][:]
                 unit = self._get_unit(value)

-                ## I and Q Data
+                # I and Q Data
                 if key == u'I':
-                    if type(self.current_dataset) is plottable_2D:
+                    if isinstance(self.current_dataset, plottable_2D):
                         self.current_dataset.data = data_set
                         self.current_dataset.zaxis("Intensity", unit)
…
                         continue
                 elif key == u'Idev':
-                    if type(self.current_dataset) is plottable_2D:
+                    if isinstance(self.current_dataset, plottable_2D):
                         self.current_dataset.err_data = data_set.flatten()
                     else:
…
                 elif key == u'Q':
                     self.current_dataset.xaxis("Q", unit)
-                    if type(self.current_dataset) is plottable_2D:
+                    if isinstance(self.current_dataset, plottable_2D):
                         self.current_dataset.q = data_set.flatten()
                     else:
…
                     self.current_dataset.dx = data_set.flatten()
                     continue
+                elif key == u'dQw':
+                    self.current_dataset.dxw = data_set.flatten()
+                    continue
+                elif key == u'dQl':
+                    self.current_dataset.dxl = data_set.flatten()
+                    continue
                 elif key == u'Qy':
                     self.current_dataset.yaxis("Q_y", unit)
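A note on the recurring type()-to-isinstance() change above: an identity test against the base class rejects subclasses, while isinstance() accepts them, which matters if current_dataset can be a subclass of plottable_1D/plottable_2D. A small self-contained illustration with stand-in classes (not the real SasView ones):

# Stand-in classes, purely to show the behavioural difference.
class plottable_2D(object):
    pass

class Data2D(plottable_2D):  # hypothetical subclass for the illustration
    pass

current_dataset = Data2D()
print(type(current_dataset) is plottable_2D)      # False: exact type only
print(isinstance(current_dataset, plottable_2D))  # True: subclasses accepted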
…
                     self.current_dataset.mask = data_set.flatten()
                     continue
+                # Transmission Spectrum
+                elif (key == u'T'
+                      and self.parent_class == u'SAStransmission_spectrum'):
+                    self.trans_spectrum.transmission = data_set.flatten()
+                    continue
+                elif (key == u'Tdev'
+                      and self.parent_class == u'SAStransmission_spectrum'):
+                    self.trans_spectrum.transmission_deviation = \
+                        data_set.flatten()
+                    continue
+                elif (key == u'lambda'
+                      and self.parent_class == u'SAStransmission_spectrum'):
+                    self.trans_spectrum.wavelength = data_set.flatten()
+                    continue

                 for data_point in data_set:
-                    ## Top Level Meta Data
+                    # Top Level Meta Data
                     if key == u'definition':
                         self.current_datainfo.meta_data['reader'] = data_point
…
                         self.current_datainfo.notes.append(data_point)

-                    ## Sample Information
-                    elif key == u'Title' and self.parent_class == u'SASsample': # CanSAS 2.0 format
+                    # Sample Information
+                    # CanSAS 2.0 format
+                    elif key == u'Title' and self.parent_class == u'SASsample':
                         self.current_datainfo.sample.name = data_point
-                    elif key == u'ID' and self.parent_class == u'SASsample': # NXcanSAS format
+                    # NXcanSAS format
+                    elif key == u'name' and self.parent_class == u'SASsample':
+                        self.current_datainfo.sample.name = data_point
+                    # NXcanSAS format
+                    elif key == u'ID' and self.parent_class == u'SASsample':
                         self.current_datainfo.sample.name = data_point
-                    elif key == u'thickness' and self.parent_class == u'SASsample':
+                    elif (key == u'thickness'
+                          and self.parent_class == u'SASsample'):
                         self.current_datainfo.sample.thickness = data_point
-                    elif key == u'temperature' and self.parent_class == u'SASsample':
+                    elif (key == u'temperature'
+                          and self.parent_class == u'SASsample'):
                         self.current_datainfo.sample.temperature = data_point
-                    elif key == u'transmission' and self.parent_class == u'SASsample':
+                    elif (key == u'transmission'
+                          and self.parent_class == u'SASsample'):
                         self.current_datainfo.sample.transmission = data_point
-                    elif key == u'x_position' and self.parent_class == u'SASsample':
+                    elif (key == u'x_position'
+                          and self.parent_class == u'SASsample'):
                         self.current_datainfo.sample.position.x = data_point
-                    elif key == u'y_position' and self.parent_class == u'SASsample':
+                    elif (key == u'y_position'
+                          and self.parent_class == u'SASsample'):
                         self.current_datainfo.sample.position.y = data_point
-                    elif key == u'polar_angle' and self.parent_class == u'SASsample':
+                    elif key == u'pitch' and self.parent_class == u'SASsample':
                         self.current_datainfo.sample.orientation.x = data_point
-                    elif key == u'azimuthal_angle' and self.parent_class == u'SASsample':
+                    elif key == u'yaw' and self.parent_class == u'SASsample':
+                        self.current_datainfo.sample.orientation.y = data_point
+                    elif key == u'roll' and self.parent_class == u'SASsample':
                         self.current_datainfo.sample.orientation.z = data_point
-                    elif key == u'details' and self.parent_class == u'SASsample':
+                    elif (key == u'details'
+                          and self.parent_class == u'SASsample'):
                         self.current_datainfo.sample.details.append(data_point)

-                    ## Instrumental Information
-                    elif key == u'name' and self.parent_class == u'SASinstrument':
+                    # Instrumental Information
+                    elif (key == u'name'
+                          and self.parent_class == u'SASinstrument'):
                         self.current_datainfo.instrument = data_point
                     elif key == u'name' and self.parent_class == u'SASdetector':
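The sample block above replaces the CanSAS-style polar_angle/azimuthal_angle keys with NXcanSAS-style pitch, yaw and roll, mapped onto orientation.x, .y and .z. A compact restatement of that mapping (a sketch only; the reader itself keeps the explicit elif chain):

# Sketch of the orientation key mapping used above, not the reader's own code.
from types import SimpleNamespace

ORIENTATION_AXES = {
    u'pitch': 'x',  # previously read from u'polar_angle'
    u'yaw': 'y',    # newly handled in this changeset
    u'roll': 'z',   # previously read from u'azimuthal_angle'
}

def set_orientation(orientation, key, value):
    """Assign value to orientation.x/.y/.z when key is a recognised angle."""
    axis = ORIENTATION_AXES.get(key)
    if axis is None:
        return False
    setattr(orientation, axis, value)
    return True

# Example with a simple stand-in orientation object:
orientation = SimpleNamespace(x=None, y=None, z=None)
set_orientation(orientation, u'yaw', 12.5)
print(orientation.y)  # 12.5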
…
                         self.detector.distance = float(data_point)
                         self.detector.distance_unit = unit
-                    elif key == u'slit_length' and self.parent_class == u'SASdetector':
+                    elif (key == u'slit_length'
+                          and self.parent_class == u'SASdetector'):
                         self.detector.slit_length = float(data_point)
                         self.detector.slit_length_unit = unit
-                    elif key == u'x_position' and self.parent_class == u'SASdetector':
+                    elif (key == u'x_position'
+                          and self.parent_class == u'SASdetector'):
                         self.detector.offset.x = float(data_point)
                         self.detector.offset_unit = unit
-                    elif key == u'y_position' and self.parent_class == u'SASdetector':
+                    elif (key == u'y_position'
+                          and self.parent_class == u'SASdetector'):
                         self.detector.offset.y = float(data_point)
                         self.detector.offset_unit = unit
-                    elif key == u'polar_angle' and self.parent_class == u'SASdetector':
+                    elif (key == u'pitch'
+                          and self.parent_class == u'SASdetector'):
                         self.detector.orientation.x = float(data_point)
                         self.detector.orientation_unit = unit
-                    elif key == u'azimuthal_angle' and self.parent_class == u'SASdetector':
+                    elif key == u'roll' and self.parent_class == u'SASdetector':
                         self.detector.orientation.z = float(data_point)
                         self.detector.orientation_unit = unit
-                    elif key == u'beam_center_x' and self.parent_class == u'SASdetector':
+                    elif key == u'yaw' and self.parent_class == u'SASdetector':
+                        self.detector.orientation.y = float(data_point)
+                        self.detector.orientation_unit = unit
+                    elif (key == u'beam_center_x'
+                          and self.parent_class == u'SASdetector'):
                         self.detector.beam_center.x = float(data_point)
                         self.detector.beam_center_unit = unit
-                    elif key == u'beam_center_y' and self.parent_class == u'SASdetector':
+                    elif (key == u'beam_center_y'
+                          and self.parent_class == u'SASdetector'):
                         self.detector.beam_center.y = float(data_point)
                         self.detector.beam_center_unit = unit
-                    elif key == u'x_pixel_size' and self.parent_class == u'SASdetector':
+                    elif (key == u'x_pixel_size'
+                          and self.parent_class == u'SASdetector'):
                         self.detector.pixel_size.x = float(data_point)
                         self.detector.pixel_size_unit = unit
-                    elif key == u'y_pixel_size' and self.parent_class == u'SASdetector':
+                    elif (key == u'y_pixel_size'
+                          and self.parent_class == u'SASdetector'):
                         self.detector.pixel_size.y = float(data_point)
                         self.detector.pixel_size_unit = unit
-                    elif key == u'SSD' and self.parent_class == u'SAScollimation':
+                    elif (key == u'distance'
+                          and self.parent_class == u'SAScollimation'):
                         self.collimation.length = data_point
                         self.collimation.length_unit = unit
-                    elif key == u'name' and self.parent_class == u'SAScollimation':
+                    elif (key == u'name'
+                          and self.parent_class == u'SAScollimation'):
                         self.collimation.name = data_point
-
-                    ## Process Information
-                    elif key == u'name' and self.parent_class == u'SASprocess':
+                    elif (key == u'shape'
+                          and self.parent_class == u'SASaperture'):
+                        self.aperture.shape = data_point
+                    elif (key == u'x_gap'
+                          and self.parent_class == u'SASaperture'):
+                        self.aperture.size.x = data_point
+                    elif (key == u'y_gap'
+                          and self.parent_class == u'SASaperture'):
+                        self.aperture.size.y = data_point
+
+                    # Process Information
+                    elif (key == u'Title'
+                          and self.parent_class == u'SASprocess'): # CanSAS 2.0
                         self.process.name = data_point
-                    elif key == u'Title' and self.parent_class == u'SASprocess': # CanSAS 2.0 format
+                    elif (key == u'name'
+                          and self.parent_class == u'SASprocess'): # NXcanSAS
                         self.process.name = data_point
-                    elif key == u'name' and self.parent_class == u'SASprocess': # NXcanSAS format
-                        self.process.name = data_point
-                    elif key == u'description' and self.parent_class == u'SASprocess':
+                    elif (key == u'description'
+                          and self.parent_class == u'SASprocess'):
                         self.process.description = data_point
                     elif key == u'date' and self.parent_class == u'SASprocess':
                         self.process.date = data_point
+                    elif key == u'term' and self.parent_class == u'SASprocess':
+                        self.process.term = data_point
                     elif self.parent_class == u'SASprocess':
                         self.process.notes.append(data_point)

-                    ## Transmission Spectrum
-                    elif key == u'T' and self.parent_class == u'SAStransmission_spectrum':
-                        self.trans_spectrum.transmission.append(data_point)
-                    elif key == u'Tdev' and self.parent_class == u'SAStransmission_spectrum':
-                        self.trans_spectrum.transmission_deviation.append(data_point)
-                    elif key == u'lambda' and self.parent_class == u'SAStransmission_spectrum':
-                        self.trans_spectrum.wavelength.append(data_point)
-
-                    ## Source
-                    elif key == u'wavelength' and self.parent_class == u'SASdata':
+                    # Source
+                    elif (key == u'wavelength'
+                          and self.parent_class == u'SASdata'):
                         self.current_datainfo.source.wavelength = data_point
                         self.current_datainfo.source.wavelength_unit = unit
-                    elif key == u'incident_wavelength' and self.parent_class == u'SASsource':
+                    elif (key == u'incident_wavelength'
+                          and self.parent_class == 'SASsource'):
                         self.current_datainfo.source.wavelength = data_point
                         self.current_datainfo.source.wavelength_unit = unit
-                    elif key == u'wavelength_max' and self.parent_class == u'SASsource':
+                    elif (key == u'wavelength_max'
+                          and self.parent_class == u'SASsource'):
                         self.current_datainfo.source.wavelength_max = data_point
                         self.current_datainfo.source.wavelength_max_unit = unit
-                    elif key == u'wavelength_min' and self.parent_class == u'SASsource':
+                    elif (key == u'wavelength_min'
+                          and self.parent_class == u'SASsource'):
                         self.current_datainfo.source.wavelength_min = data_point
                         self.current_datainfo.source.wavelength_min_unit = unit
-                    elif key == u'wavelength_spread' and self.parent_class == u'SASsource':
-                        self.current_datainfo.source.wavelength_spread = data_point
-                        self.current_datainfo.source.wavelength_spread_unit = unit
-                    elif key == u'beam_size_x' and self.parent_class == u'SASsource':
+                    elif (key == u'incident_wavelength_spread'
+                          and self.parent_class == u'SASsource'):
+                        self.current_datainfo.source.wavelength_spread = \
+                            data_point
+                        self.current_datainfo.source.wavelength_spread_unit = \
+                            unit
+                    elif (key == u'beam_size_x'
+                          and self.parent_class == u'SASsource'):
                         self.current_datainfo.source.beam_size.x = data_point
                         self.current_datainfo.source.beam_size_unit = unit
-                    elif key == u'beam_size_y' and self.parent_class == u'SASsource':
+                    elif (key == u'beam_size_y'
+                          and self.parent_class == u'SASsource'):
                         self.current_datainfo.source.beam_size.y = data_point
                         self.current_datainfo.source.beam_size_unit = unit
-                    elif key == u'beam_shape' and self.parent_class == u'SASsource':
+                    elif (key == u'beam_shape'
+                          and self.parent_class == u'SASsource'):
                         self.current_datainfo.source.beam_shape = data_point
-                    elif key == u'radiation' and self.parent_class == u'SASsource':
+                    elif (key == u'radiation'
+                          and self.parent_class == u'SASsource'):
                         self.current_datainfo.source.radiation = data_point
-                    elif key == u'transmission' and self.parent_class == u'SASdata':
+                    elif (key == u'transmission'
+                          and self.parent_class == u'SASdata'):
                         self.current_datainfo.sample.transmission = data_point

-                    ## Everything else goes in meta_data
+                    # Everything else goes in meta_data
                     else:
-                        new_key = self._create_unique_key(self.current_datainfo.meta_data, key)
+                        new_key = self._create_unique_key(
+                            self.current_datainfo.meta_data, key)
                         self.current_datainfo.meta_data[new_key] = data_point

             else:
-                ## I don't know if this reachable code
+                # I don't know if this reachable code
                 self.errors.add("ShouldNeverHappenException")

     def add_intermediate(self):
         """
-        This method stores any intermediate objects within the final data set after fully reading the set.
-
-        :param parent: The NXclass name for the h5py Group object that just finished being processed
+        This method stores any intermediate objects within the final data set
+        after fully reading the set.
+
+        :param parent: The NXclass name for the h5py Group object that just
+        finished being processed
         """
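The long elif chain above consumes plain h5py objects: groups tagged with a canSAS_class attribute and datasets carrying a units attribute. A toy file written and read back with h5py shows the shape of input this code expects (the group and dataset names and values below are invented for illustration):

import h5py
import numpy as np

# Write a toy file using the attribute conventions the reader looks for.
with h5py.File("toy_nxcansas.h5", "w") as handle:
    entry = handle.create_group("sasentry01")
    entry.attrs["canSAS_class"] = "SASentry"
    detector = entry.create_group("sasdetector01")
    detector.attrs["canSAS_class"] = "SASdetector"
    beam_x = detector.create_dataset("beam_center_x", data=np.array([64.5]))
    beam_x.attrs["units"] = "mm"

# Read it back the way read_children() does: attrs.get() on groups for
# canSAS_class, slicing for the data, and attrs for the units.
with h5py.File("toy_nxcansas.h5", "r") as handle:
    detector = handle["sasentry01/sasdetector01"]
    print(detector.attrs.get("canSAS_class"))               # SASdetector
    print(detector["beam_center_x"][:],
          detector["beam_center_x"].attrs.get("units"))     # [64.5] mm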
…
             self.aperture = Aperture()
         elif self.parent_class == u'SASdata':
-            if type(self.current_dataset) is plottable_2D:
+            if isinstance(self.current_dataset, plottable_2D):
                 self.data2d.append(self.current_dataset)
-            elif type(self.current_dataset) is plottable_1D:
+            elif isinstance(self.current_dataset, plottable_1D):
                 self.data1d.append(self.current_dataset)

     def final_data_cleanup(self):
         """
-        Does some final cleanup and formatting on self.current_datainfo and all data1D and data2D objects and then
-        combines the data and info into Data1D and Data2D objects
-        """
-
-        ## Type cast data arrays to float64
+        Does some final cleanup and formatting on self.current_datainfo and
+        all data1D and data2D objects and then combines the data and info into
+        Data1D and Data2D objects
+        """
+
+        # Type cast data arrays to float64
         if len(self.current_datainfo.trans_spectrum) > 0:
             spectrum_list = []
…
                 spectrum.transmission = np.delete(spectrum.transmission, [0])
                 spectrum.transmission = spectrum.transmission.astype(np.float64)
-                spectrum.transmission_deviation = np.delete(spectrum.transmission_deviation, [0])
-                spectrum.transmission_deviation = spectrum.transmission_deviation.astype(np.float64)
+                spectrum.transmission_deviation = np.delete(
+                    spectrum.transmission_deviation, [0])
+                spectrum.transmission_deviation = \
+                    spectrum.transmission_deviation.astype(np.float64)
                 spectrum.wavelength = np.delete(spectrum.wavelength, [0])
                 spectrum.wavelength = spectrum.wavelength.astype(np.float64)
…
             self.current_datainfo.trans_spectrum = spectrum_list

-        ## Append errors to dataset and reset class errors
+        # Append errors to dataset and reset class errors
         self.current_datainfo.errors = self.errors
         self.errors.clear()

-        ## Combine all plottables with datainfo and append each to output
-        ## Type cast data arrays to float64 and find min/max as appropriate
+        # Combine all plottables with datainfo and append each to output
+        # Type cast data arrays to float64 and find min/max as appropriate
         for dataset in self.data2d:
             dataset.data = dataset.data.astype(np.float64)
…
             zeros = np.ones(dataset.data.size, dtype=bool)
             try:
-                for i in range (0, dataset.mask.size - 1):
+                for i in range(0, dataset.mask.size - 1):
                     zeros[i] = dataset.mask[i]
             except:
                 self.errors.add(sys.exc_value)
             dataset.mask = zeros
-            ## Calculate the actual Q matrix
+            # Calculate the actual Q matrix
             try:
                 if dataset.q_data.size <= 1:
-                    dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data + dataset.qy_data * dataset.qy_data)
+                    dataset.q_data = np.sqrt(dataset.qx_data
+                                             * dataset.qx_data
+                                             + dataset.qy_data
+                                             * dataset.qy_data)
             except:
                 dataset.q_data = None
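The 2-D branch above back-fills |Q| when only Qx and Qy were stored, using q = sqrt(qx^2 + qy^2). A quick numpy check of the same formula on invented values:

import numpy as np

qx_data = np.array([0.01, 0.02, -0.03])
qy_data = np.array([0.00, 0.02, 0.04])
q_data = np.sqrt(qx_data * qx_data + qy_data * qy_data)
print(q_data)  # [0.01       0.02828427 0.05      ]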
…
             dataset.data = dataset.data.flatten()

-            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
+            final_dataset = combine_data_info_with_plottable(
+                dataset, self.current_datainfo)
             self.output.append(final_dataset)

…
             if dataset.dy is not None:
                 dataset.dy = dataset.dy.astype(np.float64)
-            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
+            final_dataset = combine_data_info_with_plottable(
+                dataset, self.current_datainfo)
             self.output.append(final_dataset)

     def add_data_set(self, key=""):
         """
-        Adds the current_dataset to the list of outputs after preforming final processing on the data and then calls a
-        private method to generate a new data set.
+        Adds the current_dataset to the list of outputs after preforming final
+        processing on the data and then calls a private method to generate a
+        new data set.

         :param key: NeXus group name for current tree level
…


-    def _initialize_new_data_set(self, parent_list = None):
-        """
-        A private class method to generate a new 1D or 2D data object based on the type of data within the set.
-        Outside methods should call add_data_set() to be sure any existing data is stored properly.
+    def _initialize_new_data_set(self, parent_list=None):
+        """
+        A private class method to generate a new 1D or 2D data object based on
+        the type of data within the set. Outside methods should call
+        add_data_set() to be sure any existing data is stored properly.

         :param parent_list: List of names of parent elements
…
     def _find_intermediate(self, parent_list, basename=""):
         """
-        A private class used to find an entry by either using a direct key or knowing the approximate basename.
-
-        :param parent_list: List of parents to the current level in the HDF5 file
+        A private class used to find an entry by either using a direct key or
+        knowing the approximate basename.
+
+        :param parent_list: List of parents nodes in the HDF5 file
         :param basename: Approximate name of an entry to search for
         :return:
…
             top = top.get(parent)
         for key in top.keys():
-            if (key_prog.match(key)):
+            if key_prog.match(key):
                 entry = True
                 break
…
         """
         unit = value.attrs.get(u'units')
-        if unit == None:
+        if unit is None:
             unit = value.attrs.get(u'unit')
-        ## Convert the unit formats
+        # Convert the unit formats
         if unit == "1/A":
             unit = "A^{-1}"
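Finally, _get_unit() falls back from a units attribute to unit and then normalises the string; the visible tail of the diff rewrites "1/A" as "A^{-1}". A standalone sketch of that idea (the extra "1/cm" entry is an assumption, not shown in this changeset):

def normalise_unit(unit):
    """Rewrite reciprocal-length unit strings into the exponent form used downstream."""
    replacements = {"1/A": "A^{-1}", "1/cm": "cm^{-1}"}  # "1/cm" is assumed
    return replacements.get(unit, unit)

print(normalise_unit("1/A"))  # A^{-1}
print(normalise_unit("nm"))   # nm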
Note: See TracChangeset for help on using the changeset viewer.