Changeset ad52d31 in sasview for src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
- Timestamp: Jun 20, 2016 11:56:49 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 7673ecd
- Parents: 53b9fc8
- File: 1 edited
Legend:
- Unmodified: no prefix
- Added: prefixed with "+"
- Removed: prefixed with "-"
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
--- r68aa210
+++ rad52d31

 import sys
 
-from sas.sascalc.dataloader.data_info import Data1D, Data2D, Sample, Source
+from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, Sample, Source
 from sas.sascalc.dataloader.data_info import Process, Aperture, Collimation, TransmissionSpectrum, Detector
 
…
 class Reader():
     """
-    This is a placeholder for the epic class description I plan on writing in the future. But not today.
+    A class for reading in CanSAS v2.0 data files. The existing iteration opens Mantid generated HDF5 formatted files
+    with file extension .h5/.H5. Any number of data sets may be present within the file and any dimensionality of data
+    may be used. Currently 1D and 2D SAS data sets are supported, but future implementations will include 1D and 2D
+    SESANS data. This class assumes a single data set for each sasentry.
 
     :Dependencies:
…
     parent_list = None
     ## Data type name
-    type_name = "CanSAS 2D"
+    type_name = "CanSAS 2.0"
     ## Wildcards
-    type = ["CanSAS 2D HDF5 Files (*.h5)|*.h5"]
+    type = ["CanSAS 2.0 HDF5 Files (*.h5)|*.h5"]
     ## List of allowed extensions
     ext = ['.h5', '.H5']
…
     def __init__(self):
         """
-        Create the reader object and define initial states for certain class variables
+        Create the reader object and define initial states for class variables
         """
         self.current_dataset = None
+        self.datasets = []
         self.raw_data = None
         self.errors = set()
…
     def read(self, filename):
         """
-        General read method called by the top-level SasView data_loader.
+        This is the general read method that all SasView data_loaders must have.
 
         :param filename: A path for an HDF5 formatted CanSAS 2D data file.
…
 
-        ## Reinitialize the class when loading new data file to reset all class variables
+        ## Reinitialize the class when loading a new data file to reset all class variables
         self.__init__()
         ## Check that the file exists
…
         ## Read in all child elements of top level SASroot
         self.read_children(self.raw_data)
+        ## Add the last data set to the list of outputs
         self.add_data_set()
         ## Return data set(s)
…
     def read_children(self, data, parent=u'SASroot'):
         """
-        Recursive method for stepping through the hierarchy. Stores the data
+        A recursive method for stepping through the hierarchical data file.
 
         :param data: h5py Group object of any kind
…
 
             if isinstance(value, h5py.Group):
+                ##TODO: Rework this for multiple SASdata objects within a single SASentry to allow for both 1D and 2D
+                ##TODO: data within the same SASentry - One 1D and one 2D data object for all SASdata sets?
                 ## If this is a new sasentry, store the current data set and create a fresh Data1D/2D object
                 if class_prog.match(u'SASentry'):
                     self.add_data_set(key)
-                ## If the value is a group of data, iterate
-                ## TODO: If Process, Aperture, etc, store and renew
-                ## Recursion step to access data within the
-                self.read_children(data.get(key), class_name)
+                ## Recursion step to access data within the group
+                self.read_children(value, class_name)
+                self.add_intermediate(class_name)
 
             elif isinstance(value, h5py.Dataset):
                 ## If this is a dataset, store the data appropriately
-                ## TODO: Add instrumental information
                 data_set = data[key][:]
…
                     self.current_dataset.mask = np.append(self.current_dataset.mask, data_point)
 
-                ## Other Information
-                elif key == u'wavelength':
-                    if data_set.size > 1:
-                        self.trans_spectrum.wavelength.append(data_point)
-                        self.source.wavelength = sum(self.trans_spectrum.wavelength)\
-                                / len(self.trans_spectrum.wavelength)
-                    else:
-                        self.source.wavelength = data_point
-                elif key == u'probe_type':
-                    self.source.radiation = data_point
-                elif key == u'transmission':
-                    if data_set.size > 1:
-                        self.trans_spectrum.transmission.append(data_point)
-                        self.sample.transmission = sum(self.trans_spectrum.transmission) \
-                                / len(self.trans_spectrum.transmission)
-                    else:
-                        self.sample.transmission = data_point
-
                 ## Sample Information
                 elif key == u'Title' and parent == u'SASsample':
…
                 elif key == u'temperature' and parent == u'SASsample':
                     self.sample.temperature = data_point
+
+                ## Instrumental Information
+                elif key == u'name' and parent == u'SASinstrument':
+                    self.current_dataset.instrument = data_point
+                elif key == u'name' and parent == u'SASdetector':
+                    self.detector.name = data_point
+                elif key == u'SDD' and parent == u'SASdetector':
+                    self.detector.distance = data_point
+                    self.detector.distance_unit = value.attrs.get(u'unit')
+                elif key == u'SSD' and parent == u'SAScollimation':
+                    self.collimation.length = data_point
+                    self.collimation.length_unit = value.attrs.get(u'unit')
+                elif key == u'name' and parent == u'SAScollimation':
+                    self.collimation.name = data_point
 
                 ## Process Information
…
                 elif key == u'date' and parent == u'SASprocess':
                     self.process.date = data_point
+                elif parent == u'SASprocess':
+                    self.process.notes.append(data_point)
+
+                ## Transmission Spectrum
+                elif key == u'T' and parent == u'SAStransmission_spectrum':
+                    self.trans_spectrum.transmission.append(data_point)
+                elif key == u'Tdev' and parent == u'SAStransmission_spectrum':
+                    self.trans_spectrum.transmission_deviation.append(data_point)
+                elif key == u'lambda' and parent == u'SAStransmission_spectrum':
+                    self.trans_spectrum.wavelength.append(data_point)
+
+                ## Other Information
+                elif key == u'wavelength' and parent == u'SASdata':
+                    self.source.wavelength = data_point
+                    self.source.wavelength.unit = value.attrs.get(u'unit')
+                elif key == u'radiation' and parent == u'SASsource':
+                    self.source.radiation = data_point
+                elif key == u'transmission' and parent == u'SASdata':
+                    self.sample.transmission = data_point
 
                 ## Everything else goes in meta_data
…
             self.errors.add("ShouldNeverHappenException")
 
-        return
+    def add_intermediate(self, parent):
+        """
+        This method stores any intermediate objects within the final data set after fully reading the set.
+
+        :param parent: The NXclass name for the h5py Group object that just finished being processed
+        :return:
+        """
+
+        if parent == u'SASprocess':
+            self.current_dataset.process.append(self.process)
+            self.process = Process()
+        elif parent == u'SASdetector':
+            self.current_dataset.detector.append(self.detector)
+            self.detector = Detector()
+        elif parent == u'SAStransmission_spectrum':
+            self.current_dataset.trans_spectrum.append(self.trans_spectrum)
+            self.trans_spectrum = TransmissionSpectrum()
+        elif parent == u'SASsource':
+            self.current_dataset.source = self.source
+            self.source = Source()
+        elif parent == u'SASsample':
+            self.current_dataset.sample = self.sample
+            self.sample = Sample()
+        elif parent == u'SAScollimation':
+            self.current_dataset.collimation.append(self.collimation)
+            self.collimation = Collimation()
+        elif parent == u'SASaperture':
+            self.collimation.aperture.append(self.aperture)
+            self.aperture = Aperture()
 
     def final_data_cleanup(self):
…
         Does some final cleanup and formatting on self.current_dataset
         """
-        ## TODO: Add all cleanup items - NOT FINISHED
-        ## TODO: All strings to float64
-        ## TODO: All intermediates (self.sample, etc.) put in self.current_dataset
 
         ## Type cast data arrays to float64 and find min/max as appropriate
…
             self.current_dataset.dy = self.current_dataset.dy.astype(np.float64)
 
+            if len(self.current_dataset.trans_spectrum) is not 0:
+                spectrum_list = []
+                for spectrum in self.current_dataset.trans_spectrum:
+                    spectrum.transmission = np.delete(spectrum.transmission, [0])
+                    spectrum.transmission = spectrum.transmission.astype(np.float64)
+                    spectrum.transmission_deviation = np.delete(spectrum.transmission_deviation, [0])
+                    spectrum.transmission_deviation = spectrum.transmission_deviation.astype(np.float64)
+                    spectrum.wavelength = np.delete(spectrum.wavelength, [0])
+                    spectrum.wavelength = spectrum.wavelength.astype(np.float64)
+                    spectrum_list.append(spectrum)
+                self.current_dataset.trans_spectrum = spectrum_list
+
         else:
             self.errors.add("ShouldNeverHappenException")
…
         :return: None
         """
-        entry = []
-        if key is not "":
-            entry = self.raw_data.get(key)
-        else:
-            key_prog = re.compile("sasentry*")
-            for key in self.raw_data.keys():
-                if (key_prog.match(key)):
-                    entry = self.raw_data.get(key)
-                    break
+        entry = self._find_intermediate(key, "sasentry*")
         data = entry.get("sasdata")
         if data.get("Qx") is not None:
…
         self.current_dataset.filename = self.raw_data.filename
 
+    def _find_intermediate(self, key="", basename=""):
+        """
+        A private class used to find an entry by either using a direct key or knowing the approximate basename.
+
+        :param key: Exact keyname of an entry
+        :param basename: Approximate name of an entry
+        :return:
+        """
+        entry = []
+        if key is not "":
+            entry = self.raw_data.get(key)
+        else:
+            key_prog = re.compile(basename)
+            for key in self.raw_data.keys():
+                if (key_prog.match(key)):
+                    entry = self.raw_data.get(key)
+                    break
+        return entry
+
     def _create_unique_key(self, dictionary, name, numb=0):
         """
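For orientation, a minimal usage sketch of the reader as of this changeset. The file name "example_cansas2.h5" is a placeholder, not a file shipped with SasView; read() returns the accumulated output list, one Data1D/Data2D object per sasentry:

    from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader

    reader = Reader()
    # "example_cansas2.h5" is a hypothetical path used only for illustration.
    output = reader.read("example_cansas2.h5")
    for data in output:
        # Each entry is a Data1D or Data2D object with sample, source, detector,
        # process, and transmission spectrum metadata attached during reading.
        print(data.filename, type(data).__name__)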
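The heart of the reader is read_children(), which recurses through the HDF5 hierarchy: h5py.Group children trigger a recursive call (followed, after this changeset, by add_intermediate() once the group is exhausted), while h5py.Dataset children are unpacked and routed by key and parent class. Below is a stripped-down, SasView-independent sketch of the same traversal pattern; the u'NX_class' attribute name is an assumption of this sketch, not necessarily the attribute the reader inspects:

    import h5py

    def walk(node, parent=u'SASroot', indent=0):
        # Visit every child of an h5py Group, recursing into sub-groups and
        # printing Dataset shapes, mirroring the shape of Reader.read_children().
        for key in node.keys():
            value = node.get(key)
            if isinstance(value, h5py.Group):
                class_name = value.attrs.get(u'NX_class', u'')  # assumed attribute name
                walk(value, class_name, indent + 2)
            elif isinstance(value, h5py.Dataset):
                print(" " * indent + "%s (parent %s): shape %s" % (key, parent, value.shape))

    # Hypothetical usage:
    # with h5py.File("example_cansas2.h5", "r") as f:
    #     walk(f)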
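The new add_intermediate() uses a store-and-reset pattern: metadata objects such as self.process and self.detector are filled in during recursion, and once their group has been fully read they are attached to the current data set and replaced with fresh instances so the next group starts clean. The same pattern explains why add_data_set() is called one last time after read_children() returns: the final entry is still sitting in the open object. A toy illustration of the idea, with all names invented for this sketch:

    class IntermediateCollector(object):
        """Toy accumulator showing the store-and-reset idea behind add_intermediate()."""

        def __init__(self):
            self.processes = []   # finished intermediate objects
            self.process = {}     # object currently being filled

        def on_dataset(self, key, value):
            # While inside a group, values land on the currently open object.
            self.process[key] = value

        def on_group_finished(self, class_name):
            if class_name == u'SASprocess':
                self.processes.append(self.process)  # store the finished object
                self.process = {}                    # reset for the next group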