Changeset d7fd7be in sasview for src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
- Timestamp:
- Dec 22, 2017 12:08:53 PM (6 years ago)
- Branches:
- master, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, unittest-saveload
- Children:
- 5a4d022
- Parents:
- 2651724 (diff), 0a88623 (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Files:
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
r2651724 rd7fd7be 9 9 import sys 10 10 11 from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\11 from ..data_info import plottable_1D, plottable_2D,\ 12 12 Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \ 13 13 TransmissionSpectrum, Detector 14 from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable 15 16 17 class Reader(): 14 from ..data_info import combine_data_info_with_plottable 15 from ..loader_exceptions import FileContentsException, DefaultReaderException 16 from ..file_reader_base_class import FileReader, decode 17 18 def h5attr(node, key, default=None): 19 return decode(node.attrs.get(key, default)) 20 21 class Reader(FileReader): 18 22 """ 19 23 A class for reading in CanSAS v2.0 data files. The existing iteration opens … … 40 44 # Raw file contents to be processed 41 45 raw_data = None 42 # Data info currently being read in43 current_datainfo = None44 # SASdata set currently being read in45 current_dataset = None46 46 # List of plottable1D objects that should be linked to the current_datainfo 47 47 data1d = None … … 56 56 # Flag to bypass extension check 57 57 allow_all = True 58 # List of files to return 59 output = None 60 61 def read(self, filename): 58 59 def get_file_contents(self): 62 60 """ 63 61 This is the general read method that all SasView data_loaders must have. 
… … 67 65 """ 68 66 # Reinitialize when loading a new data file to reset all class variables 69 self.reset_class_variables() 67 self.reset_state() 68 69 filename = self.f_open.name 70 self.f_open.close() # IO handled by h5py 71 70 72 # Check that the file exists 71 73 if os.path.isfile(filename): … … 75 77 if extension in self.ext or self.allow_all: 76 78 # Load the data file 77 self.raw_data = h5py.File(filename, 'r') 78 # Read in all child elements of top level SASroot 79 self.read_children(self.raw_data, []) 80 # Add the last data set to the list of outputs 81 self.add_data_set() 82 # Close the data file 83 self.raw_data.close() 84 # Return data set(s) 85 return self.output 86 87 def reset_class_variables(self): 79 try: 80 self.raw_data = h5py.File(filename, 'r') 81 except Exception as e: 82 if extension not in self.ext: 83 msg = "CanSAS2.0 HDF5 Reader could not load file {}".format(basename + extension) 84 raise DefaultReaderException(msg) 85 raise FileContentsException(e.message) 86 try: 87 # Read in all child elements of top level SASroot 88 self.read_children(self.raw_data, []) 89 # Add the last data set to the list of outputs 90 self.add_data_set() 91 except Exception as exc: 92 raise FileContentsException(exc.message) 93 finally: 94 # Close the data file 95 self.raw_data.close() 96 97 for dataset in self.output: 98 if isinstance(dataset, Data1D): 99 if dataset.x.size < 5: 100 self.output = [] 101 raise FileContentsException("Fewer than 5 data points found.") 102 103 def reset_state(self): 88 104 """ 89 105 Create the reader object and define initial states for class variables 90 106 """ 91 self.current_datainfo = None 92 self.current_dataset = None 107 super(Reader, self).reset_state() 93 108 self.data1d = [] 94 109 self.data2d = [] … … 123 138 # Get all information for the current key 124 139 value = data.get(key) 125 if value.attrs.get(u'canSAS_class') is not None: 126 class_name = value.attrs.get(u'canSAS_class') 127 elif value.attrs.get(u'NX_class') is 
not None: 128 class_name = value.attrs.get(u'NX_class') 129 else: 130 class_name = key 140 class_name = h5attr(value, u'canSAS_class') 141 if class_name is None: 142 class_name = h5attr(value, u'NX_class') 131 143 if class_name is not None: 132 144 class_prog = re.compile(class_name) … … 135 147 136 148 if isinstance(value, h5py.Group): 149 # Set parent class before recursion 137 150 self.parent_class = class_name 138 151 parent_list.append(key) … … 146 159 # Recursion step to access data within the group 147 160 self.read_children(value, parent_list) 161 # Reset parent class when returning from recursive method 162 self.parent_class = class_name 148 163 self.add_intermediate() 149 164 parent_list.remove(key) … … 155 170 156 171 for data_point in data_set: 172 if isinstance(data_point, np.ndarray): 173 if data_point.dtype.char == 'S': 174 data_point = decode(bytes(data_point)) 175 else: 176 data_point = decode(data_point) 157 177 # Top Level Meta Data 158 178 if key == u'definition': … … 162 182 self.current_datainfo.run.append(data_point) 163 183 try: 164 run_name = value.attrs['name']184 run_name = h5attr(value, 'name') 165 185 run_dict = {data_point: run_name} 166 186 self.current_datainfo.run_name = run_dict 167 except :187 except Exception: 168 188 pass 169 189 # Title … … 458 478 Data1D and Data2D objects 459 479 """ 460 461 480 # Type cast data arrays to float64 462 481 if len(self.current_datainfo.trans_spectrum) > 0: … … 482 501 # Type cast data arrays to float64 and find min/max as appropriate 483 502 for dataset in self.data2d: 484 dataset.data = dataset.data.astype(np.float64)485 dataset.err_data = dataset.err_data.astype(np.float64)486 if dataset.qx_data is not None:487 dataset.xmin = np.min(dataset.qx_data)488 dataset.xmax = np.max(dataset.qx_data)489 dataset.qx_data = dataset.qx_data.astype(np.float64)490 if dataset.dqx_data is not None:491 dataset.dqx_data = dataset.dqx_data.astype(np.float64)492 if dataset.qy_data is not None:493 dataset.ymin = 
np.min(dataset.qy_data)494 dataset.ymax = np.max(dataset.qy_data)495 dataset.qy_data = dataset.qy_data.astype(np.float64)496 if dataset.dqy_data is not None:497 dataset.dqy_data = dataset.dqy_data.astype(np.float64)498 if dataset.q_data is not None:499 dataset.q_data = dataset.q_data.astype(np.float64)500 503 zeros = np.ones(dataset.data.size, dtype=bool) 501 504 try: … … 520 523 dataset.x_bins = dataset.qx_data[:n_cols] 521 524 dataset.data = dataset.data.flatten() 522 523 final_dataset = combine_data_info_with_plottable( 524 dataset, self.current_datainfo) 525 self.output.append(final_dataset) 525 self.current_dataset = dataset 526 self.send_to_output() 526 527 527 528 for dataset in self.data1d: 528 if dataset.x is not None: 529 dataset.x = dataset.x.astype(np.float64) 530 dataset.xmin = np.min(dataset.x) 531 dataset.xmax = np.max(dataset.x) 532 if dataset.y is not None: 533 dataset.y = dataset.y.astype(np.float64) 534 dataset.ymin = np.min(dataset.y) 535 dataset.ymax = np.max(dataset.y) 536 if dataset.dx is not None: 537 dataset.dx = dataset.dx.astype(np.float64) 538 if dataset.dxl is not None: 539 dataset.dxl = dataset.dxl.astype(np.float64) 540 if dataset.dxw is not None: 541 dataset.dxw = dataset.dxw.astype(np.float64) 542 if dataset.dy is not None: 543 dataset.dy = dataset.dy.astype(np.float64) 544 final_dataset = combine_data_info_with_plottable( 545 dataset, self.current_datainfo) 546 self.output.append(final_dataset) 529 self.current_dataset = dataset 530 self.send_to_output() 547 531 548 532 def add_data_set(self, key=""): … … 651 635 :return: unit for the value passed to the method 652 636 """ 653 unit = value.attrs.get(u'units')637 unit = h5attr(value, u'units') 654 638 if unit is None: 655 unit = value.attrs.get(u'unit')639 unit = h5attr(value, u'unit') 656 640 # Convert the unit formats 657 641 if unit == "1/A":
Note: See TracChangeset for help on using the changeset viewer.