Changeset b8080e1 in sasview for src/sas/sascalc/dataloader/readers
- Timestamp:
- Aug 29, 2018 10:01:23 AM (6 years ago)
- Branches:
- ESS_GUI, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc
- Children:
- 9463ca2
- Parents:
- ce30949
- git-author:
- Piotr Rozyczko <rozyczko@…> (08/29/18 09:59:56)
- git-committer:
- Piotr Rozyczko <rozyczko@…> (08/29/18 10:01:23)
- Location:
- src/sas/sascalc/dataloader/readers
- Files:
-
- 6 edited
Legend:
- Unmodified
- Added
- Removed
-
src/sas/sascalc/dataloader/readers/abs_reader.py
r1efbc190 rb8080e1 29 29 type_name = "IGOR 1D" 30 30 # Wildcards 31 type = ["IGOR 1D files (*.abs)|*.abs" ]31 type = ["IGOR 1D files (*.abs)|*.abs", "IGOR 1D USANS files (*.cor)|*.cor"] 32 32 # List of allowed extensions 33 ext = ['.abs' ]33 ext = ['.abs', '.cor'] 34 34 35 35 def get_file_contents(self): … … 46 46 self.current_datainfo = DataInfo() 47 47 self.current_datainfo.filename = filepath 48 self.reset_data_list(len(lines))49 48 detector = Detector() 50 49 data_line = 0 … … 172 171 173 172 try: 174 _x = float(toks[ 0])173 _x = float(toks[4]) 175 174 _y = float(toks[1]) 176 175 _dy = float(toks[2]) … … 188 187 self.current_dataset.y[data_line] = _y 189 188 self.current_dataset.dy[data_line] = _dy 190 self.current_dataset.dx[data_line] = _dx 189 if _dx > 0: 190 self.current_dataset.dx[data_line] = _dx 191 else: 192 if data_line == 0: 193 self.current_dataset.dx = None 194 self.current_dataset.dxl = np.zeros(len(lines)) 195 self.current_dataset.dxw = np.zeros(len(lines)) 196 self.current_dataset.dxl[data_line] = abs(_dx) 197 self.current_dataset.dxw[data_line] = 0 191 198 data_line += 1 192 199 … … 197 204 pass 198 205 206 # SANS Data: 199 207 # The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev. 200 208 # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor| 201 if line.count("The 6 columns") > 0: 209 # USANS Data: 210 # EMP LEVEL: <value> ; BKG LEVEL: <value> 211 if line.startswith("The 6 columns") or line.startswith("EMP LEVEL"): 202 212 is_data_started = True 203 213 -
src/sas/sascalc/dataloader/readers/associations.py
r574adc7 rb8080e1 26 26 ".dat": "red2d_reader", 27 27 ".abs": "abs_reader", 28 ".cor": "abs_reader", 28 29 ".sans": "danse_reader", 29 30 ".pdh": "anton_paar_saxs_reader" -
src/sas/sascalc/dataloader/readers/cansas_reader.py
r2b538cd rb8080e1 68 68 data files do not appear a second time 69 69 """ 70 self.current_datainfo = None 71 self.current_dataset = None 72 self.current_data1d = None 70 super(Reader, self).reset_state() 73 71 self.data = [] 74 72 self.process = Process() … … 79 77 self.names = [] 80 78 self.cansas_defaults = {} 81 self.output = []82 79 self.ns_list = None 83 80 self.logging = [] … … 85 82 86 83 def read(self, xml_file, schema_path="", invalid=True): 87 if schema_path != "" or invalid != True:84 if schema_path != "" or not invalid: 88 85 # read has been called from self.get_file_contents because xml file doens't conform to schema 89 86 _, self.extension = os.path.splitext(os.path.basename(xml_file)) … … 945 942 pos, "z", datainfo.sample.position.z, 946 943 {"unit": datainfo.sample.position_unit}) 947 if written == True:944 if written: 948 945 self.append(pos, sample) 949 946 … … 958 955 ori, "yaw", datainfo.sample.orientation.z, 959 956 {"unit": datainfo.sample.orientation_unit}) 960 if written == True:957 if written: 961 958 self.append(ori, sample) 962 959 … … 1005 1002 size, "z", datainfo.source.beam_size.z, 1006 1003 {"unit": datainfo.source.beam_size_unit}) 1007 if written == True:1004 if written: 1008 1005 self.append(size, source) 1009 1006 … … 1061 1058 size, "z", aperture.size.z, 1062 1059 {"unit": aperture.size_unit}) 1063 if written == True:1060 if written: 1064 1061 self.append(size, apert) 1065 1062 … … 1084 1081 written = written | self.write_node(det, "SDD", item.distance, 1085 1082 {"unit": item.distance_unit}) 1086 if written == True:1083 if written: 1087 1084 self.append(det, instr) 1088 1085 … … 1094 1091 written = written | self.write_node(off, "z", item.offset.z, 1095 1092 {"unit": item.offset_unit}) 1096 if written == True:1093 if written: 1097 1094 self.append(off, det) 1098 1095 … … 1106 1103 item.orientation.z, 1107 1104 {"unit": item.orientation_unit}) 1108 if written == True:1105 if written: 1109 1106 self.append(ori, det) 1110 1107 … … 
1118 1115 item.beam_center.z, 1119 1116 {"unit": item.beam_center_unit}) 1120 if written == True:1117 if written: 1121 1118 self.append(center, det) 1122 1119 … … 1128 1125 written = written | self.write_node(pix, "z", item.pixel_size.z, 1129 1126 {"unit": item.pixel_size_unit}) 1130 if written == True:1127 if written: 1131 1128 self.append(pix, det) 1132 1129 self.write_node(det, "slit_length", item.slit_length, -
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rc416a17 rb8080e1 9 9 import sys 10 10 11 from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\11 from ..data_info import plottable_1D, plottable_2D,\ 12 12 Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \ 13 13 TransmissionSpectrum, Detector 14 from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable 15 16 17 class Reader(): 14 from ..data_info import combine_data_info_with_plottable 15 from ..loader_exceptions import FileContentsException, DefaultReaderException 16 from ..file_reader_base_class import FileReader, decode 17 18 def h5attr(node, key, default=None): 19 return decode(node.attrs.get(key, default)) 20 21 class Reader(FileReader): 18 22 """ 19 23 A class for reading in CanSAS v2.0 data files. The existing iteration opens … … 25 29 Any number of SASdata sets may be present in a SASentry and the data within 26 30 can be either 1D I(Q) or 2D I(Qx, Qy). 31 27 32 Also supports reading NXcanSAS formatted HDF5 files 28 33 … … 39 44 # Raw file contents to be processed 40 45 raw_data = None 41 # Data info currently being read in42 current_datainfo = None43 # SASdata set currently being read in44 current_dataset = None45 46 # List of plottable1D objects that should be linked to the current_datainfo 46 47 data1d = None … … 55 56 # Flag to bypass extension check 56 57 allow_all = True 57 # List of files to return 58 output = None 59 60 def read(self, filename): 58 59 def get_file_contents(self): 61 60 """ 62 61 This is the general read method that all SasView data_loaders must have. 
… … 66 65 """ 67 66 # Reinitialize when loading a new data file to reset all class variables 68 self.reset_class_variables() 67 self.reset_state() 68 69 filename = self.f_open.name 70 self.f_open.close() # IO handled by h5py 71 69 72 # Check that the file exists 70 73 if os.path.isfile(filename): … … 74 77 if extension in self.ext or self.allow_all: 75 78 # Load the data file 76 self.raw_data = h5py.File(filename, 'r') 77 # Read in all child elements of top level SASroot 78 self.read_children(self.raw_data, []) 79 # Add the last data set to the list of outputs 80 self.add_data_set() 81 # Close the data file 82 self.raw_data.close() 83 # Return data set(s) 84 return self.output 85 86 def reset_class_variables(self): 79 try: 80 self.raw_data = h5py.File(filename, 'r') 81 except Exception as e: 82 if extension not in self.ext: 83 msg = "CanSAS2.0 HDF5 Reader could not load file {}".format(basename + extension) 84 raise DefaultReaderException(msg) 85 raise FileContentsException(e.message) 86 try: 87 # Read in all child elements of top level SASroot 88 self.read_children(self.raw_data, []) 89 # Add the last data set to the list of outputs 90 self.add_data_set() 91 except Exception as exc: 92 raise FileContentsException(exc.message) 93 finally: 94 # Close the data file 95 self.raw_data.close() 96 97 for dataset in self.output: 98 if isinstance(dataset, Data1D): 99 if dataset.x.size < 5: 100 self.output = [] 101 raise FileContentsException("Fewer than 5 data points found.") 102 103 def reset_state(self): 87 104 """ 88 105 Create the reader object and define initial states for class variables 89 106 """ 90 self.current_datainfo = None 91 self.current_dataset = None 107 super(Reader, self).reset_state() 92 108 self.data1d = [] 93 109 self.data2d = [] … … 95 111 self.errors = set() 96 112 self.logging = [] 97 self.output = []98 113 self.parent_class = u'' 99 114 self.detector = Detector() … … 115 130 # Get all information for the current key 116 131 value = data.get(key) 117 
if value.attrs.get(u'canSAS_class') is not None: 118 class_name = value.attrs.get(u'canSAS_class') 119 else: 120 class_name = value.attrs.get(u'NX_class') 132 class_name = h5attr(value, u'canSAS_class') 133 if class_name is None: 134 class_name = h5attr(value, u'NX_class') 121 135 if class_name is not None: 122 136 class_prog = re.compile(class_name) … … 125 139 126 140 if isinstance(value, h5py.Group): 141 # Set parent class before recursion 127 142 self.parent_class = class_name 128 143 parent_list.append(key) … … 135 150 # Recursion step to access data within the group 136 151 self.read_children(value, parent_list) 152 # Reset parent class when returning from recursive method 153 self.parent_class = class_name 137 154 self.add_intermediate() 138 155 parent_list.remove(key) … … 165 182 self.current_dataset.x = data_set.flatten() 166 183 continue 184 elif key == u'Qdev': 185 self.current_dataset.dx = data_set.flatten() 186 continue 187 elif key == u'dQw': 188 self.current_dataset.dxw = data_set.flatten() 189 continue 190 elif key == u'dQl': 191 self.current_dataset.dxl = data_set.flatten() 192 continue 167 193 elif key == u'Qy': 168 194 self.current_dataset.yaxis("Q_y", unit) … … 198 224 199 225 for data_point in data_set: 226 if isinstance(data_point, np.ndarray): 227 if data_point.dtype.char == 'S': 228 data_point = decode(bytes(data_point)) 229 else: 230 data_point = decode(data_point) 200 231 # Top Level Meta Data 201 232 if key == u'definition': … … 203 234 elif key == u'run': 204 235 self.current_datainfo.run.append(data_point) 236 try: 237 run_name = h5attr(value, 'name') 238 run_dict = {data_point: run_name} 239 self.current_datainfo.run_name = run_dict 240 except Exception: 241 pass 205 242 elif key == u'title': 206 243 self.current_datainfo.title = data_point … … 411 448 Data1D and Data2D objects 412 449 """ 413 414 450 # Type cast data arrays to float64 415 451 if len(self.current_datainfo.trans_spectrum) > 0: … … 435 471 # Type cast data arrays to 
float64 and find min/max as appropriate 436 472 for dataset in self.data2d: 437 dataset.data = dataset.data.astype(np.float64)438 dataset.err_data = dataset.err_data.astype(np.float64)439 if dataset.qx_data is not None:440 dataset.xmin = np.min(dataset.qx_data)441 dataset.xmax = np.max(dataset.qx_data)442 dataset.qx_data = dataset.qx_data.astype(np.float64)443 if dataset.dqx_data is not None:444 dataset.dqx_data = dataset.dqx_data.astype(np.float64)445 if dataset.qy_data is not None:446 dataset.ymin = np.min(dataset.qy_data)447 dataset.ymax = np.max(dataset.qy_data)448 dataset.qy_data = dataset.qy_data.astype(np.float64)449 if dataset.dqy_data is not None:450 dataset.dqy_data = dataset.dqy_data.astype(np.float64)451 if dataset.q_data is not None:452 dataset.q_data = dataset.q_data.astype(np.float64)453 473 zeros = np.ones(dataset.data.size, dtype=bool) 454 474 try: … … 473 493 dataset.x_bins = dataset.qx_data[:n_cols] 474 494 dataset.data = dataset.data.flatten() 475 476 final_dataset = combine_data_info_with_plottable( 477 dataset, self.current_datainfo) 478 self.output.append(final_dataset) 495 self.current_dataset = dataset 496 self.send_to_output() 479 497 480 498 for dataset in self.data1d: 481 if dataset.x is not None: 482 dataset.x = dataset.x.astype(np.float64) 483 dataset.xmin = np.min(dataset.x) 484 dataset.xmax = np.max(dataset.x) 485 if dataset.y is not None: 486 dataset.y = dataset.y.astype(np.float64) 487 dataset.ymin = np.min(dataset.y) 488 dataset.ymax = np.max(dataset.y) 489 if dataset.dx is not None: 490 dataset.dx = dataset.dx.astype(np.float64) 491 if dataset.dxl is not None: 492 dataset.dxl = dataset.dxl.astype(np.float64) 493 if dataset.dxw is not None: 494 dataset.dxw = dataset.dxw.astype(np.float64) 495 if dataset.dy is not None: 496 dataset.dy = dataset.dy.astype(np.float64) 497 final_dataset = combine_data_info_with_plottable( 498 dataset, self.current_datainfo) 499 self.output.append(final_dataset) 499 self.current_dataset = dataset 500 
self.send_to_output() 500 501 501 502 def add_data_set(self, key=""): … … 579 580 :return: unit for the value passed to the method 580 581 """ 581 unit = value.attrs.get(u'units')582 unit = h5attr(value, u'units') 582 583 if unit is None: 583 unit = value.attrs.get(u'unit')584 unit = h5attr(value, u'unit') 584 585 # Convert the unit formats 585 586 if unit == "1/A": -
src/sas/sascalc/dataloader/readers/danse_reader.py
rcee5c78 rb8080e1 157 157 # Store all data 158 158 # Store wavelength 159 if has_converter == Trueand self.current_datainfo.source.wavelength_unit != 'A':159 if has_converter and self.current_datainfo.source.wavelength_unit != 'A': 160 160 conv = Converter('A') 161 161 wavelength = conv(wavelength, … … 164 164 165 165 # Store distance 166 if has_converter == Trueand detector.distance_unit != 'm':166 if has_converter and detector.distance_unit != 'm': 167 167 conv = Converter('m') 168 168 distance = conv(distance, units=detector.distance_unit) … … 170 170 171 171 # Store pixel size 172 if has_converter == Trueand detector.pixel_size_unit != 'mm':172 if has_converter and detector.pixel_size_unit != 'mm': 173 173 conv = Converter('mm') 174 174 pixel = conv(pixel, units=detector.pixel_size_unit) … … 191 191 x_vals = np.tile(x_vals, (size_y, 1)).flatten() 192 192 y_vals = np.tile(y_vals, (size_x, 1)).T.flatten() 193 if (np.all(self.current_dataset.err_data isNone)193 if (np.all(self.current_dataset.err_data == None) 194 194 or np.any(self.current_dataset.err_data <= 0)): 195 195 new_err_data = np.sqrt(np.abs(self.current_dataset.data)) -
src/sas/sascalc/dataloader/readers/sesans_reader.py
r849094a rb8080e1 12 12 from ..file_reader_base_class import FileReader 13 13 from ..data_info import plottable_1D, DataInfo 14 from ..loader_exceptions import FileContentsException , DataReaderException14 from ..loader_exceptions import FileContentsException 15 15 16 16 # Check whether we have a converter available … … 18 18 try: 19 19 from sas.sascalc.data_util.nxsunit import Converter 20 except :20 except ImportError: 21 21 has_converter = False 22 22 _ZERO = 1e-16 … … 46 46 line = self.nextline() 47 47 params = {} 48 while not line.startswith("BEGIN_DATA"):48 while line and not line.startswith("BEGIN_DATA"): 49 49 terms = line.split() 50 50 if len(terms) >= 2: … … 63 63 raise FileContentsException("Wavelength has no units") 64 64 if params["SpinEchoLength_unit"] != params["Wavelength_unit"]: 65 raise FileContentsException("The spin echo data has rudely used " 66 "different units for the spin echo length " 67 "and the wavelength. While sasview could " 68 "handle this instance, it is a violation " 69 "of the file format and will not be " 70 "handled by other software.") 65 raise FileContentsException( 66 "The spin echo data has rudely used " 67 "different units for the spin echo length " 68 "and the wavelength. While sasview could " 69 "handle this instance, it is a violation " 70 "of the file format and will not be " 71 "handled by other software.") 71 72 72 73 headers = self.nextline().split() … … 86 87 87 88 if not data.size: 88 raise FileContentsException("{} is empty".format( path))89 raise FileContentsException("{} is empty".format(self.filepath)) 89 90 x = data[:, headers.index("SpinEchoLength")] 90 91 if "SpinEchoLength_error" in headers:
Note: See TracChangeset for help on using the changeset viewer.