Changeset b8080e1 in sasview for src/sas/sascalc/dataloader
- Timestamp: Aug 29, 2018 10:01:23 AM (6 years ago)
- Branches: ESS_GUI, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc
- Children: 9463ca2
- Parents: ce30949
- git-author: Piotr Rozyczko <rozyczko@…> (08/29/18 09:59:56)
- git-committer: Piotr Rozyczko <rozyczko@…> (08/29/18 10:01:23)
- Location: src/sas/sascalc/dataloader
- Files: 9 edited
src/sas/sascalc/dataloader/data_info.py
r749b715 → rb8080e1

      clone.meta_data = deepcopy(self.meta_data)
      clone.errors = deepcopy(self.errors)
-     clone.isSesans = self.isSesans

      return clone
src/sas/sascalc/dataloader/file_reader_base_class.py
r9e6aeaf → rb8080e1

  import os
  import sys
- import re
+ import math
  import logging
  from abc import abstractmethod
…
      return s.decode() if isinstance(s, bytes) else s

+ # Data 1D fields for iterative purposes
+ FIELDS_1D = ('x', 'y', 'dx', 'dy', 'dxl', 'dxw')
+ # Data 2D fields for iterative purposes
+ FIELDS_2D = ('data', 'qx_data', 'qy_data', 'q_data', 'err_data',
+              'dqx_data', 'dqy_data', 'mask')
+ DEPRECATION_MESSAGE = ("\rThe extension of this file suggests the data set migh"
+                        "t not be fully reduced. Support for the reader associat"
+                        "ed with this file type has been removed. An attempt to "
+                        "load the file was made, but, should it be successful, "
+                        "SasView cannot guarantee the accuracy of the data.")
+
  class FileReader(object):
-     # List of Data1D and Data2D objects to be sent back to data_loader
-     output = []
-     # Current plottable_(1D/2D) object being loaded in
-     current_dataset = None
-     # Current DataInfo object being loaded in
-     current_datainfo = None
      # String to describe the type of data this reader can load
      type_name = "ASCII"
…
      # List of allowed extensions
      ext = ['.txt']
+     # Deprecated extensions
+     deprecated_extensions = ['.asc', '.nxs']
      # Bypass extension check and try to load anyway
      allow_all = False
      # Able to import the unit converter
      has_converter = True
-     # Open file handle
-     f_open = None
      # Default value of zero
      _ZERO = 1e-16

+     def __init__(self):
+         # List of Data1D and Data2D objects to be sent back to data_loader
+         self.output = []
+         # Current plottable_(1D/2D) object being loaded in
+         self.current_dataset = None
+         # Current DataInfo object being loaded in
+         self.current_datainfo = None
+         # File path sent to reader
+         self.filepath = None
+         # Open file handle
+         self.f_open = None
+
      def read(self, filepath):
          """
…
          :param filepath: The full or relative path to a file to be loaded
          """
+         self.filepath = filepath
          if os.path.isfile(filepath):
              basename, extension = os.path.splitext(os.path.basename(filepath))
…
              if not self.f_open.closed:
                  self.f_open.close()
+             if any(filepath.lower().endswith(ext) for ext in
+                    self.deprecated_extensions):
+                 self.handle_error_message(DEPRECATION_MESSAGE)
              if len(self.output) > 0:
                  # Sort the data that's been loaded
…

          # Return a list of parsed entries that data_loader can manage
-         return self.output
+         final_data = self.output
+         self.reset_state()
+         return final_data
+
+     def reset_state(self):
+         """
+         Resets the class state to a base case when loading a new data file so previous
+         data files do not appear a second time
+         """
+         self.current_datainfo = None
+         self.current_dataset = None
+         self.filepath = None
+         self.ind = None
+         self.output = []

      def nextline(self):
…
          """
          Generic error handler to add an error to the current datainfo to
-         propogate the error up the error chain.
+         propagate the error up the error chain.
          :param msg: Error message
          """
…
          else:
              logger.warning(msg)
+             raise NoKnownLoaderException(msg)

      def send_to_output(self):
…
              # Sort data by increasing x and remove 1st point
              ind = np.lexsort((data.y, data.x))
-             data.x = np.asarray([data.x[i] for i in ind]).astype(np.float64)
-             data.y = np.asarray([data.y[i] for i in ind]).astype(np.float64)
+             data.x = self._reorder_1d_array(data.x, ind)
+             data.y = self._reorder_1d_array(data.y, ind)
              if data.dx is not None:
                  if len(data.dx) == 0:
                      data.dx = None
                      continue
-                 data.dx = np.asarray([data.dx[i] for i in ind]).astype(np.float64)
+                 data.dx = self._reorder_1d_array(data.dx, ind)
              if data.dxl is not None:
-                 data.dxl = np.asarray([data.dxl[i] for i in ind]).astype(np.float64)
+                 data.dxl = self._reorder_1d_array(data.dxl, ind)
              if data.dxw is not None:
-                 data.dxw = np.asarray([data.dxw[i] for i in ind]).astype(np.float64)
+                 data.dxw = self._reorder_1d_array(data.dxw, ind)
              if data.dy is not None:
                  if len(data.dy) == 0:
                      data.dy = None
                      continue
-                 data.dy = np.asarray([data.dy[i] for i in ind]).astype(np.float64)
+                 data.dy = self._reorder_1d_array(data.dy, ind)
              if data.lam is not None:
-                 data.lam = np.asarray([data.lam[i] for i in ind]).astype(np.float64)
+                 data.lam = self._reorder_1d_array(data.lam, ind)
              if data.dlam is not None:
-                 data.dlam = np.asarray([data.dlam[i] for i in ind]).astype(np.float64)
+                 data.dlam = self._reorder_1d_array(data.dlam, ind)
+             data = self._remove_nans_in_data(data)
              if len(data.x) > 0:
                  data.xmin = np.min(data.x)
…
                  data.ymin = np.min(data.y)
                  data.ymax = np.max(data.y)
+
+     @staticmethod
+     def _reorder_1d_array(array, ind):
+         """
+         Reorders a 1D array based on the indices passed as ind
+         :param array: Array to be reordered
+         :param ind: Indices used to reorder array
+         :return: reordered array
+         """
+         array = np.asarray(array, dtype=np.float64)
+         return array[ind]
+
+     @staticmethod
+     def _remove_nans_in_data(data):
+         """
+         Remove data points where nan is loaded
+         :param data: 1D or 2D data object
+         :return: data with nan points removed
+         """
+         if isinstance(data, Data1D):
+             fields = FIELDS_1D
+         elif isinstance(data, Data2D):
+             fields = FIELDS_2D
+         else:
+             return data
+         # Make array of good points - all others will be removed
+         good = np.isfinite(getattr(data, fields[0]))
+         for name in fields[1:]:
+             array = getattr(data, name)
+             if array is not None:
+                 # Update good points only if not already changed
+                 good &= np.isfinite(array)
+         if not np.all(good):
+             for name in fields:
+                 array = getattr(data, name)
+                 if array is not None:
+                     setattr(data, name, array[good])
+         return data

      def sort_two_d_data(self):
…
              dataset.x_bins = dataset.qx_data[:int(n_cols)]
              dataset.data = dataset.data.flatten()
+             dataset = self._remove_nans_in_data(dataset)
              if len(dataset.data) > 0:
                  dataset.xmin = np.min(dataset.qx_data)
…
      def splitline(line):
          """
-         Splits a line into pieces based on common delimeters
+         Splits a line into pieces based on common delimiters
          :param line: A single line of text
          :return: list of values
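The interesting addition here is _remove_nans_in_data(): one boolean mask is accumulated across every parallel column of a data set, so a NaN in any column drops that point from all of them, while _reorder_1d_array() replaces the per-element list comprehensions with NumPy fancy indexing. A minimal standalone sketch of the same masking technique; the function name and columns below are illustrative, not SasView API:

    import numpy as np

    def drop_non_finite(**columns):
        """Keep only the indices where every column is finite (no NaN/inf)."""
        arrays = {name: np.asarray(col, dtype=np.float64)
                  for name, col in columns.items()}
        # Shared mask: a bad value in any column removes the point everywhere
        good = np.ones(len(next(iter(arrays.values()))), dtype=bool)
        for col in arrays.values():
            good &= np.isfinite(col)
        return {name: col[good] for name, col in arrays.items()}

    # The NaN in y removes the second point from x and dy as well:
    clean = drop_non_finite(x=[0.1, 0.2, 0.3],
                            y=[1.0, np.nan, 3.0],
                            dy=[0.01, 0.01, 0.02])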
src/sas/sascalc/dataloader/loader.py
rdc8d1c2 → rb8080e1

              ascii_loader = ascii_reader.Reader()
              return ascii_loader.read(path)
+         except NoKnownLoaderException:
+             pass  # Try the Cansas XML reader
          except DefaultReaderException:
              pass  # Loader specific error to try the cansas XML reader
…
              cansas_loader = cansas_reader.Reader()
              return cansas_loader.read(path)
+         except NoKnownLoaderException:
+             pass  # Try the NXcanSAS reader
          except DefaultReaderException:
              pass  # Loader specific error to try the NXcanSAS reader
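The pattern here is a fallback chain: each reader either returns parsed data or raises an exception that means "not my format", and the dispatcher moves on to the next candidate. A hedged sketch of that control flow; the reader classes and the exception tuple are placeholders for the real SasView types:

    def load_with_fallbacks(path, reader_classes, skip_exceptions):
        """Try each reader in turn; a recognized failure moves on to the next."""
        last_error = None
        for reader_cls in reader_classes:
            try:
                return reader_cls().read(path)
            except skip_exceptions as exc:
                last_error = exc  # recognized "not my format" failure
        raise last_error or RuntimeError("No reader could load %s" % path)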
src/sas/sascalc/dataloader/readers/abs_reader.py
r1efbc190 → rb8080e1

      type_name = "IGOR 1D"
      # Wildcards
-     type = ["IGOR 1D files (*.abs)|*.abs"]
+     type = ["IGOR 1D files (*.abs)|*.abs", "IGOR 1D USANS files (*.cor)|*.cor"]
      # List of allowed extensions
-     ext = ['.abs']
+     ext = ['.abs', '.cor']

      def get_file_contents(self):
…
          self.current_datainfo = DataInfo()
          self.current_datainfo.filename = filepath
-         self.reset_data_list(len(lines))
          detector = Detector()
          data_line = 0
…

              try:
-                 _x = float(toks[0])
+                 _x = float(toks[4])
                  _y = float(toks[1])
                  _dy = float(toks[2])
…
                  self.current_dataset.y[data_line] = _y
                  self.current_dataset.dy[data_line] = _dy
-                 self.current_dataset.dx[data_line] = _dx
+                 if _dx > 0:
+                     self.current_dataset.dx[data_line] = _dx
+                 else:
+                     if data_line == 0:
+                         self.current_dataset.dx = None
+                         self.current_dataset.dxl = np.zeros(len(lines))
+                         self.current_dataset.dxw = np.zeros(len(lines))
+                     self.current_dataset.dxl[data_line] = abs(_dx)
+                     self.current_dataset.dxw[data_line] = 0
                  data_line += 1
…
                      pass

+             # SANS Data:
              # The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
              # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
-             if line.count("The 6 columns") > 0:
+             # USANS Data:
+             # EMP LEVEL: <value> ; BKG LEVEL: <value>
+             if line.startswith("The 6 columns") or line.startswith("EMP LEVEL"):
                  is_data_started = True
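The net effect of the new branch: a loaded 1D data set now carries either dx (pinhole resolution) or dxl/dxw (slit-smeared USANS resolution), never both, with a non-positive resolution column in the file signalling the slit case. A small illustrative helper (not SasView API) showing how downstream code can tell the two apart by attribute:

    def smearing_type(data):
        """Classify a loaded 1D data set by which resolution columns it carries."""
        if getattr(data, "dxl", None) is not None:
            return "slit-smeared (USANS)"
        if getattr(data, "dx", None) is not None:
            return "pinhole (SANS)"
        return "no resolution information"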
src/sas/sascalc/dataloader/readers/associations.py
r574adc7 → rb8080e1

      ".dat": "red2d_reader",
      ".abs": "abs_reader",
+     ".cor": "abs_reader",
      ".sans": "danse_reader",
      ".pdh": "anton_paar_saxs_reader"
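With this mapping, .cor files now reach the IGOR reader through normal extension dispatch. A quick check, assuming the standard Loader entry point and a hypothetical file name:

    from sas.sascalc.dataloader.loader import Loader

    # 'example_usans.cor' is a placeholder path; dispatch now picks abs_reader
    data_list = Loader().load("example_usans.cor")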
src/sas/sascalc/dataloader/readers/cansas_reader.py
r2b538cd → rb8080e1

          data files do not appear a second time
          """
-         self.current_datainfo = None
-         self.current_dataset = None
-         self.current_data1d = None
+         super(Reader, self).reset_state()
          self.data = []
          self.process = Process()
…
          self.names = []
          self.cansas_defaults = {}
-         self.output = []
          self.ns_list = None
          self.logging = []

      def read(self, xml_file, schema_path="", invalid=True):
-         if schema_path != "" or invalid != True:
+         if schema_path != "" or not invalid:
              # read has been called from self.get_file_contents because xml file doens't conform to schema
              _, self.extension = os.path.splitext(os.path.basename(xml_file))
…
              pos, "z", datainfo.sample.position.z,
              {"unit": datainfo.sample.position_unit})
-         if written == True:
+         if written:
              self.append(pos, sample)
…
              ori, "yaw", datainfo.sample.orientation.z,
              {"unit": datainfo.sample.orientation_unit})
-         if written == True:
+         if written:
              self.append(ori, sample)
…
              size, "z", datainfo.source.beam_size.z,
              {"unit": datainfo.source.beam_size_unit})
-         if written == True:
+         if written:
              self.append(size, source)
…
              size, "z", aperture.size.z,
              {"unit": aperture.size_unit})
-         if written == True:
+         if written:
              self.append(size, apert)
…
          written = written | self.write_node(det, "SDD", item.distance,
                                              {"unit": item.distance_unit})
-         if written == True:
+         if written:
              self.append(det, instr)
…
          written = written | self.write_node(off, "z", item.offset.z,
                                              {"unit": item.offset_unit})
-         if written == True:
+         if written:
              self.append(off, det)
…
                                              item.orientation.z,
                                              {"unit": item.orientation_unit})
-         if written == True:
+         if written:
              self.append(ori, det)
…
                                              item.beam_center.z,
                                              {"unit": item.beam_center_unit})
-         if written == True:
+         if written:
              self.append(center, det)
…
          written = written | self.write_node(pix, "z", item.pixel_size.z,
                                              {"unit": item.pixel_size_unit})
-         if written == True:
+         if written:
              self.append(pix, det)
          self.write_node(det, "slit_length", item.slit_length,
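A side note on the pattern being tidied here: the writer accumulates success with the bitwise form written = written | self.write_node(...) rather than or, because or would short-circuit and skip the remaining write_node calls once one of them returned True; this changeset only drops the redundant == True tests. An illustrative contrast (write_node below is a stand-in, not the SasView method):

    def write_node(value):
        """Stand-in writer: pretend to write a node, report whether it did."""
        print("write_node called with", value)
        return value is not None

    written = False
    written = written | write_node("x")   # runs
    written = written | write_node("y")   # still runs although written is True
    # written = written or write_node("y")  # would short-circuit, skipping the call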
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rc416a17 → rb8080e1

  import sys

- from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\
+ from ..data_info import plottable_1D, plottable_2D,\
      Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
      TransmissionSpectrum, Detector
- from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
-
-
- class Reader():
+ from ..data_info import combine_data_info_with_plottable
+ from ..loader_exceptions import FileContentsException, DefaultReaderException
+ from ..file_reader_base_class import FileReader, decode
+
+ def h5attr(node, key, default=None):
+     return decode(node.attrs.get(key, default))
+
+ class Reader(FileReader):
      """
      A class for reading in CanSAS v2.0 data files. The existing iteration opens
…
      Any number of SASdata sets may be present in a SASentry and the data within
      can be either 1D I(Q) or 2D I(Qx, Qy).
+
      Also supports reading NXcanSAS formatted HDF5 files
…
      # Raw file contents to be processed
      raw_data = None
-     # Data info currently being read in
-     current_datainfo = None
-     # SASdata set currently being read in
-     current_dataset = None
      # List of plottable1D objects that should be linked to the current_datainfo
      data1d = None
…
      # Flag to bypass extension check
      allow_all = True
-     # List of files to return
-     output = None

-     def read(self, filename):
+     def get_file_contents(self):
          """
          This is the general read method that all SasView data_loaders must have.
…
          """
          # Reinitialize when loading a new data file to reset all class variables
-         self.reset_class_variables()
+         self.reset_state()
+
+         filename = self.f_open.name
+         self.f_open.close()  # IO handled by h5py
+
          # Check that the file exists
          if os.path.isfile(filename):
…
          if extension in self.ext or self.allow_all:
              # Load the data file
-             self.raw_data = h5py.File(filename, 'r')
-             # Read in all child elements of top level SASroot
-             self.read_children(self.raw_data, [])
-             # Add the last data set to the list of outputs
-             self.add_data_set()
-             # Close the data file
-             self.raw_data.close()
-             # Return data set(s)
-             return self.output
-
-     def reset_class_variables(self):
+             try:
+                 self.raw_data = h5py.File(filename, 'r')
+             except Exception as e:
+                 if extension not in self.ext:
+                     msg = "CanSAS2.0 HDF5 Reader could not load file {}".format(basename + extension)
+                     raise DefaultReaderException(msg)
+                 raise FileContentsException(e.message)
+             try:
+                 # Read in all child elements of top level SASroot
+                 self.read_children(self.raw_data, [])
+                 # Add the last data set to the list of outputs
+                 self.add_data_set()
+             except Exception as exc:
+                 raise FileContentsException(exc.message)
+             finally:
+                 # Close the data file
+                 self.raw_data.close()
+
+         for dataset in self.output:
+             if isinstance(dataset, Data1D):
+                 if dataset.x.size < 5:
+                     self.output = []
+                     raise FileContentsException("Fewer than 5 data points found.")
+
+     def reset_state(self):
          """
          Create the reader object and define initial states for class variables
          """
-         self.current_datainfo = None
-         self.current_dataset = None
+         super(Reader, self).reset_state()
          self.data1d = []
          self.data2d = []
…
          self.errors = set()
          self.logging = []
-         self.output = []
          self.parent_class = u''
          self.detector = Detector()
…
              # Get all information for the current key
              value = data.get(key)
-             if value.attrs.get(u'canSAS_class') is not None:
-                 class_name = value.attrs.get(u'canSAS_class')
-             else:
-                 class_name = value.attrs.get(u'NX_class')
+             class_name = h5attr(value, u'canSAS_class')
+             if class_name is None:
+                 class_name = h5attr(value, u'NX_class')
              if class_name is not None:
                  class_prog = re.compile(class_name)
…

              if isinstance(value, h5py.Group):
+                 # Set parent class before recursion
                  self.parent_class = class_name
                  parent_list.append(key)
…
                  # Recursion step to access data within the group
                  self.read_children(value, parent_list)
+                 # Reset parent class when returning from recursive method
+                 self.parent_class = class_name
                  self.add_intermediate()
                  parent_list.remove(key)
…
                  self.current_dataset.x = data_set.flatten()
                  continue
+             elif key == u'Qdev':
+                 self.current_dataset.dx = data_set.flatten()
+                 continue
+             elif key == u'dQw':
+                 self.current_dataset.dxw = data_set.flatten()
+                 continue
+             elif key == u'dQl':
+                 self.current_dataset.dxl = data_set.flatten()
+                 continue
              elif key == u'Qy':
                  self.current_dataset.yaxis("Q_y", unit)
…

              for data_point in data_set:
+                 if isinstance(data_point, np.ndarray):
+                     if data_point.dtype.char == 'S':
+                         data_point = decode(bytes(data_point))
+                 else:
+                     data_point = decode(data_point)
                  # Top Level Meta Data
                  if key == u'definition':
…
                  elif key == u'run':
                      self.current_datainfo.run.append(data_point)
+                     try:
+                         run_name = h5attr(value, 'name')
+                         run_dict = {data_point: run_name}
+                         self.current_datainfo.run_name = run_dict
+                     except Exception:
+                         pass
                  elif key == u'title':
                      self.current_datainfo.title = data_point
…
          Data1D and Data2D objects
          """
-
          # Type cast data arrays to float64
          if len(self.current_datainfo.trans_spectrum) > 0:
…
          # Type cast data arrays to float64 and find min/max as appropriate
          for dataset in self.data2d:
-             dataset.data = dataset.data.astype(np.float64)
-             dataset.err_data = dataset.err_data.astype(np.float64)
-             if dataset.qx_data is not None:
-                 dataset.xmin = np.min(dataset.qx_data)
-                 dataset.xmax = np.max(dataset.qx_data)
-                 dataset.qx_data = dataset.qx_data.astype(np.float64)
-             if dataset.dqx_data is not None:
-                 dataset.dqx_data = dataset.dqx_data.astype(np.float64)
-             if dataset.qy_data is not None:
-                 dataset.ymin = np.min(dataset.qy_data)
-                 dataset.ymax = np.max(dataset.qy_data)
-                 dataset.qy_data = dataset.qy_data.astype(np.float64)
-             if dataset.dqy_data is not None:
-                 dataset.dqy_data = dataset.dqy_data.astype(np.float64)
-             if dataset.q_data is not None:
-                 dataset.q_data = dataset.q_data.astype(np.float64)
              zeros = np.ones(dataset.data.size, dtype=bool)
              try:
…
              dataset.x_bins = dataset.qx_data[:n_cols]
              dataset.data = dataset.data.flatten()
-
-             final_dataset = combine_data_info_with_plottable(
-                 dataset, self.current_datainfo)
-             self.output.append(final_dataset)
+             self.current_dataset = dataset
+             self.send_to_output()

          for dataset in self.data1d:
-             if dataset.x is not None:
-                 dataset.x = dataset.x.astype(np.float64)
-                 dataset.xmin = np.min(dataset.x)
-                 dataset.xmax = np.max(dataset.x)
-             if dataset.y is not None:
-                 dataset.y = dataset.y.astype(np.float64)
-                 dataset.ymin = np.min(dataset.y)
-                 dataset.ymax = np.max(dataset.y)
-             if dataset.dx is not None:
-                 dataset.dx = dataset.dx.astype(np.float64)
-             if dataset.dxl is not None:
-                 dataset.dxl = dataset.dxl.astype(np.float64)
-             if dataset.dxw is not None:
-                 dataset.dxw = dataset.dxw.astype(np.float64)
-             if dataset.dy is not None:
-                 dataset.dy = dataset.dy.astype(np.float64)
-             final_dataset = combine_data_info_with_plottable(
-                 dataset, self.current_datainfo)
-             self.output.append(final_dataset)
+             self.current_dataset = dataset
+             self.send_to_output()

      def add_data_set(self, key=""):
…
          :return: unit for the value passed to the method
          """
-         unit = value.attrs.get(u'units')
+         unit = h5attr(value, u'units')
          if unit is None:
-             unit = value.attrs.get(u'unit')
+             unit = h5attr(value, u'unit')
          # Convert the unit formats
          if unit == "1/A":
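The new h5attr() helper exists because h5py can return attribute values as bytes under Python 3; wrapping attrs.get() with decode() gives callers native strings either way. A self-contained sketch ('demo.h5' is a scratch file created just for the example):

    import h5py

    def decode(s):
        # Same idea as the reader's decode(): bytes -> str, anything else as-is
        return s.decode() if isinstance(s, bytes) else s

    def h5attr(node, key, default=None):
        return decode(node.attrs.get(key, default))

    with h5py.File("demo.h5", "w") as f:
        grp = f.create_group("sasentry01")
        grp.attrs["canSAS_class"] = b"SASentry"  # stored as bytes

    with h5py.File("demo.h5", "r") as f:
        name = h5attr(f["sasentry01"], u"canSAS_class")  # 'SASentry' as str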
src/sas/sascalc/dataloader/readers/danse_reader.py
rcee5c78 → rb8080e1

      # Store all data
      # Store wavelength
-     if has_converter == True and self.current_datainfo.source.wavelength_unit != 'A':
+     if has_converter and self.current_datainfo.source.wavelength_unit != 'A':
          conv = Converter('A')
          wavelength = conv(wavelength,
…

      # Store distance
-     if has_converter == True and detector.distance_unit != 'm':
+     if has_converter and detector.distance_unit != 'm':
          conv = Converter('m')
          distance = conv(distance, units=detector.distance_unit)
…

      # Store pixel size
-     if has_converter == True and detector.pixel_size_unit != 'mm':
+     if has_converter and detector.pixel_size_unit != 'mm':
          conv = Converter('mm')
          pixel = conv(pixel, units=detector.pixel_size_unit)
…
      x_vals = np.tile(x_vals, (size_y, 1)).flatten()
      y_vals = np.tile(y_vals, (size_x, 1)).T.flatten()
-     if (np.all(self.current_dataset.err_data is None)
+     if (np.all(self.current_dataset.err_data == None)
          or np.any(self.current_dataset.err_data <= 0)):
          new_err_data = np.sqrt(np.abs(self.current_dataset.data))
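The err_data change is more than style: err_data is a NumPy array, so err_data is None is a single Python False whenever the array exists, and np.all() of that scalar tested nothing useful. Comparing with == None broadcasts elementwise over an object array, which is what an "are all errors missing" check needs. Demonstration:

    import numpy as np

    err = np.array([None, None, 1.0], dtype=object)
    print(err is None)                  # False: compares the array object itself
    print(err == None)                  # [True True False]: elementwise
    print(np.all(err == None))          # False: not every element is None

    all_missing = np.array([None, None], dtype=object)
    print(np.all(all_missing == None))  # True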
src/sas/sascalc/dataloader/readers/sesans_reader.py
r849094a → rb8080e1

  from ..file_reader_base_class import FileReader
  from ..data_info import plottable_1D, DataInfo
- from ..loader_exceptions import FileContentsException, DataReaderException
+ from ..loader_exceptions import FileContentsException

  # Check whether we have a converter available
…
  try:
      from sas.sascalc.data_util.nxsunit import Converter
- except:
+ except ImportError:
      has_converter = False
  _ZERO = 1e-16
…
          line = self.nextline()
          params = {}
-         while not line.startswith("BEGIN_DATA"):
+         while line and not line.startswith("BEGIN_DATA"):
              terms = line.split()
              if len(terms) >= 2:
…
              raise FileContentsException("Wavelength has no units")
          if params["SpinEchoLength_unit"] != params["Wavelength_unit"]:
-             raise FileContentsException("The spin echo data has rudely used "
-                                         "different units for the spin echo length "
-                                         "and the wavelength. While sasview could "
-                                         "handle this instance, it is a violation "
-                                         "of the file format and will not be "
-                                         "handled by other software.")
+             raise FileContentsException(
+                 "The spin echo data has rudely used "
+                 "different units for the spin echo length "
+                 "and the wavelength. While sasview could "
+                 "handle this instance, it is a violation "
+                 "of the file format and will not be "
+                 "handled by other software.")

          headers = self.nextline().split()
…

          if not data.size:
-             raise FileContentsException("{} is empty".format(path))
+             raise FileContentsException("{} is empty".format(self.filepath))
          x = data[:, headers.index("SpinEchoLength")]
          if "SpinEchoLength_error" in headers:
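The added line-truthiness guard matters at end of file: nextline() returns an empty string once input is exhausted, ''.startswith("BEGIN_DATA") is False, and the old loop could spin forever on a header block that never terminates. A sketch of the guarded header scan, with a list standing in for the file handle and a simplified params layout:

    def scan_headers(lines):
        """Collect 'key value...' pairs until BEGIN_DATA or end of input."""
        params = {}
        it = iter(lines)
        line = next(it, "")  # "" plays the role of EOF
        while line and not line.startswith("BEGIN_DATA"):
            terms = line.split()
            if len(terms) >= 2:
                params[terms[0]] = " ".join(terms[1:])
            line = next(it, "")
        return params

    # Terminates cleanly even though BEGIN_DATA never appears:
    print(scan_headers(["SpinEchoLength 100 A", "Wavelength 2 A"]))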