Changeset [fcba29a:3931ea14] in sasview
Files:
- 6 added
- 13 deleted
- 4 edited
src/sas/sascalc/dataloader/data_info.py
--- rd72567e
+++ rb699768
         clone.source = deepcopy(self.source)
         clone.collimation = deepcopy(self.collimation)
-        clone.trans_spectrum = deepcopy(self.trans_spectrum)
         clone.meta_data = deepcopy(self.meta_data)
         clone.errors = deepcopy(self.errors)
… …
 
         return result
-
-
-def combine_data_info_with_plottable(data, datainfo):
-    """
-    A function that combines the DataInfo data in self.current_datainto with a plottable_1D or 2D data object.
-
-    :param data: A plottable_1D or plottable_2D data object
-    :return: A fully specified Data1D or Data2D object
-    """
-
-    final_dataset = None
-    if isinstance(data, plottable_1D):
-        final_dataset = Data1D(data.x, data.y)
-        final_dataset.dx = data.dx
-        final_dataset.dy = data.dy
-        final_dataset.dxl = data.dxl
-        final_dataset.dxw = data.dxw
-        final_dataset.xaxis(data._xaxis, data._xunit)
-        final_dataset.yaxis(data._yaxis, data._yunit)
-    elif isinstance(data, plottable_2D):
-        final_dataset = Data2D(data.data, data.err_data, data.qx_data, data.qy_data, data.q_data,
-                               data.mask, data.dqx_data, data.dqy_data)
-        final_dataset.xaxis(data._xaxis, data._xunit)
-        final_dataset.yaxis(data._yaxis, data._yunit)
-        final_dataset.zaxis(data._zaxis, data._zunit)
-    else:
-        return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \
-                        "plottable2d data object"
-        return return_string
-
-    final_dataset.xmax = data.xmax
-    final_dataset.ymax = data.ymax
-    final_dataset.xmin = data.xmin
-    final_dataset.ymin = data.ymin
-    final_dataset.title = datainfo.title
-    final_dataset.run = datainfo.run
-    final_dataset.run_name = datainfo.run_name
-    final_dataset.filename = datainfo.filename
-    final_dataset.notes = datainfo.notes
-    final_dataset.process = datainfo.process
-    final_dataset.instrument = datainfo.instrument
-    final_dataset.detector = datainfo.detector
-    final_dataset.sample = datainfo.sample
-    final_dataset.source = datainfo.source
-    final_dataset.collimation = datainfo.collimation
-    final_dataset.trans_spectrum = datainfo.trans_spectrum
-    final_dataset.meta_data = datainfo.meta_data
-    final_dataset.errors = datainfo.errors
-    return final_dataset
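For context on what is being dropped here: the removed helper copied every piece of DataInfo metadata onto a freshly built Data1D/Data2D object. A minimal standalone sketch of that merge pattern (hypothetical helper name and field list, not the API that remains after this changeset):

    # Hypothetical sketch: copy DataInfo metadata attributes onto a dataset,
    # the way combine_data_info_with_plottable() did before its removal.
    METADATA_FIELDS = ("title", "run", "run_name", "filename", "notes",
                       "process", "instrument", "detector", "sample",
                       "source", "collimation", "meta_data", "errors")

    def merge_metadata(dataset, datainfo):
        for field in METADATA_FIELDS:
            setattr(dataset, field, getattr(datainfo, field))
        return dataset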
src/sas/sascalc/dataloader/readers/cansas_reader.py
--- r83b6408
+++ r8976865
 from xml.dom.minidom import parseString
 
-## TODO: Refactor to load multiple <SASData> as separate Data1D objects
-## TODO: Refactor to allow invalid XML, but give a useful warning when loaded
-
 _ZERO = 1e-16
 PREPROCESS = "xmlpreprocess"
… …
         return False
 
-    def load_file_and_schema(self, xml_file, schema_path=""):
+    def load_file_and_schema(self, xml_file):
         """
         Loads the file and associates a schema, if a known schema exists
… …
         # Generic values for the cansas file based on the version
         cansas_defaults = CANSAS_NS.get(self.cansas_version, "1.0")
-        if schema_path == "":
-            schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format\
+        schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format\
                 (base, cansas_defaults.get("schema")).replace("\\", "/")
 
… …
         return cansas_defaults
 
-    ## TODO: Test loading invalid CanSAS XML files and see if this works
-    ## TODO: Once works, try adding a warning that the data is invalid
-    def read(self, xml_file, schema_path=""):
+    def read(self, xml_file):
         """
         Validate and read in an xml_file file in the canSAS format.
… …
         if extension in self.ext or self.allow_all:
             # Get the file location of
-            cansas_defaults = self.load_file_and_schema(xml_file, schema_path)
+            cansas_defaults = self.load_file_and_schema(xml_file)
 
             # Try to load the file, but raise an error if unable to.
… …
             except:
                 # If the file does not match the schema, raise this error
-                schema_path = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid.xsd"
-                invalid_xml = self.find_invalid_xml()
-                invalid_xml = "\n\nThe loaded xml file does not fully meet the CanSAS v1.x specification. SasView " + \
-                              "loaded as much of the data as possible.\n\n" + invalid_xml
-                self.errors.add(invalid_xml)
-                self.set_schema(schema_path)
-                if self.is_cansas():
-                    output = self.read(xml_file, schema_path)
-                else:
-                    raise RuntimeError, "%s cannot be read" % xml_file
+                raise RuntimeError, "%s cannot be read" % xml_file
         return output
         # Return a list of parsed entries that dataloader can manage
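With the fallback removed, an XML file that fails schema validation now raises immediately instead of being reloaded against the permissive cansas1d_invalid.xsd schema, so calling code has to handle the failure itself. A minimal caller-side sketch (the input file name is hypothetical):

    from sas.sascalc.dataloader.loader import Loader

    loader = Loader()
    try:
        data_list = loader.load("my_data.xml")  # hypothetical input file
    except RuntimeError as err:
        # Schema-invalid CanSAS XML now surfaces here as
        # "<file> cannot be read" rather than partially loading.
        print("Load failed: %s" % err)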
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
--- rd72567e
+++ rd398285
 import sys
 
-from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, DataInfo, Process, Aperture
-from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector
-from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
+from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, Sample, Source
+from sas.sascalc.dataloader.data_info import Process, Aperture, Collimation, TransmissionSpectrum, Detector
 
… …
     with file extension .h5/.H5. Any number of data sets may be present within the file and any dimensionality of data
     may be used. Currently 1D and 2D SAS data sets are supported, but future implementations will include 1D and 2D
-    SESANS data.
-
-    Any number of SASdata sets may be present in a SASentry and the data within can be either 1D I(Q) or 2D I(Qx, Qy).
+    SESANS data. This class assumes a single data set for each sasentry.
 
     :Dependencies:
-        The CanSAS HDF5 reader requires h5py => v2.5.0 or later.
+        The CanSAS HDF5 reader requires h5py v2.5.0 or later.
… …
     ## Raw file contents to be processed
     raw_data = None
-    ## Data info currently being read in
-    current_datainfo = None
-    ## SASdata set currently being read in
+    ## Data set being modified
     current_dataset = None
-    ## List of plottable1D objects that should be linked to the current_datainfo
-    data1d = None
-    ## List of plottable2D objects that should be linked to the current_datainfo
-    data2d = None
+    ## For recursion and saving purposes, remember parent objects
+    parent_list = None
     ## Data type name
     type_name = "CanSAS 2.0"
… …
     output = None
 
+    def __init__(self):
+        """
+        Create the reader object and define initial states for class variables
+        """
+        self.current_dataset = None
+        self.datasets = []
+        self.raw_data = None
+        self.errors = set()
+        self.logging = []
+        self.parent_list = []
+        self.output = []
+        self.detector = Detector()
+        self.collimation = Collimation()
+        self.aperture = Aperture()
+        self.process = Process()
+        self.sample = Sample()
+        self.source = Source()
+        self.trans_spectrum = TransmissionSpectrum()
+
     def read(self, filename):
… …
 
         :param filename: A path for an HDF5 formatted CanSAS 2D data file.
-        :return: List of Data1D/2D objects and/or a list of errors.
+        :return: List of Data1D/2D objects or a list of errors.
         """
 
         ## Reinitialize the class when loading a new data file to reset all class variables
-        self.reset_class_variables()
+        self.__init__()
         ## Check that the file exists
         if os.path.isfile(filename):
… …
             self.raw_data = h5py.File(filename, 'r')
             ## Read in all child elements of top level SASroot
-            self.read_children(self.raw_data, [])
+            self.read_children(self.raw_data)
             ## Add the last data set to the list of outputs
             self.add_data_set()
… …
         return self.output
 
-    def reset_class_variables(self):
-        """
-        Create the reader object and define initial states for class variables
-        """
-        self.current_datainfo = None
-        self.current_dataset = None
-        self.data1d = []
-        self.data2d = []
-        self.raw_data = None
-        self.errors = set()
-        self.logging = []
-        self.output = []
-        self.parent_class = u''
-        self.detector = Detector()
-        self.collimation = Collimation()
-        self.aperture = Aperture()
-        self.process = Process()
-        self.trans_spectrum = TransmissionSpectrum()
-
-    def read_children(self, data, parent_list):
+    def read_children(self, data, parent=u'SASroot'):
         """
         A recursive method for stepping through the hierarchical data file.
 
         :param data: h5py Group object of any kind
         :param parent: h5py Group parent name
-        """
+        :return: None
+        """
+
+        ## Create regex for base sasentry and for parent
+        parent_prog = re.compile(parent)
 
         ## Loop through each element of the parent and process accordingly
… …
             ## Get all information for the current key
             value = data.get(key)
+            attr_keys = value.attrs.keys()
+            attr_values = value.attrs.values()
             if value.attrs.get(u'canSAS_class') is not None:
                 class_name = value.attrs.get(u'canSAS_class')
… …
 
             if isinstance(value, h5py.Group):
-                self.parent_class = class_name
-                parent_list.append(key)
-                ## If this is a new sasentry, store the current data sets and create a fresh Data1D/2D object
+                ##TODO: Rework this for multiple SASdata objects within a single SASentry to allow for both 1D and 2D
+                ##TODO: data within the same SASentry - One 1D and one 2D data object for all SASdata sets?
+                ## If this is a new sasentry, store the current data set and create a fresh Data1D/2D object
                 if class_prog.match(u'SASentry'):
                     self.add_data_set(key)
-                elif class_prog.match(u'SASdata'):
-                    self._initialize_new_data_set(parent_list)
                 ## Recursion step to access data within the group
-                self.read_children(value, parent_list)
-                self.add_intermediate()
-                parent_list.remove(key)
+                self.read_children(value, class_name)
+                self.add_intermediate(class_name)
 
             elif isinstance(value, h5py.Dataset):
… …
                 unit = self._get_unit(value)
                 if key == u'definition':
-                    self.current_datainfo.meta_data['reader'] = data_point
+                    self.current_dataset.meta_data['reader'] = data_point
                 elif key == u'run':
-                    self.current_datainfo.run.append(data_point)
+                    self.current_dataset.run.append(data_point)
                 elif key == u'title':
-                    self.current_datainfo.title = data_point
+                    self.current_dataset.title = data_point
                 elif key == u'SASnote':
-                    self.current_datainfo.notes.append(data_point)
+                    self.current_dataset.notes.append(data_point)
 
                 ## I and Q Data
                 elif key == u'I':
-                    if type(self.current_dataset) is plottable_2D:
+                    if type(self.current_dataset) is Data2D:
                         self.current_dataset.data = np.append(self.current_dataset.data, data_point)
                         self.current_dataset.zaxis("Intensity", unit)
… …
                         self.current_dataset.yaxis("Intensity", unit)
                 elif key == u'Idev':
-                    if type(self.current_dataset) is plottable_2D:
+                    if type(self.current_dataset) is Data2D:
                         self.current_dataset.err_data = np.append(self.current_dataset.err_data, data_point)
                     else:
… …
                 elif key == u'Q':
                     self.current_dataset.xaxis("Q", unit)
-                    if type(self.current_dataset) is plottable_2D:
+                    if type(self.current_dataset) is Data2D:
                         self.current_dataset.q = np.append(self.current_dataset.q, data_point)
                     else:
… …
 
                 ## Sample Information
-                elif key == u'Title' and self.parent_class == u'SASsample':
-                    self.current_datainfo.sample.name = data_point
-                elif key == u'thickness' and self.parent_class == u'SASsample':
-                    self.current_datainfo.sample.thickness = data_point
-                elif key == u'temperature' and self.parent_class == u'SASsample':
-                    self.current_datainfo.sample.temperature = data_point
+                elif key == u'Title' and parent == u'SASsample':
+                    self.sample.name = data_point
+                elif key == u'thickness' and parent == u'SASsample':
+                    self.sample.thickness = data_point
+                elif key == u'temperature' and parent == u'SASsample':
+                    self.sample.temperature = data_point
 
                 ## Instrumental Information
-                elif key == u'name' and self.parent_class == u'SASinstrument':
-                    self.current_datainfo.instrument = data_point
-                elif key == u'name' and self.parent_class == u'SASdetector':
+                elif key == u'name' and parent == u'SASinstrument':
+                    self.current_dataset.instrument = data_point
+                elif key == u'name' and parent == u'SASdetector':
                     self.detector.name = data_point
-                elif key == u'SDD' and self.parent_class == u'SASdetector':
-                    self.detector.distance = float(data_point)
+                elif key == u'SDD' and parent == u'SASdetector':
+                    self.detector.distance = data_point
                     self.detector.distance_unit = unit
-                elif key == u'SSD' and self.parent_class == u'SAScollimation':
+                elif key == u'SSD' and parent == u'SAScollimation':
                     self.collimation.length = data_point
                     self.collimation.length_unit = unit
-                elif key == u'name' and self.parent_class == u'SAScollimation':
+                elif key == u'name' and parent == u'SAScollimation':
                     self.collimation.name = data_point
 
                 ## Process Information
-                elif key == u'name' and self.parent_class == u'SASprocess':
+                elif key == u'name' and parent == u'SASprocess':
                     self.process.name = data_point
-                elif key == u'Title' and self.parent_class == u'SASprocess':
+                elif key == u'Title' and parent == u'SASprocess':
                     self.process.name = data_point
-                elif key == u'description' and self.parent_class == u'SASprocess':
+                elif key == u'description' and parent == u'SASprocess':
                     self.process.description = data_point
-                elif key == u'date' and self.parent_class == u'SASprocess':
+                elif key == u'date' and parent == u'SASprocess':
                     self.process.date = data_point
-                elif self.parent_class == u'SASprocess':
+                elif parent == u'SASprocess':
                     self.process.notes.append(data_point)
 
                 ## Transmission Spectrum
-                elif key == u'T' and self.parent_class == u'SAStransmission_spectrum':
+                elif key == u'T' and parent == u'SAStransmission_spectrum':
                     self.trans_spectrum.transmission.append(data_point)
-                elif key == u'Tdev' and self.parent_class == u'SAStransmission_spectrum':
+                elif key == u'Tdev' and parent == u'SAStransmission_spectrum':
                     self.trans_spectrum.transmission_deviation.append(data_point)
-                elif key == u'lambda' and self.parent_class == u'SAStransmission_spectrum':
+                elif key == u'lambda' and parent == u'SAStransmission_spectrum':
                     self.trans_spectrum.wavelength.append(data_point)
 
                 ## Other Information
-                elif key == u'wavelength' and self.parent_class == u'SASdata':
-                    self.current_datainfo.source.wavelength = data_point
-                    self.current_datainfo.source.wavelength.unit = unit
-                elif key == u'radiation' and self.parent_class == u'SASsource':
-                    self.current_datainfo.source.radiation = data_point
-                elif key == u'transmission' and self.parent_class == u'SASdata':
-                    self.current_datainfo.sample.transmission = data_point
+                elif key == u'wavelength' and parent == u'SASdata':
+                    self.source.wavelength = data_point
+                    self.source.wavelength.unit = unit
+                elif key == u'radiation' and parent == u'SASsource':
+                    self.source.radiation = data_point
+                elif key == u'transmission' and parent == u'SASdata':
+                    self.sample.transmission = data_point
 
                 ## Everything else goes in meta_data
                 else:
-                    new_key = self._create_unique_key(self.current_datainfo.meta_data, key)
-                    self.current_datainfo.meta_data[new_key] = data_point
+                    new_key = self._create_unique_key(self.current_dataset.meta_data, key)
+                    self.current_dataset.meta_data[new_key] = data_point
 
             else:
… …
                 self.errors.add("ShouldNeverHappenException")
 
-    def add_intermediate(self):
+    def add_intermediate(self, parent):
         """
         This method stores any intermediate objects within the final data set after fully reading the set.
 
         :param parent: The NXclass name for the h5py Group object that just finished being processed
-        """
-
-        if self.parent_class == u'SASprocess':
-            self.current_datainfo.process.append(self.process)
+        :return:
+        """
+
+        if parent == u'SASprocess':
+            self.current_dataset.process.append(self.process)
             self.process = Process()
-        elif self.parent_class == u'SASdetector':
-            self.current_datainfo.detector.append(self.detector)
+        elif parent == u'SASdetector':
+            self.current_dataset.detector.append(self.detector)
             self.detector = Detector()
-        elif self.parent_class == u'SAStransmission_spectrum':
-            self.current_datainfo.trans_spectrum.append(self.trans_spectrum)
+        elif parent == u'SAStransmission_spectrum':
+            self.current_dataset.trans_spectrum.append(self.trans_spectrum)
             self.trans_spectrum = TransmissionSpectrum()
-        elif self.parent_class == u'SAScollimation':
-            self.current_datainfo.collimation.append(self.collimation)
+        elif parent == u'SASsource':
+            self.current_dataset.source = self.source
+            self.source = Source()
+        elif parent == u'SASsample':
+            self.current_dataset.sample = self.sample
+            self.sample = Sample()
+        elif parent == u'SAScollimation':
+            self.current_dataset.collimation.append(self.collimation)
             self.collimation = Collimation()
-        elif self.parent_class == u'SASaperture':
+        elif parent == u'SASaperture':
             self.collimation.aperture.append(self.aperture)
             self.aperture = Aperture()
-        elif self.parent_class == u'SASdata':
-            if type(self.current_dataset) is plottable_2D:
-                self.data2d.append(self.current_dataset)
-            elif type(self.current_dataset) is plottable_1D:
-                self.data1d.append(self.current_dataset)
 
     def final_data_cleanup(self):
         """
-        Does some final cleanup and formatting on self.current_datainfo and all data1D and data2D objects and then
-        combines the data and info into Data1D and Data2D objects
-        """
-
-        ## Type cast data arrays to float64
-        if len(self.current_datainfo.trans_spectrum) > 0:
+        Does some final cleanup and formatting on self.current_dataset
+        """
+
+        ## Type cast data arrays to float64 and find min/max as appropriate
+        if type(self.current_dataset) is Data2D:
+            self.current_dataset.data = np.delete(self.current_dataset.data, [0])
+            self.current_dataset.data = self.current_dataset.data.astype(np.float64)
+            self.current_dataset.err_data = np.delete(self.current_dataset.err_data, [0])
+            self.current_dataset.err_data = self.current_dataset.err_data.astype(np.float64)
+            self.current_dataset.mask = np.delete(self.current_dataset.mask, [0])
+            if self.current_dataset.qx_data is not None:
+                self.current_dataset.qx_data = np.delete(self.current_dataset.qx_data, [0])
+                self.current_dataset.xmin = np.min(self.current_dataset.qx_data)
+                self.current_dataset.xmax = np.max(self.current_dataset.qx_data)
+                self.current_dataset.qx_data = self.current_dataset.qx_data.astype(np.float64)
+            if self.current_dataset.dqx_data is not None:
+                self.current_dataset.dqx_data = np.delete(self.current_dataset.dqx_data, [0])
+                self.current_dataset.dqx_data = self.current_dataset.dqx_data.astype(np.float64)
+            if self.current_dataset.qy_data is not None:
+                self.current_dataset.qy_data = np.delete(self.current_dataset.qy_data, [0])
+                self.current_dataset.ymin = np.min(self.current_dataset.qy_data)
+                self.current_dataset.ymax = np.max(self.current_dataset.qy_data)
+                self.current_dataset.qy_data = self.current_dataset.qy_data.astype(np.float64)
+            if self.current_dataset.dqy_data is not None:
+                self.current_dataset.dqy_data = np.delete(self.current_dataset.dqy_data, [0])
+                self.current_dataset.dqy_data = self.current_dataset.dqy_data.astype(np.float64)
+            if self.current_dataset.q_data is not None:
+                self.current_dataset.q_data = np.delete(self.current_dataset.q_data, [0])
+                self.current_dataset.q_data = self.current_dataset.q_data.astype(np.float64)
+            zeros = np.ones(self.current_dataset.data.size, dtype=bool)
+            try:
+                for i in range(0, self.current_dataset.mask.size - 1):
+                    zeros[i] = self.current_dataset.mask[i]
+            except:
+                self.errors.add(sys.exc_value)
+            self.current_dataset.mask = zeros
+
+            ## Calculate the actual Q matrix
+            try:
+                if self.current_dataset.q_data.size <= 1:
+                    self.current_dataset.q_data = np.sqrt(self.current_dataset.qx_data * self.current_dataset.qx_data +
+                                                          self.current_dataset.qy_data * self.current_dataset.qy_data)
+            except:
+                self.current_dataset.q_data = None
+
+        elif type(self.current_dataset) is Data1D:
+            if self.current_dataset.x is not None:
+                self.current_dataset.x = np.delete(self.current_dataset.x, [0])
+                self.current_dataset.x = self.current_dataset.x.astype(np.float64)
+                self.current_dataset.xmin = np.min(self.current_dataset.x)
+                self.current_dataset.xmax = np.max(self.current_dataset.x)
+            if self.current_dataset.y is not None:
+                self.current_dataset.y = np.delete(self.current_dataset.y, [0])
+                self.current_dataset.y = self.current_dataset.y.astype(np.float64)
+                self.current_dataset.ymin = np.min(self.current_dataset.y)
+                self.current_dataset.ymax = np.max(self.current_dataset.y)
+            if self.current_dataset.dx is not None:
+                self.current_dataset.dx = np.delete(self.current_dataset.dx, [0])
+                self.current_dataset.dx = self.current_dataset.dx.astype(np.float64)
+            if self.current_dataset.dxl is not None:
+                self.current_dataset.dxl = np.delete(self.current_dataset.dxl, [0])
+                self.current_dataset.dxl = self.current_dataset.dxl.astype(np.float64)
+            if self.current_dataset.dxw is not None:
+                self.current_dataset.dxw = np.delete(self.current_dataset.dxw, [0])
+                self.current_dataset.dxw = self.current_dataset.dxw.astype(np.float64)
+            if self.current_dataset.dy is not None:
+                self.current_dataset.dy = np.delete(self.current_dataset.dy, [0])
+                self.current_dataset.dy = self.current_dataset.dy.astype(np.float64)
+
+        if len(self.current_dataset.trans_spectrum) is not 0:
             spectrum_list = []
-            for spectrum in self.current_datainfo.trans_spectrum:
+            for spectrum in self.current_dataset.trans_spectrum:
                 spectrum.transmission = np.delete(spectrum.transmission, [0])
                 spectrum.transmission = spectrum.transmission.astype(np.float64)
… …
                 spectrum.wavelength = np.delete(spectrum.wavelength, [0])
                 spectrum.wavelength = spectrum.wavelength.astype(np.float64)
-                if len(spectrum.transmission) > 0:
-                    spectrum_list.append(spectrum)
-            self.current_datainfo.trans_spectrum = spectrum_list
+                spectrum_list.append(spectrum)
+            self.current_dataset.trans_spectrum = spectrum_list
+
+        else:
+            self.errors.add("ShouldNeverHappenException")
+
+        ## Append intermediate objects to data
+        self.current_dataset.sample = self.sample
+        self.current_dataset.source = self.source
+        self.current_dataset.collimation.append(self.collimation)
 
         ## Append errors to dataset and reset class errors
-        self.current_datainfo.errors = self.errors
+        self.current_dataset.errors = self.errors
         self.errors.clear()
-
-        ## Combine all plottables with datainfo and append each to output
-        ## Type cast data arrays to float64 and find min/max as appropriate
-        for dataset in self.data2d:
-            dataset.data = np.delete(dataset.data, [0])
-            dataset.data = dataset.data.astype(np.float64)
-            dataset.err_data = np.delete(dataset.err_data, [0])
-            dataset.err_data = dataset.err_data.astype(np.float64)
-            dataset.mask = np.delete(dataset.mask, [0])
-            if dataset.qx_data is not None:
-                dataset.qx_data = np.delete(dataset.qx_data, [0])
-                dataset.xmin = np.min(dataset.qx_data)
-                dataset.xmax = np.max(dataset.qx_data)
-                dataset.qx_data = dataset.qx_data.astype(np.float64)
-            if dataset.dqx_data is not None:
-                dataset.dqx_data = np.delete(dataset.dqx_data, [0])
-                dataset.dqx_data = dataset.dqx_data.astype(np.float64)
-            if dataset.qy_data is not None:
-                dataset.qy_data = np.delete(dataset.qy_data, [0])
-                dataset.ymin = np.min(dataset.qy_data)
-                dataset.ymax = np.max(dataset.qy_data)
-                dataset.qy_data = dataset.qy_data.astype(np.float64)
-            if dataset.dqy_data is not None:
-                dataset.dqy_data = np.delete(dataset.dqy_data, [0])
-                dataset.dqy_data = dataset.dqy_data.astype(np.float64)
-            if dataset.q_data is not None:
-                dataset.q_data = np.delete(dataset.q_data, [0])
-                dataset.q_data = dataset.q_data.astype(np.float64)
-            zeros = np.ones(dataset.data.size, dtype=bool)
-            try:
-                for i in range(0, dataset.mask.size - 1):
-                    zeros[i] = dataset.mask[i]
-            except:
-                self.errors.add(sys.exc_value)
-            dataset.mask = zeros
-            ## Calculate the actual Q matrix
-            try:
-                if dataset.q_data.size <= 1:
-                    dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data + dataset.qy_data * dataset.qy_data)
-            except:
-                dataset.q_data = None
-            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
-            self.output.append(final_dataset)
-
-        for dataset in self.data1d:
-            if dataset.x is not None:
-                dataset.x = np.delete(dataset.x, [0])
-                dataset.x = dataset.x.astype(np.float64)
-                dataset.xmin = np.min(dataset.x)
-                dataset.xmax = np.max(dataset.x)
-            if dataset.y is not None:
-                dataset.y = np.delete(dataset.y, [0])
-                dataset.y = dataset.y.astype(np.float64)
-                dataset.ymin = np.min(dataset.y)
-                dataset.ymax = np.max(dataset.y)
-            if dataset.dx is not None:
-                dataset.dx = np.delete(dataset.dx, [0])
-                dataset.dx = dataset.dx.astype(np.float64)
-            if dataset.dxl is not None:
-                dataset.dxl = np.delete(dataset.dxl, [0])
-                dataset.dxl = dataset.dxl.astype(np.float64)
-            if dataset.dxw is not None:
-                dataset.dxw = np.delete(dataset.dxw, [0])
-                dataset.dxw = dataset.dxw.astype(np.float64)
-            if dataset.dy is not None:
-                dataset.dy = np.delete(dataset.dy, [0])
-                dataset.dy = dataset.dy.astype(np.float64)
-            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
-            self.output.append(final_dataset)
 
     def add_data_set(self, key=""):
… …
 
         :param key: NeXus group name for current tree level
-        """
-
-        if self.current_datainfo and self.current_dataset:
+        :return: None
+        """
+        if self.current_dataset is not None:
             self.final_data_cleanup()
-        self.data1d = []
-        self.data2d = []
-        self.current_datainfo = DataInfo()
-
-    def _initialize_new_data_set(self, parent_list = None):
+            self.output.append(self.current_dataset)
+        self._initialize_new_data_set(key)
+
+    def _initialize_new_data_set(self, key=""):
         """
         A private class method to generate a new 1D or 2D data object based on the type of data within the set.
         Outside methods should call add_data_set() to be sure any existing data is stored properly.
 
-        :param parent_list: List of names of parent elements
-        """
-
-        if parent_list is None:
-            parent_list = []
-        if self._find_intermediate(parent_list, "Qx"):
-            self.current_dataset = plottable_2D()
+        :param key: NeXus group name for current tree level
+        :return: None
+        """
+        entry = self._find_intermediate(key, "sasentry*")
+        data = entry.get("sasdata")
+        if data.get("Qx") is not None:
+            self.current_dataset = Data2D()
         else:
             x = np.array(0)
             y = np.array(0)
-            self.current_dataset = plottable_1D(x, y)
-        self.current_datainfo.filename = self.raw_data.filename
-
-    def _find_intermediate(self, parent_list, basename=""):
+            self.current_dataset = Data1D(x, y)
+        self.current_dataset.filename = self.raw_data.filename
+
+    def _find_intermediate(self, key="", basename=""):
         """
         A private class used to find an entry by either using a direct key or knowing the approximate basename.
 
-        :param parent_list: List of parents to the current level in the HDF5 file
-        :param basename: Approximate name of an entry to search for
+        :param key: Exact keyname of an entry
+        :param basename: Approximate name of an entry
         :return:
         """
-
-        entry = False
-        key_prog = re.compile(basename)
-        top = self.raw_data
-        for parent in parent_list:
-            top = top.get(parent)
-        for key in top.keys():
-            if (key_prog.match(key)):
-                entry = True
-                break
+        entry = []
+        if key is not "":
+            entry = self.raw_data.get(key)
+        else:
+            key_prog = re.compile(basename)
+            for key in self.raw_data.keys():
+                if (key_prog.match(key)):
+                    entry = self.raw_data.get(key)
+                    break
         return entry
… …
         :param name: The index of the item to be added to dictionary
         :param numb: The number to be appended to the name, starts at 0
-        :return: The new name for the dictionary entry
         """
         if dictionary.get(name) is not None:
… …
 
         :param value: attribute dictionary for a particular value set
-        :return: unit for the value passed to the method
+        :return:
         """
         unit = value.attrs.get(u'units')
         if unit == None:
             unit = value.attrs.get(u'unit')
+
         ## Convert the unit formats
         if unit == "1/A":
… …
         elif unit == "1/cm":
             unit = "cm^{-1}"
+
         return unit
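The reverted reader walks the HDF5 tree by plain recursion over h5py groups, dispatching on each node's canSAS_class attribute, much as read_children() does above. A standalone sketch of that traversal pattern (hypothetical file name; requires h5py):

    import h5py

    def walk(group, parent=u'SASroot'):
        # Visit each child of an HDF5 group, recursing into sub-groups and
        # reporting datasets, keyed on the canSAS_class attribute.
        for key in group.keys():
            value = group.get(key)
            class_name = value.attrs.get(u'canSAS_class')
            if isinstance(value, h5py.Group):
                print("group %s (canSAS_class=%s, parent=%s)" % (key, class_name, parent))
                walk(value, class_name)
            elif isinstance(value, h5py.Dataset):
                print("dataset %s, shape %s" % (key, value.shape))

    with h5py.File("example.h5", "r") as f:  # hypothetical file
        walk(f)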
test/sasdataloader/test/utest_cansas.py
--- r83b6408
+++ rb699768
 Unit tests for the new recursive cansas reader
 """
+import logging
+import warnings
+warnings.simplefilter("ignore")
+
 import sas.sascalc.dataloader.readers.cansas_reader as cansas
 from sas.sascalc.dataloader.loader import Loader
-from sas.sascalc.dataloader.data_info import Data1D, Data2D
+from sas.sascalc.dataloader.data_info import Data1D
 from sas.sascalc.dataloader.readers.xml_reader import XMLreader
 from sas.sascalc.dataloader.readers.cansas_reader import Reader
… …
 import unittest
 import numpy
-import logging
-import warnings
 
 from lxml import etree
 from xml.dom import minidom
-
-warnings.simplefilter("ignore")
 
 CANSAS_FORMAT = CansasConstants.CANSAS_FORMAT
 CANSAS_NS = CansasConstants.CANSAS_NS
 
-class cansas_reader_xml(unittest.TestCase):
+class cansas_reader(unittest.TestCase):
 
     def setUp(self):
… …
         self.cansas1d_slit = "cansas1d_slit.xml"
         self.cansas1d_units = "cansas1d_units.xml"
-        self.cansas1d_notitle = "cansas1d_notitle.xml"
         self.isis_1_0 = "ISIS_1_0.xml"
         self.isis_1_1 = "ISIS_1_1.xml"
… …
         self.schema_1_0 = "cansas1d_v1_0.xsd"
         self.schema_1_1 = "cansas1d_v1_1.xsd"
 
     def get_number_of_entries(self, dictionary, name, i):
         if dictionary.get(name) is not None:
… …
             name = self.get_number_of_entries(dictionary, name, i)
         return name
 
     def test_invalid_xml(self):
         """
… …
         invalid = StringIO.StringIO('<a><c></b></a>')
         reader = XMLreader(invalid)
 
     def test_xml_validate(self):
… …
         self.assertTrue(xmlschema.validate(valid))
         self.assertFalse(xmlschema.validate(invalid))
 
     def test_real_xml(self):
         reader = XMLreader(self.xml_valid, self.schema_1_0)
… …
         else:
             self.assertFalse(valid)
 
     def _check_data(self, data):
         self.assertTrue(data.title == "TK49 c10_SANS")
… …
         self.assertTrue(data.process[0].name == "Mantid generated CanSAS1D XML")
         self.assertTrue(data.meta_data["xmlpreprocess"] != None)
 
     def _check_data_1_1(self, data):
         spectrum = data.trans_spectrum[0]
         self.assertTrue(len(spectrum.wavelength) == 138)
 
     def test_cansas_xml(self):
         filename = "isis_1_1_write_test.xml"
… …
         written_data = return_data[0]
         self._check_data(written_data)
 
     def test_double_trans_spectra(self):
         xmlreader = XMLreader(self.isis_1_1_doubletrans, self.schema_1_1)
… …
         for item in data:
             self._check_data(item)
 
     def test_entry_name_recurse(self):
         test_values = [1,2,3,4,5,6]
… …
             d[new_key] = value
         self.assertTrue(len(d) == 6)
 
     def test_load_cansas_file(self):
         valid = []
… …
         reader7 = XMLreader(self.isis_1_1, self.schema_1_0)
         self.assertFalse(reader7.validate_xml())
-
-
-    def test_invalid_cansas(self):
-        list = self.loader.load(self.cansas1d_notitle)
-        data = list[0]
-        self.assertTrue(data.x.size == 2)
-        self.assertTrue(len(data.meta_data) == 3)
-        self.assertTrue(len(data.errors) == 1)
-        self.assertTrue(data.detector[0].distance_unit == "mm")
-        self.assertTrue(data.detector[0].name == "fictional hybrid")
-        self.assertTrue(data.detector[0].distance == 4150)
-
+
+
     def test_old_cansas_files(self):
         reader1 = XMLreader(self.cansas1d, self.schema_1_0)
… …
         reader4 = XMLreader(self.cansas1d_slit, self.schema_1_0)
         self.assertTrue(reader4.validate_xml())
 
     def test_save_cansas_v1_0(self):
         filename = "isis_1_0_write_test.xml"
… …
         self.assertTrue(valid)
         self._check_data(written_data)
 
     def test_processing_instructions(self):
         reader = XMLreader(self.isis_1_1, self.schema_1_1)
… …
         self.assertTrue(dic == {'xml-stylesheet': \
             'type="text/xsl" href="cansas1d.xsl" '})
 
         xml = "<test><a><b><c></c></b></a></test>"
         xmldoc = minidom.parseString(xml)
 
         ## take the processing instructions and put them back in
         xmldoc = self.set_processing_instructions(xmldoc, dic)
         xml_output = xmldoc.toprettyxml()
 
     def set_processing_instructions(self, minidom_object, dic):
         xmlroot = minidom_object.firstChild
… …
             minidom_object.insertBefore(pi, xmlroot)
         return minidom_object
 
     def get_processing_instructions(self, xml_reader_object):
         dict = {}
… …
             pi = pi.getprevious()
         return dict
-
-
-class cansas_reader_hdf5(unittest.TestCase):
-
-    def setUp(self):
-        self.loader = Loader()
-        self.datafile_basic = "simpleexamplefile.h5"
-        self.datafile_multiplesasentry = "cansas_1Dand2D_samedatafile.h5"
-        self.datafile_multiplesasdata = "cansas_1Dand2D_samesasentry.h5"
-        self.datafile_multiplesasdata_multiplesasentry = "cansas_1Dand2D_multiplesasentry_multiplesasdata.h5"
-
-    def test_real_data(self):
-        self.data = self.loader.load(self.datafile_basic)
-        self._check_example_data(self.data[0])
-
-    def test_multiple_sasentries(self):
-        self.data = self.loader.load(self.datafile_multiplesasentry)
-        self.assertTrue(len(self.data) == 2)
-        self._check_multiple_data(self.data[0])
-        self._check_multiple_data(self.data[1])
-        self._check_1d_data(self.data[0])
-
-    def _check_multiple_data(self, data):
-        self.assertTrue(data.title == "MH4_5deg_16T_SLOW")
-        self.assertTrue(data.run[0] == '33837')
-        self.assertTrue(len(data.run) == 1)
-        self.assertTrue(data.instrument == "SANS2D")
-        self.assertTrue(data.source.radiation == "Spallation Neutron Source")
-        self.assertTrue(len(data.detector) == 1)
-        self.assertTrue(data.detector[0].name == "rear-detector")
-        self.assertTrue(data.detector[0].distance == 4.385281)
-        self.assertTrue(data.detector[0].distance_unit == 'm')
-        self.assertTrue(len(data.trans_spectrum) == 1)
-
-    def _check_1d_data(self, data):
-        self.assertTrue(isinstance(data, Data1D))
-        self.assertTrue(len(data.x) == 66)
-        self.assertTrue(len(data.x) == len(data.y))
-        self.assertTrue(data.dy[10] == 0.20721350111248701)
-        self.assertTrue(data.y[10] == 24.193889608153476)
-        self.assertTrue(data.x[10] == 0.008981127988654792)
-
-    def _check_2d_data(self, data):
-        self.assertTrue(isinstance(data, Data2D))
-        self.assertTrue(len(data.x) == 66)
-        self.assertTrue(len(data.x) == len(data.y))
-        self.assertTrue(data.dy[10] == 0.20721350111248701)
-        self.assertTrue(data.y[10] == 24.193889608153476)
-        self.assertTrue(data.x[10] == 0.008981127988654792)
-
-    def _check_example_data(self, data):
-        self.assertTrue(data.title == "")
-        self.assertTrue(data.x.size == 100)
-        self.assertTrue(data._xunit == "A^{-1}")
-        self.assertTrue(data._yunit == "cm^{-1}")
-        self.assertTrue(data.y.size == 100)
-        self.assertAlmostEqual(data.y[9], 0.952749011516985)
-        self.assertAlmostEqual(data.x[9], 0.3834415188257777)
-        self.assertAlmostEqual(len(data.meta_data), 0)
-
 
 
 if __name__ == '__main__':
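The key-naming recursion exercised by test_entry_name_recurse appends an increasing numeric suffix until a dictionary key is free, mirroring the reader's _create_unique_key(). A minimal standalone sketch of that scheme (hypothetical function name):

    def create_unique_key(dictionary, name, numb=0):
        # Append _1, _2, ... until the proposed key is not already taken.
        if dictionary.get(name) is not None:
            numb += 1
            name = name.split("_")[0] + "_" + str(numb)
            name = create_unique_key(dictionary, name, numb)
        return name

    d = {}
    for value in [1, 2, 3, 4, 5, 6]:
        d[create_unique_key(d, "test")] = value
    # d now holds the six keys "test", "test_1", ..., "test_5"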