Changeset 9b7c596 in sasview
- Timestamp: Aug 8, 2016 8:30:57 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: e070dc0
- Parents: 489bb46, ab06de7
Note: this is a merge changeset; the changes displayed below correspond to the merge itself, not to the full set of changes relative to either parent.
- Files: 22 added, 9 edited, 6 moved
src/sas/sascalc/dataloader/data_info.py
(rb699768 → rd72567e)

@@ -1073 +1073 @@
         clone.source = deepcopy(self.source)
         clone.collimation = deepcopy(self.collimation)
+        clone.trans_spectrum = deepcopy(self.trans_spectrum)
         clone.meta_data = deepcopy(self.meta_data)
         clone.errors = deepcopy(self.errors)
@@ -1226 +1227 @@
 
         return result
+
+
+def combine_data_info_with_plottable(data, datainfo):
+    """
+    A function that combines the DataInfo data in self.current_datainto with a plottable_1D or 2D data object.
+
+    :param data: A plottable_1D or plottable_2D data object
+    :return: A fully specified Data1D or Data2D object
+    """
+
+    final_dataset = None
+    if isinstance(data, plottable_1D):
+        final_dataset = Data1D(data.x, data.y)
+        final_dataset.dx = data.dx
+        final_dataset.dy = data.dy
+        final_dataset.dxl = data.dxl
+        final_dataset.dxw = data.dxw
+        final_dataset.xaxis(data._xaxis, data._xunit)
+        final_dataset.yaxis(data._yaxis, data._yunit)
+    elif isinstance(data, plottable_2D):
+        final_dataset = Data2D(data.data, data.err_data, data.qx_data, data.qy_data, data.q_data,
+                               data.mask, data.dqx_data, data.dqy_data)
+        final_dataset.xaxis(data._xaxis, data._xunit)
+        final_dataset.yaxis(data._yaxis, data._yunit)
+        final_dataset.zaxis(data._zaxis, data._zunit)
+    else:
+        return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \
+                        "plottable2d data object"
+        return return_string
+
+    final_dataset.xmax = data.xmax
+    final_dataset.ymax = data.ymax
+    final_dataset.xmin = data.xmin
+    final_dataset.ymin = data.ymin
+    final_dataset.title = datainfo.title
+    final_dataset.run = datainfo.run
+    final_dataset.run_name = datainfo.run_name
+    final_dataset.filename = datainfo.filename
+    final_dataset.notes = datainfo.notes
+    final_dataset.process = datainfo.process
+    final_dataset.instrument = datainfo.instrument
+    final_dataset.detector = datainfo.detector
+    final_dataset.sample = datainfo.sample
+    final_dataset.source = datainfo.source
+    final_dataset.collimation = datainfo.collimation
+    final_dataset.trans_spectrum = datainfo.trans_spectrum
+    final_dataset.meta_data = datainfo.meta_data
+    final_dataset.errors = datainfo.errors
+    return final_dataset
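The new module-level helper makes the split between raw plottables and file metadata explicit: a reader can build a bare plottable_1D/plottable_2D and a DataInfo independently, then merge them once at the end. A minimal usage sketch (the values and metadata are invented for illustration; the min/max bounds are set on the plottable first, since the helper copies them across):

    import numpy as np
    from sas.sascalc.dataloader.data_info import (plottable_1D, DataInfo,
                                                  combine_data_info_with_plottable)

    # A bare plottable holding only the measured arrays
    plottable = plottable_1D(np.array([0.01, 0.02, 0.03]), np.array([100.0, 80.0, 60.0]))
    plottable.xmin, plottable.xmax = plottable.x.min(), plottable.x.max()
    plottable.ymin, plottable.ymax = plottable.y.min(), plottable.y.max()

    # Metadata collected separately while walking the file
    info = DataInfo()
    info.title = "example run"     # hypothetical metadata
    info.filename = "example.h5"   # hypothetical file name

    # Merge the two into a fully specified Data1D
    data1d = combine_data_info_with_plottable(plottable, info)
    print("%s: %d points" % (data1d.title, len(data1d.x)))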
src/sas/sascalc/dataloader/readers/cansas_reader.py
(r8976865 → r9b7c596)

@@ -34 +34 @@
 from xml.dom.minidom import parseString
 
+## TODO: Refactor to load multiple <SASData> as separate Data1D objects
+## TODO: Refactor to allow invalid XML, but give a useful warning when loaded
+
 _ZERO = 1e-16
 PREPROCESS = "xmlpreprocess"
@@ -133 +136 @@
         return False
 
-    def load_file_and_schema(self, xml_file):
+    def load_file_and_schema(self, xml_file, schema_path=""):
         """
         Loads the file and associates a schema, if a known schema exists
@@ -149 +152 @@
         # Generic values for the cansas file based on the version
         cansas_defaults = CANSAS_NS.get(self.cansas_version, "1.0")
-        schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format\
+        if schema_path == "":
+            schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format\
             (base, cansas_defaults.get("schema")).replace("\\", "/")
@@ -156 +160 @@
         return cansas_defaults
 
-    def read(self, xml_file):
+    ## TODO: Test loading invalid CanSAS XML files and see if this works
+    ## TODO: Once works, try adding a warning that the data is invalid
+    def read(self, xml_file, schema_path=""):
         """
         Validate and read in an xml_file file in the canSAS format.
@@ -174 +180 @@
         if extension in self.ext or self.allow_all:
             # Get the file location of
-            cansas_defaults = self.load_file_and_schema(xml_file)
+            cansas_defaults = self.load_file_and_schema(xml_file, schema_path)
 
             # Try to load the file, but raise an error if unable to.
@@ -225 +231 @@
             except:
                 # If the file does not match the schema, raise this error
-                raise RuntimeError, "%s cannot be read" % xml_file
+                schema_path = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid.xsd"
+                invalid_xml = self.find_invalid_xml()
+                invalid_xml = "\n\nThe loaded xml file does not fully meet the CanSAS v1.x specification. SasView " + \
+                              "loaded as much of the data as possible.\n\n" + invalid_xml
+                self.errors.add(invalid_xml)
+                self.set_schema(schema_path)
+                if self.is_cansas():
+                    output = self.read(xml_file, schema_path)
+                else:
+                    raise RuntimeError, "%s cannot be read" % xml_file
         return output
         # Return a list of parsed entries that dataloader can manage
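Stripped of the reader plumbing, the new failure path is: validate against the strict CanSAS schema, and on failure re-validate against a permissive schema while recording a warning instead of aborting. A standalone lxml sketch of that pattern (the schema file names are placeholders, not the actual SasView paths):

    from lxml import etree

    def load_with_fallback(xml_file, strict_xsd, permissive_xsd):
        """Parse xml_file; validate strictly, else fall back and warn."""
        doc = etree.parse(xml_file)
        warnings = []
        strict = etree.XMLSchema(etree.parse(strict_xsd))
        if not strict.validate(doc):
            permissive = etree.XMLSchema(etree.parse(permissive_xsd))
            if not permissive.validate(doc):
                raise RuntimeError("%s cannot be read" % xml_file)
            warnings.append("File does not fully meet the CanSAS v1.x "
                            "specification; loading as much data as possible.")
        return doc, warnings

The real reader additionally re-invokes read() with the fallback schema, so the data is parsed a second time under the looser rules.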
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
(rd398285 → rd72567e)

@@ -9 +9 @@
 import sys
 
-from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, Sample, Source
-from sas.sascalc.dataloader.data_info import Process, Aperture, Collimation, TransmissionSpectrum, Detector
+from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, DataInfo, Process, Aperture
+from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector
+from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
+
@@ -18 +20 @@
     with file extension .h5/.H5. Any number of data sets may be present within the file and any dimensionality of data
     may be used. Currently 1D and 2D SAS data sets are supported, but future implementations will include 1D and 2D
-    SESANS data. This class assumes a single data set for each sasentry.
+    SESANS data.
+
+    Any number of SASdata sets may be present in a SASentry and the data within can be either 1D I(Q) or 2D I(Qx, Qy).
 
     :Dependencies:
-        The CanSAS HDF5 reader requires h5py v2.5.0 or later.
+        The CanSAS HDF5 reader requires h5py => v2.5.0 or later.
@@ -32 +36 @@
     ## Raw file contents to be processed
     raw_data = None
-    ## Data set being modified
+    ## Data info currently being read in
+    current_datainfo = None
+    ## SASdata set currently being read in
     current_dataset = None
-    ## For recursion and saving purposes, remember parent objects
-    parent_list = None
+    ## List of plottable1D objects that should be linked to the current_datainfo
+    data1d = None
+    ## List of plottable2D objects that should be linked to the current_datainfo
+    data2d = None
     ## Data type name
     type_name = "CanSAS 2.0"
@@ -47 +55 @@
     output = None
 
-    def __init__(self):
-        """
-        Create the reader object and define initial states for class variables
-        """
-        self.current_dataset = None
-        self.datasets = []
-        self.raw_data = None
-        self.errors = set()
-        self.logging = []
-        self.parent_list = []
-        self.output = []
-        self.detector = Detector()
-        self.collimation = Collimation()
-        self.aperture = Aperture()
-        self.process = Process()
-        self.sample = Sample()
-        self.source = Source()
-        self.trans_spectrum = TransmissionSpectrum()
-
     def read(self, filename):
@@ -71 +60 @@
 
         :param filename: A path for an HDF5 formatted CanSAS 2D data file.
-        :return: List of Data1D/2D objects or a list of errors.
+        :return: List of Data1D/2D objects and/or a list of errors.
         """
 
         ## Reinitialize the class when loading a new data file to reset all class variables
-        self.__init__()
+        self.reset_class_variables()
         ## Check that the file exists
         if os.path.isfile(filename):
@@ -85 +74 @@
             self.raw_data = h5py.File(filename, 'r')
             ## Read in all child elements of top level SASroot
-            self.read_children(self.raw_data)
+            self.read_children(self.raw_data, [])
             ## Add the last data set to the list of outputs
             self.add_data_set()
@@ -91 +80 @@
         return self.output
 
-    def read_children(self, data, parent=u'SASroot'):
+    def reset_class_variables(self):
+        """
+        Create the reader object and define initial states for class variables
+        """
+        self.current_datainfo = None
+        self.current_dataset = None
+        self.data1d = []
+        self.data2d = []
+        self.raw_data = None
+        self.errors = set()
+        self.logging = []
+        self.output = []
+        self.parent_class = u''
+        self.detector = Detector()
+        self.collimation = Collimation()
+        self.aperture = Aperture()
+        self.process = Process()
+        self.trans_spectrum = TransmissionSpectrum()
+
+    def read_children(self, data, parent_list):
         """
         A recursive method for stepping through the hierarchical data file.
@@ -97 +105 @@
         :param data: h5py Group object of any kind
         :param parent: h5py Group parent name
-        :return: None
-        """
-
-        ## Create regex for base sasentry and for parent
-        parent_prog = re.compile(parent)
+        """
 
         ## Loop through each element of the parent and process accordingly
@@ -107 +111 @@
             ## Get all information for the current key
             value = data.get(key)
-            attr_keys = value.attrs.keys()
-            attr_values = value.attrs.values()
             if value.attrs.get(u'canSAS_class') is not None:
                 class_name = value.attrs.get(u'canSAS_class')
@@ -119 +121 @@
 
             if isinstance(value, h5py.Group):
-                ##TODO: Rework this for multiple SASdata objects within a single SASentry to allow for both 1D and 2D
-                ##TODO: data within the same SASentry - One 1D and one 2D data object for all SASdata sets?
-                ## If this is a new sasentry, store the current data set and create a fresh Data1D/2D object
+                self.parent_class = class_name
+                parent_list.append(key)
+                ## If this is a new sasentry, store the current data sets and create a fresh Data1D/2D object
                 if class_prog.match(u'SASentry'):
                     self.add_data_set(key)
+                elif class_prog.match(u'SASdata'):
+                    self._initialize_new_data_set(parent_list)
                 ## Recursion step to access data within the group
-                self.read_children(value, class_name)
-                self.add_intermediate(class_name)
+                self.read_children(value, parent_list)
+                self.add_intermediate()
+                parent_list.remove(key)
 
             elif isinstance(value, h5py.Dataset):
@@ -136 +141 @@
                 unit = self._get_unit(value)
                 if key == u'definition':
-                    self.current_dataset.meta_data['reader'] = data_point
+                    self.current_datainfo.meta_data['reader'] = data_point
                 elif key == u'run':
-                    self.current_dataset.run.append(data_point)
+                    self.current_datainfo.run.append(data_point)
                 elif key == u'title':
-                    self.current_dataset.title = data_point
+                    self.current_datainfo.title = data_point
                 elif key == u'SASnote':
-                    self.current_dataset.notes.append(data_point)
+                    self.current_datainfo.notes.append(data_point)
 
                 ## I and Q Data
                 elif key == u'I':
-                    if type(self.current_dataset) is Data2D:
+                    if type(self.current_dataset) is plottable_2D:
                         self.current_dataset.data = np.append(self.current_dataset.data, data_point)
                         self.current_dataset.zaxis("Intensity", unit)
@@ -153 +158 @@
                         self.current_dataset.yaxis("Intensity", unit)
                 elif key == u'Idev':
-                    if type(self.current_dataset) is Data2D:
+                    if type(self.current_dataset) is plottable_2D:
                         self.current_dataset.err_data = np.append(self.current_dataset.err_data, data_point)
                     else:
@@ -159 +164 @@
                 elif key == u'Q':
                     self.current_dataset.xaxis("Q", unit)
-                    if type(self.current_dataset) is Data2D:
+                    if type(self.current_dataset) is plottable_2D:
                         self.current_dataset.q = np.append(self.current_dataset.q, data_point)
                     else:
@@ -177 +182 @@
 
                 ## Sample Information
-                elif key == u'Title' and parent == u'SASsample':
-                    self.sample.name = data_point
-                elif key == u'thickness' and parent == u'SASsample':
-                    self.sample.thickness = data_point
-                elif key == u'temperature' and parent == u'SASsample':
-                    self.sample.temperature = data_point
+                elif key == u'Title' and self.parent_class == u'SASsample':
+                    self.current_datainfo.sample.name = data_point
+                elif key == u'thickness' and self.parent_class == u'SASsample':
+                    self.current_datainfo.sample.thickness = data_point
+                elif key == u'temperature' and self.parent_class == u'SASsample':
+                    self.current_datainfo.sample.temperature = data_point
 
                 ## Instrumental Information
-                elif key == u'name' and parent == u'SASinstrument':
-                    self.current_dataset.instrument = data_point
-                elif key == u'name' and parent == u'SASdetector':
+                elif key == u'name' and self.parent_class == u'SASinstrument':
+                    self.current_datainfo.instrument = data_point
+                elif key == u'name' and self.parent_class == u'SASdetector':
                     self.detector.name = data_point
-                elif key == u'SDD' and parent == u'SASdetector':
-                    self.detector.distance = data_point
+                elif key == u'SDD' and self.parent_class == u'SASdetector':
+                    self.detector.distance = float(data_point)
                     self.detector.distance_unit = unit
-                elif key == u'SSD' and parent == u'SAScollimation':
+                elif key == u'SSD' and self.parent_class == u'SAScollimation':
                     self.collimation.length = data_point
                     self.collimation.length_unit = unit
-                elif key == u'name' and parent == u'SAScollimation':
+                elif key == u'name' and self.parent_class == u'SAScollimation':
                     self.collimation.name = data_point
 
                 ## Process Information
-                elif key == u'name' and parent == u'SASprocess':
+                elif key == u'name' and self.parent_class == u'SASprocess':
                     self.process.name = data_point
-                elif key == u'Title' and parent == u'SASprocess':
+                elif key == u'Title' and self.parent_class == u'SASprocess':
                     self.process.name = data_point
-                elif key == u'description' and parent == u'SASprocess':
+                elif key == u'description' and self.parent_class == u'SASprocess':
                     self.process.description = data_point
-                elif key == u'date' and parent == u'SASprocess':
+                elif key == u'date' and self.parent_class == u'SASprocess':
                     self.process.date = data_point
-                elif parent == u'SASprocess':
+                elif self.parent_class == u'SASprocess':
                     self.process.notes.append(data_point)
 
                 ## Transmission Spectrum
-                elif key == u'T' and parent == u'SAStransmission_spectrum':
+                elif key == u'T' and self.parent_class == u'SAStransmission_spectrum':
                     self.trans_spectrum.transmission.append(data_point)
-                elif key == u'Tdev' and parent == u'SAStransmission_spectrum':
+                elif key == u'Tdev' and self.parent_class == u'SAStransmission_spectrum':
                     self.trans_spectrum.transmission_deviation.append(data_point)
-                elif key == u'lambda' and parent == u'SAStransmission_spectrum':
+                elif key == u'lambda' and self.parent_class == u'SAStransmission_spectrum':
                     self.trans_spectrum.wavelength.append(data_point)
 
                 ## Other Information
-                elif key == u'wavelength' and parent == u'SASdata':
-                    self.source.wavelength = data_point
-                    self.source.wavelength.unit = unit
-                elif key == u'radiation' and parent == u'SASsource':
-                    self.source.radiation = data_point
-                elif key == u'transmission' and parent == u'SASdata':
-                    self.sample.transmission = data_point
+                elif key == u'wavelength' and self.parent_class == u'SASdata':
+                    self.current_datainfo.source.wavelength = data_point
+                    self.current_datainfo.source.wavelength.unit = unit
+                elif key == u'radiation' and self.parent_class == u'SASsource':
+                    self.current_datainfo.source.radiation = data_point
+                elif key == u'transmission' and self.parent_class == u'SASdata':
+                    self.current_datainfo.sample.transmission = data_point
 
                 ## Everything else goes in meta_data
                 else:
-                    new_key = self._create_unique_key(self.current_dataset.meta_data, key)
-                    self.current_dataset.meta_data[new_key] = data_point
+                    new_key = self._create_unique_key(self.current_datainfo.meta_data, key)
+                    self.current_datainfo.meta_data[new_key] = data_point
 
             else:
@@ -236 +241 @@
                 self.errors.add("ShouldNeverHappenException")
 
-    def add_intermediate(self, parent):
+    def add_intermediate(self):
         """
         This method stores any intermediate objects within the final data set after fully reading the set.
 
         :param parent: The NXclass name for the h5py Group object that just finished being processed
-        :return:
-        """
-
-        if parent == u'SASprocess':
-            self.current_dataset.process.append(self.process)
+        """
+
+        if self.parent_class == u'SASprocess':
+            self.current_datainfo.process.append(self.process)
             self.process = Process()
-        elif parent == u'SASdetector':
-            self.current_dataset.detector.append(self.detector)
+        elif self.parent_class == u'SASdetector':
+            self.current_datainfo.detector.append(self.detector)
             self.detector = Detector()
-        elif parent == u'SAStransmission_spectrum':
-            self.current_dataset.trans_spectrum.append(self.trans_spectrum)
+        elif self.parent_class == u'SAStransmission_spectrum':
+            self.current_datainfo.trans_spectrum.append(self.trans_spectrum)
             self.trans_spectrum = TransmissionSpectrum()
-        elif parent == u'SASsource':
-            self.current_dataset.source = self.source
-            self.source = Source()
-        elif parent == u'SASsample':
-            self.current_dataset.sample = self.sample
-            self.sample = Sample()
-        elif parent == u'SAScollimation':
-            self.current_dataset.collimation.append(self.collimation)
+        elif self.parent_class == u'SAScollimation':
+            self.current_datainfo.collimation.append(self.collimation)
             self.collimation = Collimation()
-        elif parent == u'SASaperture':
+        elif self.parent_class == u'SASaperture':
             self.collimation.aperture.append(self.aperture)
             self.aperture = Aperture()
+        elif self.parent_class == u'SASdata':
+            if type(self.current_dataset) is plottable_2D:
+                self.data2d.append(self.current_dataset)
+            elif type(self.current_dataset) is plottable_1D:
+                self.data1d.append(self.current_dataset)
 
     def final_data_cleanup(self):
         """
-        Does some final cleanup and formatting on self.current_dataset
-        """
-
-        ## Type cast data arrays to float64 and find min/max as appropriate
-        if type(self.current_dataset) is Data2D:
-            self.current_dataset.data = np.delete(self.current_dataset.data, [0])
-            self.current_dataset.data = self.current_dataset.data.astype(np.float64)
-            self.current_dataset.err_data = np.delete(self.current_dataset.err_data, [0])
-            self.current_dataset.err_data = self.current_dataset.err_data.astype(np.float64)
-            self.current_dataset.mask = np.delete(self.current_dataset.mask, [0])
-            if self.current_dataset.qx_data is not None:
-                self.current_dataset.qx_data = np.delete(self.current_dataset.qx_data, [0])
-                self.current_dataset.xmin = np.min(self.current_dataset.qx_data)
-                self.current_dataset.xmax = np.max(self.current_dataset.qx_data)
-                self.current_dataset.qx_data = self.current_dataset.qx_data.astype(np.float64)
-            if self.current_dataset.dqx_data is not None:
-                self.current_dataset.dqx_data = np.delete(self.current_dataset.dqx_data, [0])
-                self.current_dataset.dqx_data = self.current_dataset.dqx_data.astype(np.float64)
-            if self.current_dataset.qy_data is not None:
-                self.current_dataset.qy_data = np.delete(self.current_dataset.qy_data, [0])
-                self.current_dataset.ymin = np.min(self.current_dataset.qy_data)
-                self.current_dataset.ymax = np.max(self.current_dataset.qy_data)
-                self.current_dataset.qy_data = self.current_dataset.qy_data.astype(np.float64)
-            if self.current_dataset.dqy_data is not None:
-                self.current_dataset.dqy_data = np.delete(self.current_dataset.dqy_data, [0])
-                self.current_dataset.dqy_data = self.current_dataset.dqy_data.astype(np.float64)
-            if self.current_dataset.q_data is not None:
-                self.current_dataset.q_data = np.delete(self.current_dataset.q_data, [0])
-                self.current_dataset.q_data = self.current_dataset.q_data.astype(np.float64)
-            zeros = np.ones(self.current_dataset.data.size, dtype=bool)
-            try:
-                for i in range(0, self.current_dataset.mask.size - 1):
-                    zeros[i] = self.current_dataset.mask[i]
-            except:
-                self.errors.add(sys.exc_value)
-            self.current_dataset.mask = zeros
-
-            ## Calculate the actual Q matrix
-            try:
-                if self.current_dataset.q_data.size <= 1:
-                    self.current_dataset.q_data = np.sqrt(self.current_dataset.qx_data * self.current_dataset.qx_data +
-                                                          self.current_dataset.qy_data * self.current_dataset.qy_data)
-            except:
-                self.current_dataset.q_data = None
-
-        elif type(self.current_dataset) is Data1D:
-            if self.current_dataset.x is not None:
-                self.current_dataset.x = np.delete(self.current_dataset.x, [0])
-                self.current_dataset.x = self.current_dataset.x.astype(np.float64)
-                self.current_dataset.xmin = np.min(self.current_dataset.x)
-                self.current_dataset.xmax = np.max(self.current_dataset.x)
-            if self.current_dataset.y is not None:
-                self.current_dataset.y = np.delete(self.current_dataset.y, [0])
-                self.current_dataset.y = self.current_dataset.y.astype(np.float64)
-                self.current_dataset.ymin = np.min(self.current_dataset.y)
-                self.current_dataset.ymax = np.max(self.current_dataset.y)
-            if self.current_dataset.dx is not None:
-                self.current_dataset.dx = np.delete(self.current_dataset.dx, [0])
-                self.current_dataset.dx = self.current_dataset.dx.astype(np.float64)
-            if self.current_dataset.dxl is not None:
-                self.current_dataset.dxl = np.delete(self.current_dataset.dxl, [0])
-                self.current_dataset.dxl = self.current_dataset.dxl.astype(np.float64)
-            if self.current_dataset.dxw is not None:
-                self.current_dataset.dxw = np.delete(self.current_dataset.dxw, [0])
-                self.current_dataset.dxw = self.current_dataset.dxw.astype(np.float64)
-            if self.current_dataset.dy is not None:
-                self.current_dataset.dy = np.delete(self.current_dataset.dy, [0])
-                self.current_dataset.dy = self.current_dataset.dy.astype(np.float64)
-
-        if len(self.current_dataset.trans_spectrum) is not 0:
+        Does some final cleanup and formatting on self.current_datainfo and all data1D and data2D objects and then
+        combines the data and info into Data1D and Data2D objects
+        """
+
+        ## Type cast data arrays to float64
+        if len(self.current_datainfo.trans_spectrum) > 0:
             spectrum_list = []
-            for spectrum in self.current_dataset.trans_spectrum:
+            for spectrum in self.current_datainfo.trans_spectrum:
                 spectrum.transmission = np.delete(spectrum.transmission, [0])
                 spectrum.transmission = spectrum.transmission.astype(np.float64)
@@ -346 +285 @@
                 spectrum.wavelength = np.delete(spectrum.wavelength, [0])
                 spectrum.wavelength = spectrum.wavelength.astype(np.float64)
-                spectrum_list.append(spectrum)
-            self.current_dataset.trans_spectrum = spectrum_list
-
-        else:
-            self.errors.add("ShouldNeverHappenException")
-
-        ## Append intermediate objects to data
-        self.current_dataset.sample = self.sample
-        self.current_dataset.source = self.source
-        self.current_dataset.collimation.append(self.collimation)
+                if len(spectrum.transmission) > 0:
+                    spectrum_list.append(spectrum)
+            self.current_datainfo.trans_spectrum = spectrum_list
 
         ## Append errors to dataset and reset class errors
-        self.current_dataset.errors = self.errors
+        self.current_datainfo.errors = self.errors
         self.errors.clear()
+
+        ## Combine all plottables with datainfo and append each to output
+        ## Type cast data arrays to float64 and find min/max as appropriate
+        for dataset in self.data2d:
+            dataset.data = np.delete(dataset.data, [0])
+            dataset.data = dataset.data.astype(np.float64)
+            dataset.err_data = np.delete(dataset.err_data, [0])
+            dataset.err_data = dataset.err_data.astype(np.float64)
+            dataset.mask = np.delete(dataset.mask, [0])
+            if dataset.qx_data is not None:
+                dataset.qx_data = np.delete(dataset.qx_data, [0])
+                dataset.xmin = np.min(dataset.qx_data)
+                dataset.xmax = np.max(dataset.qx_data)
+                dataset.qx_data = dataset.qx_data.astype(np.float64)
+            if dataset.dqx_data is not None:
+                dataset.dqx_data = np.delete(dataset.dqx_data, [0])
+                dataset.dqx_data = dataset.dqx_data.astype(np.float64)
+            if dataset.qy_data is not None:
+                dataset.qy_data = np.delete(dataset.qy_data, [0])
+                dataset.ymin = np.min(dataset.qy_data)
+                dataset.ymax = np.max(dataset.qy_data)
+                dataset.qy_data = dataset.qy_data.astype(np.float64)
+            if dataset.dqy_data is not None:
+                dataset.dqy_data = np.delete(dataset.dqy_data, [0])
+                dataset.dqy_data = dataset.dqy_data.astype(np.float64)
+            if dataset.q_data is not None:
+                dataset.q_data = np.delete(dataset.q_data, [0])
+                dataset.q_data = dataset.q_data.astype(np.float64)
+            zeros = np.ones(dataset.data.size, dtype=bool)
+            try:
+                for i in range(0, dataset.mask.size - 1):
+                    zeros[i] = dataset.mask[i]
+            except:
+                self.errors.add(sys.exc_value)
+            dataset.mask = zeros
+            ## Calculate the actual Q matrix
+            try:
+                if dataset.q_data.size <= 1:
+                    dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data + dataset.qy_data * dataset.qy_data)
+            except:
+                dataset.q_data = None
+            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
+            self.output.append(final_dataset)
+
+        for dataset in self.data1d:
+            if dataset.x is not None:
+                dataset.x = np.delete(dataset.x, [0])
+                dataset.x = dataset.x.astype(np.float64)
+                dataset.xmin = np.min(dataset.x)
+                dataset.xmax = np.max(dataset.x)
+            if dataset.y is not None:
+                dataset.y = np.delete(dataset.y, [0])
+                dataset.y = dataset.y.astype(np.float64)
+                dataset.ymin = np.min(dataset.y)
+                dataset.ymax = np.max(dataset.y)
+            if dataset.dx is not None:
+                dataset.dx = np.delete(dataset.dx, [0])
+                dataset.dx = dataset.dx.astype(np.float64)
+            if dataset.dxl is not None:
+                dataset.dxl = np.delete(dataset.dxl, [0])
+                dataset.dxl = dataset.dxl.astype(np.float64)
+            if dataset.dxw is not None:
+                dataset.dxw = np.delete(dataset.dxw, [0])
+                dataset.dxw = dataset.dxw.astype(np.float64)
+            if dataset.dy is not None:
+                dataset.dy = np.delete(dataset.dy, [0])
+                dataset.dy = dataset.dy.astype(np.float64)
+            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
+            self.output.append(final_dataset)
@@ -363 +364 @@
     def add_data_set(self, key=""):
@@ -367 +368 @@
 
         :param key: NeXus group name for current tree level
-        :return: None
-        """
-        if self.current_dataset is not None:
+        """
+
+        if self.current_datainfo and self.current_dataset:
             self.final_data_cleanup()
-            self.output.append(self.current_dataset)
-        self._initialize_new_data_set(key)
-
-    def _initialize_new_data_set(self, key=""):
+        self.data1d = []
+        self.data2d = []
+        self.current_datainfo = DataInfo()
+
+    def _initialize_new_data_set(self, parent_list = None):
         """
         A private class method to generate a new 1D or 2D data object based on the type of data within the set.
         Outside methods should call add_data_set() to be sure any existing data is stored properly.
 
-        :param key: NeXus group name for current tree level
-        :return: None
-        """
-        entry = self._find_intermediate(key, "sasentry*")
-        data = entry.get("sasdata")
-        if data.get("Qx") is not None:
-            self.current_dataset = Data2D()
+        :param parent_list: List of names of parent elements
+        """
+
+        if parent_list is None:
+            parent_list = []
+        if self._find_intermediate(parent_list, "Qx"):
+            self.current_dataset = plottable_2D()
         else:
             x = np.array(0)
             y = np.array(0)
-            self.current_dataset = Data1D(x, y)
-        self.current_dataset.filename = self.raw_data.filename
-
-    def _find_intermediate(self, key="", basename=""):
+            self.current_dataset = plottable_1D(x, y)
+        self.current_datainfo.filename = self.raw_data.filename
+
+    def _find_intermediate(self, parent_list, basename=""):
         """
         A private class used to find an entry by either using a direct key or knowing the approximate basename.
 
-        :param key: Exact keyname of an entry
-        :param basename: Approximate name of an entry
+        :param parent_list: List of parents to the current level in the HDF5 file
+        :param basename: Approximate name of an entry to search for
         :return:
         """
-        entry = []
-        if key is not "":
-            entry = self.raw_data.get(key)
-        else:
-            key_prog = re.compile(basename)
-            for key in self.raw_data.keys():
-                if (key_prog.match(key)):
-                    entry = self.raw_data.get(key)
-                    break
+
+        entry = False
+        key_prog = re.compile(basename)
+        top = self.raw_data
+        for parent in parent_list:
+            top = top.get(parent)
+        for key in top.keys():
+            if (key_prog.match(key)):
+                entry = True
+                break
         return entry
@@ -419 +422 @@
 
         :param name: The index of the item to be added to dictionary
         :param numb: The number to be appended to the name, starts at 0
+        :return: The new name for the dictionary entry
         """
         if dictionary.get(name) is not None:
@@ -432 +436 @@
 
         :param value: attribute dictionary for a particular value set
-        :return:
+        :return: unit for the value passed to the method
         """
         unit = value.attrs.get(u'units')
         if unit == None:
             unit = value.attrs.get(u'unit')
-
         ## Convert the unit formats
         if unit == "1/A":
@@ -443 +446 @@
         elif unit == "1/cm":
             unit = "cm^{-1}"
-
         return unit
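The central structural change above is that the recursion now carries an explicit list of parent group keys instead of a single parent name, pushing a key before each descent and popping it afterwards. That traversal can be sketched independently of the SasView classes (the file name and callback below are hypothetical):

    import h5py

    def walk(node, parent_list, visit):
        """Recursively visit datasets, tracking the group-key path."""
        for key in node.keys():
            value = node.get(key)
            if isinstance(value, h5py.Group):
                parent_list.append(key)
                walk(value, parent_list, visit)
                parent_list.pop()        # same push/pop bookkeeping as the reader
            elif isinstance(value, h5py.Dataset):
                visit(parent_list, key, value)

    def show(path, key, dataset):
        print("%s/%s: %s" % ("/".join(path), key, str(dataset.shape)))

    with h5py.File("example.h5", "r") as f:   # hypothetical file
        walk(f, [], show)

Keeping the whole ancestry available is what lets _find_intermediate test for a "Qx" entry at the correct level, so a SASentry can now mix 1D and 2D SASdata groups.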
test/sasdataloader/test/utest_cansas.py
(rb699768 → r83b6408)

@@ -2 +2 @@
 """
 Unit tests for the new recursive cansas reader
 """
-import logging
-import warnings
-warnings.simplefilter("ignore")
-
 import sas.sascalc.dataloader.readers.cansas_reader as cansas
 from sas.sascalc.dataloader.loader import Loader
-from sas.sascalc.dataloader.data_info import Data1D
+from sas.sascalc.dataloader.data_info import Data1D, Data2D
 from sas.sascalc.dataloader.readers.xml_reader import XMLreader
 from sas.sascalc.dataloader.readers.cansas_reader import Reader
@@ -20 +16 @@
 import unittest
 import numpy
+import logging
+import warnings
 
 from lxml import etree
 from xml.dom import minidom
 
+warnings.simplefilter("ignore")
+
 CANSAS_FORMAT = CansasConstants.CANSAS_FORMAT
 CANSAS_NS = CansasConstants.CANSAS_NS
 
-class cansas_reader(unittest.TestCase):
+class cansas_reader_xml(unittest.TestCase):
 
     def setUp(self):
         self.loader = Loader()
@@ -37 +37 @@
         self.cansas1d_slit = "cansas1d_slit.xml"
         self.cansas1d_units = "cansas1d_units.xml"
+        self.cansas1d_notitle = "cansas1d_notitle.xml"
         self.isis_1_0 = "ISIS_1_0.xml"
         self.isis_1_1 = "ISIS_1_1.xml"
@@ -169 +171 @@
         reader7 = XMLreader(self.isis_1_1, self.schema_1_0)
         self.assertFalse(reader7.validate_xml())
 
+    def test_invalid_cansas(self):
+        list = self.loader.load(self.cansas1d_notitle)
+        data = list[0]
+        self.assertTrue(data.x.size == 2)
+        self.assertTrue(len(data.meta_data) == 3)
+        self.assertTrue(len(data.errors) == 1)
+        self.assertTrue(data.detector[0].distance_unit == "mm")
+        self.assertTrue(data.detector[0].name == "fictional hybrid")
+        self.assertTrue(data.detector[0].distance == 4150)
+
     def test_old_cansas_files(self):
         reader1 = XMLreader(self.cansas1d, self.schema_1_0)
@@ -249 +261 @@
         return dict
 
+
+class cansas_reader_hdf5(unittest.TestCase):
+
+    def setUp(self):
+        self.loader = Loader()
+        self.datafile_basic = "simpleexamplefile.h5"
+        self.datafile_multiplesasentry = "cansas_1Dand2D_samedatafile.h5"
+        self.datafile_multiplesasdata = "cansas_1Dand2D_samesasentry.h5"
+        self.datafile_multiplesasdata_multiplesasentry = "cansas_1Dand2D_multiplesasentry_multiplesasdata.h5"
+
+    def test_real_data(self):
+        self.data = self.loader.load(self.datafile_basic)
+        self._check_example_data(self.data[0])
+
+    def test_multiple_sasentries(self):
+        self.data = self.loader.load(self.datafile_multiplesasentry)
+        self.assertTrue(len(self.data) == 2)
+        self._check_multiple_data(self.data[0])
+        self._check_multiple_data(self.data[1])
+        self._check_1d_data(self.data[0])
+
+    def _check_multiple_data(self, data):
+        self.assertTrue(data.title == "MH4_5deg_16T_SLOW")
+        self.assertTrue(data.run[0] == '33837')
+        self.assertTrue(len(data.run) == 1)
+        self.assertTrue(data.instrument == "SANS2D")
+        self.assertTrue(data.source.radiation == "Spallation Neutron Source")
+        self.assertTrue(len(data.detector) == 1)
+        self.assertTrue(data.detector[0].name == "rear-detector")
+        self.assertTrue(data.detector[0].distance == 4.385281)
+        self.assertTrue(data.detector[0].distance_unit == 'm')
+        self.assertTrue(len(data.trans_spectrum) == 1)
+
+    def _check_1d_data(self, data):
+        self.assertTrue(isinstance(data, Data1D))
+        self.assertTrue(len(data.x) == 66)
+        self.assertTrue(len(data.x) == len(data.y))
+        self.assertTrue(data.dy[10] == 0.20721350111248701)
+        self.assertTrue(data.y[10] == 24.193889608153476)
+        self.assertTrue(data.x[10] == 0.008981127988654792)
+
+    def _check_2d_data(self, data):
+        self.assertTrue(isinstance(data, Data2D))
+        self.assertTrue(len(data.x) == 66)
+        self.assertTrue(len(data.x) == len(data.y))
+        self.assertTrue(data.dy[10] == 0.20721350111248701)
+        self.assertTrue(data.y[10] == 24.193889608153476)
+        self.assertTrue(data.x[10] == 0.008981127988654792)
+
+    def _check_example_data(self, data):
+        self.assertTrue(data.title == "")
+        self.assertTrue(data.x.size == 100)
+        self.assertTrue(data._xunit == "A^{-1}")
+        self.assertTrue(data._yunit == "cm^{-1}")
+        self.assertTrue(data.y.size == 100)
+        self.assertAlmostEqual(data.y[9], 0.952749011516985)
+        self.assertAlmostEqual(data.x[9], 0.3834415188257777)
+        self.assertAlmostEqual(len(data.meta_data), 0)
+
 
 if __name__ == '__main__':
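Both test classes follow the stock unittest layout, so assuming the data files named in setUp are present in the test directory, a typical local invocation would be:

    python -m unittest utest_cansas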
.gitignore
(re04f87b → rdf332d8)

@@ -50 +50 @@
 /test/sasdataloader/test/plugins.zip
 /test/sasdataloader/test/test_log.txt
+/test/sasdataloader/test/isis_1_0_write_test.xml
+/test/sasdataloader/test/isis_1_1_write_test.xml
+/test/sasdataloader/test/write_test.xml
 
 # autogenerated scripts
 /sasview/installer.iss
-
docs/sphinx-docs/source/user/tools.rst
(r8f46df7 → reb8da5f)

@@ -9 +9 @@
    Data Operations Utility <sasgui/perspectives/calculator/data_operator_help>
 
    Density/Volume Calculator <sasgui/perspectives/calculator/density_calculator_help>
 
    Generic SANS Calculator <sasgui/perspectives/calculator/sas_calculator_help>
 
    Image Viewer <sasgui/perspectives/calculator/image_viewer_help>
 
    Kiessig Thickness Calculator <sasgui/perspectives/calculator/kiessig_calculator_help>
 
    SLD Calculator <sasgui/perspectives/calculator/sld_calculator_help>
 
    Slit Size Calculator <sasgui/perspectives/calculator/slit_calculator_help>
 
    Q Resolution Estimator <sasgui/perspectives/calculator/resolution_calculator_help>
 
    Python Shell <sasgui/perspectives/calculator/python_shell_help>
 
+   File Converter <sasgui/perspectives/file_converter/file_converter_help>
run.py
(r832fea2 → r18e7309)

@@ -123 +123 @@
     # Compiled modules need to be pulled from the build directory.
     # Some packages are not where they are needed, so load them explicitly.
+    import sas.sascalc.file_converter
+    sas.sascalc.file_converter.core = import_package('sas.sascalc.file_converter.core',
+        joinpath(build_path, 'sas', 'sascalc', 'file_converter', 'core'))
+
+    # Compiled modules need to be pulled from the build directory.
+    # Some packages are not where they are needed, so load them explicitly.
     import sas.sascalc.calculator
     sas.sascalc.calculator.core = import_package('sas.sascalc.calculator.core',
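For orientation, pulling a compiled subpackage out of the build tree amounts to executing its __init__.py under the dotted module name and pointing __path__ at the build directory, so later submodule imports resolve there. A hedged sketch of such a helper (an illustration of the idea only; run.py's actual import_package may differ in detail):

    import imp
    import os
    import sys

    def import_package(modname, path):
        """Load the package at `path` and register it as `modname`."""
        init_file = os.path.abspath(os.path.join(path, '__init__.py'))
        mod = imp.load_source(modname, init_file)   # execute the package __init__
        sys.modules[modname] = mod                  # register under the dotted name
        mod.__path__ = [os.path.abspath(path)]      # submodule imports search here
        return mod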
sasview/sasview.py
(r1be5202 → r77d92cd)

@@ -152 +152 @@
         except:
             logging.error("%s: could not find Calculator plug-in module"% \
                                 APP_NAME)
             logging.error(traceback.format_exc())
+
+        # File converter tool
+        try:
+            import sas.sasgui.perspectives.file_converter as module
+            converter_plug = module.Plugin()
+            self.gui.add_perspective(converter_plug)
+        except:
+            logging.error("%s: could not find File Converter plug-in module"% \
+                                APP_NAME)
+            logging.error(traceback.format_exc())
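The File Converter perspective is registered with the same defensive pattern as the other perspectives: the import happens inside try/except, so a missing or broken plug-in degrades to a log entry instead of aborting startup. In generic form (the helper name is invented; the real code inlines this per plug-in):

    import logging
    import traceback

    def add_perspective_safely(gui, module_path, label):
        """Import a perspective module and register its Plugin, logging failures."""
        try:
            module = __import__(module_path, fromlist=['Plugin'])
            gui.add_perspective(module.Plugin())
        except Exception:
            logging.error("could not find %s plug-in module" % label)
            logging.error(traceback.format_exc())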
setup.py
(rdb74ee8 → r18e7309)

@@ -9 +9 @@
 from distutils.command.build_ext import build_ext
 from distutils.core import Command
+import numpy
 
 # Manage version number ######################################
@@ -217 +218 @@
                          include_dirs=[],
                          ) )
 
+# sas.sascalc.file_converter
+mydir = os.path.join("src", "sas", "sascalc", "file_converter", "c_ext")
+package_dir["sas.sascalc.file_converter.core"] = mydir
+package_dir["sas.sascalc.file_converter"] = os.path.join("src","sas", "sascalc", "file_converter")
+packages.extend(["sas.sascalc.file_converter","sas.sascalc.file_converter.core"])
+ext_modules.append( Extension("sas.sascalc.file_converter.core.bsl_loader",
+                              sources = [os.path.join(mydir, "bsl_loader.c")],
+                              include_dirs=[numpy.get_include()],
+                              ) )
+
 # sas.sascalc.fit
 package_dir["sas.sascalc.fit"] = os.path.join("src", "sas", "sascalc", "fit")
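With the bsl_loader extension registered in ext_modules (and numpy.get_include() supplying the NumPy headers), the standard distutils entry points compile it along with everything else; no separate build step is needed. For instance, a from-source build is the usual:

    python setup.py build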