Changeset 4abcc93a in sasview for src/sas/sascalc/dataloader
- Timestamp: Aug 19, 2016 9:03:25 AM (8 years ago)
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: b61bd57, b61796f
- Parents: 5f26aa4 (diff), a4deca6 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
- Location: src/sas/sascalc/dataloader
- Files: 3 edited
src/sas/sascalc/dataloader/data_info.py
rd72567e ra4deca6

@@ -445 +445 @@
        return len(self.name) == 0 and len(self.date) == 0 and len(self.description) == 0 \
            and len(self.term) == 0 and len(self.notes) == 0

    def single_line_desc(self):
        """
@@ -451 +451 @@
        """
        return "%s %s %s" % (self.name, self.date, self.description)

    def __str__(self):
        _str = "Process:\n"
@@ -1220 +1220 @@
        result.mask = numpy.append(self.mask, other.mask)
        if result.err_data is not None:
-            result.err_data = numpy.append(self.err_data, other.err_data)
+            result.err_data = numpy.append(self.err_data, other.err_data)
        if self.dqx_data is not None:
            result.dqx_data = numpy.append(self.dqx_data, other.dqx_data)
@@ -1252 +1252 @@
            final_dataset.yaxis(data._yaxis, data._yunit)
            final_dataset.zaxis(data._zaxis, data._zunit)
+            final_dataset.x_bins = data.x_bins
+            final_dataset.y_bins = data.y_bins
        else:
            return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \
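Apart from the hunks that show no visible text change, the substantive edit in data_info.py is at the bottom of the diff: when a 2-D plottable is combined with its DataInfo, the x_bins and y_bins lists are now copied onto the final dataset, so the flattened intensity array can be mapped back onto its Qx/Qy grid. A minimal sketch of why that matters, using plain numpy (the grid and values are made up for illustration; this is not the sasview API):

    import numpy as np

    # A hypothetical 3 x 4 Q grid whose intensity is stored flattened,
    # the way a 2-D plottable keeps it internally.
    x_bins = np.linspace(0.01, 0.04, 4)     # unique Qx values (columns)
    y_bins = np.linspace(0.1, 0.3, 3)       # unique Qy values (rows)
    qx_data, qy_data = np.meshgrid(x_bins, y_bins)
    data = np.arange(12, dtype=np.float64)  # flattened intensities

    # Because x_bins/y_bins are now carried over by
    # _combine_data_info_with_plottable, a consumer can restore the grid
    # shape without re-deriving it from the flattened q arrays:
    image = data.reshape(len(y_bins), len(x_bins))
    print(image.shape)                      # (3, 4)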
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rd72567e ra4deca6

@@ -62 +62 @@
        :return: List of Data1D/2D objects and/or a list of errors.
        """
-
        ## Reinitialize the class when loading a new data file to reset all class variables
        self.reset_class_variables()
@@ -136 +135 @@
                ## If this is a dataset, store the data appropriately
                data_set = data[key][:]
+                unit = self._get_unit(value)
+
+                ## I and Q Data
+                if key == u'I':
+                    if type(self.current_dataset) is plottable_2D:
+                        self.current_dataset.data = data_set
+                        self.current_dataset.zaxis("Intensity", unit)
+                    else:
+                        self.current_dataset.y = data_set.flatten()
+                        self.current_dataset.yaxis("Intensity", unit)
+                    continue
+                elif key == u'Idev':
+                    if type(self.current_dataset) is plottable_2D:
+                        self.current_dataset.err_data = data_set.flatten()
+                    else:
+                        self.current_dataset.dy = data_set.flatten()
+                    continue
+                elif key == u'Q':
+                    self.current_dataset.xaxis("Q", unit)
+                    if type(self.current_dataset) is plottable_2D:
+                        self.current_dataset.q = data_set.flatten()
+                    else:
+                        self.current_dataset.x = data_set.flatten()
+                    continue
+                elif key == u'Qy':
+                    self.current_dataset.yaxis("Q_y", unit)
+                    self.current_dataset.qy_data = data_set.flatten()
+                    continue
+                elif key == u'Qydev':
+                    self.current_dataset.dqy_data = data_set.flatten()
+                    continue
+                elif key == u'Qx':
+                    self.current_dataset.xaxis("Q_x", unit)
+                    self.current_dataset.qx_data = data_set.flatten()
+                    continue
+                elif key == u'Qxdev':
+                    self.current_dataset.dqx_data = data_set.flatten()
+                    continue
+                elif key == u'Mask':
+                    self.current_dataset.mask = data_set.flatten()
+                    continue

                for data_point in data_set:
                    ## Top Level Meta Data
-                    unit = self._get_unit(value)
                    if key == u'definition':
                        self.current_datainfo.meta_data['reader'] = data_point
@@ -148 +187 @@
                    elif key == u'SASnote':
                        self.current_datainfo.notes.append(data_point)
-
-                    ## I and Q Data
-                    elif key == u'I':
-                        if type(self.current_dataset) is plottable_2D:
-                            self.current_dataset.data = np.append(self.current_dataset.data, data_point)
-                            self.current_dataset.zaxis("Intensity", unit)
-                        else:
-                            self.current_dataset.y = np.append(self.current_dataset.y, data_point)
-                            self.current_dataset.yaxis("Intensity", unit)
-                    elif key == u'Idev':
-                        if type(self.current_dataset) is plottable_2D:
-                            self.current_dataset.err_data = np.append(self.current_dataset.err_data, data_point)
-                        else:
-                            self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)
-                    elif key == u'Q':
-                        self.current_dataset.xaxis("Q", unit)
-                        if type(self.current_dataset) is plottable_2D:
-                            self.current_dataset.q = np.append(self.current_dataset.q, data_point)
-                        else:
-                            self.current_dataset.x = np.append(self.current_dataset.x, data_point)
-                    elif key == u'Qy':
-                        self.current_dataset.yaxis("Q_y", unit)
-                        self.current_dataset.qy_data = np.append(self.current_dataset.qy_data, data_point)
-                    elif key == u'Qydev':
-                        self.current_dataset.dqy_data = np.append(self.current_dataset.dqy_data, data_point)
-                    elif key == u'Qx':
-                        self.current_dataset.xaxis("Q_x", unit)
-                        self.current_dataset.qx_data = np.append(self.current_dataset.qx_data, data_point)
-                    elif key == u'Qxdev':
-                        self.current_dataset.dqx_data = np.append(self.current_dataset.dqx_data, data_point)
-                    elif key == u'Mask':
-                        self.current_dataset.mask = np.append(self.current_dataset.mask, data_point)

                    ## Sample Information
@@ -296 +303 @@
        ## Type cast data arrays to float64 and find min/max as appropriate
        for dataset in self.data2d:
-            dataset.data = np.delete(dataset.data, [0])
            dataset.data = dataset.data.astype(np.float64)
-            dataset.err_data = np.delete(dataset.err_data, [0])
            dataset.err_data = dataset.err_data.astype(np.float64)
-            dataset.mask = np.delete(dataset.mask, [0])
            if dataset.qx_data is not None:
-                dataset.qx_data = np.delete(dataset.qx_data, [0])
                dataset.xmin = np.min(dataset.qx_data)
                dataset.xmax = np.max(dataset.qx_data)
                dataset.qx_data = dataset.qx_data.astype(np.float64)
            if dataset.dqx_data is not None:
-                dataset.dqx_data = np.delete(dataset.dqx_data, [0])
                dataset.dqx_data = dataset.dqx_data.astype(np.float64)
            if dataset.qy_data is not None:
-                dataset.qy_data = np.delete(dataset.qy_data, [0])
                dataset.ymin = np.min(dataset.qy_data)
                dataset.ymax = np.max(dataset.qy_data)
                dataset.qy_data = dataset.qy_data.astype(np.float64)
            if dataset.dqy_data is not None:
-                dataset.dqy_data = np.delete(dataset.dqy_data, [0])
                dataset.dqy_data = dataset.dqy_data.astype(np.float64)
            if dataset.q_data is not None:
-                dataset.q_data = np.delete(dataset.q_data, [0])
                dataset.q_data = dataset.q_data.astype(np.float64)
            zeros = np.ones(dataset.data.size, dtype=bool)
@@ -333 +332 @@
            except:
                dataset.q_data = None
+
+            if dataset.data.ndim == 2:
+                (n_rows, n_cols) = dataset.data.shape
+                dataset.y_bins = dataset.qy_data[0::n_rows]
+                dataset.x_bins = dataset.qx_data[:n_cols]
+                dataset.data = dataset.data.flatten()
+
            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
            self.output.append(final_dataset)
@@ -338 +344 @@
        for dataset in self.data1d:
            if dataset.x is not None:
-                dataset.x = np.delete(dataset.x, [0])
                dataset.x = dataset.x.astype(np.float64)
                dataset.xmin = np.min(dataset.x)
                dataset.xmax = np.max(dataset.x)
            if dataset.y is not None:
-                dataset.y = np.delete(dataset.y, [0])
                dataset.y = dataset.y.astype(np.float64)
                dataset.ymin = np.min(dataset.y)
                dataset.ymax = np.max(dataset.y)
            if dataset.dx is not None:
-                dataset.dx = np.delete(dataset.dx, [0])
                dataset.dx = dataset.dx.astype(np.float64)
            if dataset.dxl is not None:
-                dataset.dxl = np.delete(dataset.dxl, [0])
                dataset.dxl = dataset.dxl.astype(np.float64)
            if dataset.dxw is not None:
-                dataset.dxw = np.delete(dataset.dxw, [0])
                dataset.dxw = dataset.dxw.astype(np.float64)
            if dataset.dy is not None:
-                dataset.dy = np.delete(dataset.dy, [0])
                dataset.dy = dataset.dy.astype(np.float64)
            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
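Two things change in the HDF5 reader: datasets such as I, Idev, Q, Qx, Qy and Mask are now read as whole arrays (data[key][:] followed by flatten()) instead of being grown point by point with np.append, which also makes the old np.delete(..., [0]) placeholder-stripping unnecessary; and once a 2-D intensity array is seen, its x_bins/y_bins are derived by slicing the flattened q arrays before the data itself is flattened. A rough standalone sketch of both ideas with h5py and numpy; the file name, dataset paths and the assumption that Qx/Qy are stored as full row-major grids with Qx varying fastest are illustrative only, not the canSAS layout:

    import numpy as np
    import h5py

    # Hypothetical file layout -- not the actual canSAS HDF5 paths.
    with h5py.File("example.h5", "r") as handle:
        intensity = handle["sasentry01/sasdata01/I"][:]       # shape (n_rows, n_cols)
        qx = handle["sasentry01/sasdata01/Qx"][:].flatten()   # one Qx per point
        qy = handle["sasentry01/sasdata01/Qy"][:].flatten()   # one Qy per point

    # Whole-array reads replace the old per-point np.append loop, which
    # reallocated the array on every point and needed a dummy first element.
    n_rows, n_cols = intensity.shape

    # Assuming row-major storage with Qx varying fastest, the distinct axis
    # values can be recovered by slicing rather than by np.unique:
    x_bins = qx[:n_cols]      # the first row holds every distinct Qx
    y_bins = qy[::n_cols]     # one Qy value per row
    data = intensity.astype(np.float64).flatten()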
src/sas/sascalc/dataloader/readers/cansas_reader.py
r1686a333 r5f26aa4

@@ -206 +206 @@
        # Go through each child in the parent element
        for node in dom:
+            attr = node.attrib
+            name = attr.get("name", "")
+            type = attr.get("type", "")
            # Get the element name and set the current names level
            tagname = node.tag.replace(self.base_ns, "")
@@ -223 +226 @@
                ## Recursion step to access data within the group
                self._parse_entry(node)
+                if tagname == "SASsample":
+                    self.current_datainfo.sample.name = name
+                elif tagname == "beam_size":
+                    self.current_datainfo.source.beam_size_name = name
+                elif tagname == "SAScollimation":
+                    self.collimation.name = name
+                elif tagname == "aperture":
+                    self.aperture.name = name
+                    self.aperture.type = type
                self.add_intermediate()
            else:
@@ -229 +241 @@
                ## If this is a dataset, store the data appropriately
                if tagname == 'Run':
+                    self.current_datainfo.run_name[data_point] = name
                    self.current_datainfo.run.append(data_point)
                elif tagname == 'Title':
@@ -369 +382 @@
                    self.process.notes.append(data_point)
                elif tagname == 'term' and self.parent_class == 'SASprocess':
-                    self.process.term.append(data_point)
+                    unit = attr.get("unit", "")
+                    dic = {}
+                    dic["name"] = name
+                    dic["value"] = data_point
+                    dic["unit"] = unit
+                    self.process.term.append(dic)

                ## Transmission Spectrum
@@ -531 +549 @@
            self.current_datainfo.collimation.append(self.collimation)
            self.collimation = Collimation()
-        elif self.parent_class == 'SASaperture':
+        elif self.parent_class == 'aperture':
            self.collimation.aperture.append(self.aperture)
            self.aperture = Aperture()
@@ -646 +664 @@
        err_msg = None
        default_unit = None
-        if 'unit' in attr and attr.get('unit') is not None and not self.ns_list.ns_optional:
+        if not isinstance(node_value, float):
+            node_value = float(node_value)
+        if 'unit' in attr and attr.get('unit') is not None:
            try:
                local_unit = attr['unit']
-                if not isinstance(node_value, float):
-                    node_value = float(node_value)
                unitname = self.ns_list.current_level.get("unit", "")
                if "SASdetector" in self.names:
@@ -907 +925 @@
                self.write_node(point, "I", datainfo.y[i],
                                {'unit': datainfo.y_unit})
-                if datainfo.dy != None and len(datainfo.dy) > i:
+                if datainfo.dy is not None and len(datainfo.dy) > i:
                    self.write_node(point, "Idev", datainfo.dy[i],
                                    {'unit': datainfo.y_unit})
-                if datainfo.dx != None and len(datainfo.dx) > i:
+                if datainfo.dx is not None and len(datainfo.dx) > i:
                    self.write_node(point, "Qdev", datainfo.dx[i],
                                    {'unit': datainfo.x_unit})
-                if datainfo.dxw != None and len(datainfo.dxw) > i:
+                if datainfo.dxw is not None and len(datainfo.dxw) > i:
                    self.write_node(point, "dQw", datainfo.dxw[i],
                                    {'unit': datainfo.x_unit})
-                if datainfo.dxl != None and len(datainfo.dxl) > i:
+                if datainfo.dxl is not None and len(datainfo.dxl) > i:
                    self.write_node(point, "dQl", datainfo.dxl[i],
                                    {'unit': datainfo.x_unit})
@@ -1184 +1202 @@
        if isinstance(term, list):
            value = term['value']
+            del term['value']
+        elif isinstance(term, dict):
+            value = term.get("value")
            del term['value']
        else:
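In this reader the name, type and unit attributes are now pulled off each XML node up front (node.attrib), run names are recorded keyed by the run value, and each SASprocess <term> is stored as a name/value/unit dict rather than a bare string, with the downstream code updated to accept either form. A small standalone sketch of the term handling using the stdlib ElementTree (the XML snippet and values are invented for illustration; the real reader uses the canSAS namespace and its own classes):

    from xml.etree import ElementTree as ET

    snippet = """
    <SASprocess>
      <name>data reduction</name>
      <term name="wavelength" unit="A">6.0</term>
      <term name="sample_thickness" unit="mm">1.5</term>
    </SASprocess>
    """

    # Collect each <term> as a name/value/unit dict, mirroring the new reader.
    terms = []
    for node in ET.fromstring(snippet):
        if node.tag == "term":
            attr = node.attrib
            terms.append({
                "name": attr.get("name", ""),
                "value": node.text,
                "unit": attr.get("unit", ""),
            })

    # A consumer that tolerates both forms, in the spirit of the isinstance
    # checks added at the end of the diff (older files stored plain strings).
    for term in terms:
        if isinstance(term, dict):
            print(term["name"], term["value"], term["unit"])
        else:
            print(term)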