Changeset 724af06 in sasview for src/sas/sascalc/dataloader/readers
- Timestamp: Sep 21, 2017 3:12:37 PM (7 years ago)
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 12d3e0e
- Parents: ce81f70 (diff), d76c43a (diff)
- Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
- Location: src/sas/sascalc/dataloader/readers
- Files: 4 edited
src/sas/sascalc/dataloader/readers/cansas_reader.py
(ra78a02f -> rae69c690)

@@ -130,10 +130,5 @@
             self.current_datainfo.meta_data[PREPROCESS] = self.processing_instructions
             self._parse_entry(entry)
-            has_error_dx = self.current_dataset.dx is not None
-            has_error_dy = self.current_dataset.dy is not None
-            self.remove_empty_q_values(has_error_dx=has_error_dx,
-                                       has_error_dy=has_error_dy)
-            self.send_to_output()  # Combine datasets with DataInfo
-            self.current_datainfo = DataInfo()  # Reset DataInfo
+            self.data_cleanup()
         except FileContentsException as fc_exc:
             # File doesn't meet schema - try loading with a less strict schema

@@ -154,6 +149,7 @@
             self.load_file_and_schema(xml_file)  # Reload strict schema so we can find where error are in file
             invalid_xml = self.find_invalid_xml()
-            invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml
-            raise DataReaderException(invalid_xml)  # Handled by base class
+            if invalid_xml != "":
+                invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml
+                raise DataReaderException(invalid_xml)  # Handled by base class
         except FileContentsException as fc_exc:
             msg = "CanSAS Reader could not load the file {}".format(xml_file)

@@ -279,30 +275,16 @@
             # I and Q points
             elif tagname == 'I' and isinstance(self.current_dataset, plottable_1D):
-                unit_list = unit.split("|")
-                if len(unit_list) > 1:
-                    self.current_dataset.yaxis(unit_list[0].strip(),
-                                               unit_list[1].strip())
-                else:
-                    self.current_dataset.yaxis("Intensity", unit)
+                self.current_dataset.yaxis("Intensity", unit)
                 self.current_dataset.y = np.append(self.current_dataset.y, data_point)
             elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_1D):
                 self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)
             elif tagname == 'Q':
-                unit_list = unit.split("|")
-                if len(unit_list) > 1:
-                    self.current_dataset.xaxis(unit_list[0].strip(),
-                                               unit_list[1].strip())
-                else:
-                    self.current_dataset.xaxis("Q", unit)
+                self.current_dataset.xaxis("Q", unit)
                 self.current_dataset.x = np.append(self.current_dataset.x, data_point)
             elif tagname == 'Qdev':
                 self.current_dataset.dx = np.append(self.current_dataset.dx, data_point)
             elif tagname == 'dQw':
-                if self.current_dataset.dxw is None:
-                    self.current_dataset.dxw = np.empty(0)
-                self.current_dataset.dxw = np.append(self.current_dataset.dxw, data_point)
+                self.current_dataset.dxw = np.append(self.current_dataset.dxw, data_point)
             elif tagname == 'dQl':
-                if self.current_dataset.dxl is None:
-                    self.current_dataset.dxl = np.empty(0)
                 self.current_dataset.dxl = np.append(self.current_dataset.dxl, data_point)
             elif tagname == 'Qmean':

@@ -312,4 +294,8 @@
             elif tagname == 'Sesans':
                 self.current_datainfo.isSesans = bool(data_point)
+                self.current_dataset.xaxis(attr.get('x_axis'),
+                                           attr.get('x_unit'))
+                self.current_dataset.yaxis(attr.get('y_axis'),
+                                           attr.get('y_unit'))
             elif tagname == 'yacceptance':
                 self.current_datainfo.sample.yacceptance = (data_point, unit)

@@ -512,8 +498,26 @@
         for error in self.errors:
             self.current_datainfo.errors.add(error)
-        self.errors.clear()
-        self.send_to_output()
+        self.data_cleanup()
+        self.sort_one_d_data()
+        self.sort_two_d_data()
+        self.reset_data_list()
         empty = None
         return self.output[0], empty
+
+    def data_cleanup(self):
+        """
+        Clean up the data sets and refresh everything
+        :return: None
+        """
+        has_error_dx = self.current_dataset.dx is not None
+        has_error_dxl = self.current_dataset.dxl is not None
+        has_error_dxw = self.current_dataset.dxw is not None
+        has_error_dy = self.current_dataset.dy is not None
+        self.remove_empty_q_values(has_error_dx=has_error_dx,
+                                   has_error_dxl=has_error_dxl,
+                                   has_error_dxw=has_error_dxw,
+                                   has_error_dy=has_error_dy)
+        self.send_to_output()  # Combine datasets with DataInfo
+        self.current_datainfo = DataInfo()  # Reset DataInfo

     def _is_call_local(self):

@@ -642,8 +646,11 @@
                 value_unit = local_unit
             except KeyError:
-                err_msg = "CanSAS reader: unexpected "
-                err_msg += "\"{0}\" unit [{1}]; "
-                err_msg = err_msg.format(tagname, local_unit)
-                err_msg += "expecting [{0}]".format(default_unit)
+                # Do not throw an error for loading Sesans data in cansas xml
+                # This is a temporary fix.
+                if local_unit != "A" and local_unit != 'pol':
+                    err_msg = "CanSAS reader: unexpected "
+                    err_msg += "\"{0}\" unit [{1}]; "
+                    err_msg = err_msg.format(tagname, local_unit)
+                    err_msg += "expecting [{0}]".format(default_unit)
                 value_unit = local_unit
             except:

@@ -675,17 +682,15 @@
            di_exists = True
        if dqw_exists and not dql_exists:
-           array_size = self.current_dataset.dxw.size - 1
-           self.current_dataset.dxl = np.append(self.current_dataset.dxl,
-                                                np.zeros([array_size]))
+           array_size = self.current_dataset.dxw.size
+           self.current_dataset.dxl = np.zeros(array_size)
        elif dql_exists and not dqw_exists:
-           array_size = self.current_dataset.dxl.size - 1
-           self.current_dataset.dxw = np.append(self.current_dataset.dxw,
-                                                np.zeros([array_size]))
+           array_size = self.current_dataset.dxl.size
+           self.current_dataset.dxw = np.zeros(array_size)
        elif not dql_exists and not dqw_exists and not dq_exists:
-           array_size = self.current_dataset.x.size - 1
+           array_size = self.current_dataset.x.size
            self.current_dataset.dx = np.append(self.current_dataset.dx,
                                                np.zeros([array_size]))
        if not di_exists:
-           array_size = self.current_dataset.y.size - 1
+           array_size = self.current_dataset.y.size
            self.current_dataset.dy = np.append(self.current_dataset.dy,
                                                np.zeros([array_size]))

@@ -857,27 +862,31 @@
             node.append(point)
             self.write_node(point, "Q", datainfo.x[i],
-                            {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+                            {'unit': datainfo.x_unit})
             if len(datainfo.y) >= i:
                 self.write_node(point, "I", datainfo.y[i],
-                                {'unit': datainfo._yaxis + " | " + datainfo._yunit})
+                                {'unit': datainfo.y_unit})
             if datainfo.dy is not None and len(datainfo.dy) > i:
                 self.write_node(point, "Idev", datainfo.dy[i],
-                                {'unit': datainfo._yaxis + " | " + datainfo._yunit})
+                                {'unit': datainfo.y_unit})
             if datainfo.dx is not None and len(datainfo.dx) > i:
                 self.write_node(point, "Qdev", datainfo.dx[i],
-                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+                                {'unit': datainfo.x_unit})
             if datainfo.dxw is not None and len(datainfo.dxw) > i:
                 self.write_node(point, "dQw", datainfo.dxw[i],
-                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+                                {'unit': datainfo.x_unit})
             if datainfo.dxl is not None and len(datainfo.dxl) > i:
                 self.write_node(point, "dQl", datainfo.dxl[i],
-                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+                                {'unit': datainfo.x_unit})
         if datainfo.isSesans:
-            sesans = self.create_element("Sesans")
+            sesans_attrib = {'x_axis': datainfo._xaxis,
+                             'y_axis': datainfo._yaxis,
+                             'x_unit': datainfo.x_unit,
+                             'y_unit': datainfo.y_unit}
+            sesans = self.create_element("Sesans", attrib=sesans_attrib)
             sesans.text = str(datainfo.isSesans)
-            node.append(sesans)
-            self.write_node(node, "yacceptance", datainfo.sample.yacceptance[0],
+            entry_node.append(sesans)
+            self.write_node(entry_node, "yacceptance", datainfo.sample.yacceptance[0],
                             {'unit': datainfo.sample.yacceptance[1]})
-            self.write_node(node, "zacceptance", datainfo.sample.zacceptance[0],
+            self.write_node(entry_node, "zacceptance", datainfo.sample.zacceptance[0],
                             {'unit': datainfo.sample.zacceptance[1]})
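The slit-smearing branch above now fills a missing resolution column with a zero array whose length matches the measured column, instead of appending size - 1 zeros to a possibly uninitialised array. A minimal standalone sketch of the difference, using plain numpy with made-up dQw values rather than a real plottable_1D dataset:

import numpy as np

# Hypothetical slit-width resolution values read from <dQw> tags; <dQl> is absent.
dxw = np.array([0.01, 0.02, 0.03])

# Old behaviour: append (size - 1) zeros to an empty array, so dxl comes up one element short.
dxl_old = np.append(np.empty(0), np.zeros([dxw.size - 1]))

# New behaviour: a zero array of exactly the same length as dxw.
dxl_new = np.zeros(dxw.size)

print(dxl_old.size, dxl_new.size)  # 2 vs. 3 -- only dxl_new lines up with dxw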
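The reader and writer changes to the <Sesans> element are complementary: the writer now records the axis labels and units as attributes on the element, and the reader restores them with attr.get instead of inferring them from the unit string. A rough sketch of that round trip using plain lxml; the axis labels and units below are invented, and the real code works on plottable_1D datasets rather than bare variables:

from lxml import etree

# Writer side (sketch): store axis metadata as attributes on <Sesans>,
# mirroring the sesans_attrib dictionary added in this changeset.
sesans_attrib = {'x_axis': 'z', 'y_axis': 'G(z)',   # assumed labels
                 'x_unit': 'A', 'y_unit': 'a.u.'}    # assumed units
sesans = etree.Element("Sesans", attrib=sesans_attrib)
sesans.text = "True"

# Reader side (sketch): recover the same metadata from the attribute dictionary.
attr = sesans.attrib
is_sesans = sesans.text.strip().lower() == "true"
x_axis, x_unit = attr.get('x_axis'), attr.get('x_unit')
y_axis, y_unit = attr.get('y_axis'), attr.get('y_unit')

print(is_sesans, x_axis, x_unit, y_axis, y_unit)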
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
(rdcb91cf -> rcd57c7d4)

@@ -140,4 +140,5 @@
 
        if isinstance(value, h5py.Group):
+           # Set parent class before recursion
            self.parent_class = class_name
            parent_list.append(key)

@@ -150,4 +151,6 @@
            # Recursion step to access data within the group
            self.read_children(value, parent_list)
+           # Reset parent class when returning from recursive method
+           self.parent_class = class_name
            self.add_intermediate()
            parent_list.remove(key)
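The two added lines guard recursive traversal: parent_class is shared state on the reader, so after read_children returns from a nested group it is set back to the current group's class name rather than keeping whatever the deepest child left behind. A simplified, standalone sketch of that save/restore pattern, using a nested dict in place of an h5py group hierarchy (the class, names, and layout are invented for illustration):

class Walker(object):
    def __init__(self):
        self.parent_class = None
        self.visited = []

    def read_children(self, group):
        for key, value in group.items():
            class_name = key.split("_")[0]   # stand-in for the NX_class attribute
            if isinstance(value, dict):      # h5py.Group in the real reader
                # Set parent class before recursion
                self.parent_class = class_name
                self.read_children(value)
                # Reset parent class when returning from recursive method
                self.parent_class = class_name
            else:
                # Datasets are tagged with the class of their enclosing group.
                self.visited.append((self.parent_class, key))

walker = Walker()
walker.read_children({"SASentry_1": {"SASdata_1": {"I": [1.0], "Q": [0.1]}}})
print(walker.visited)  # both datasets report "SASdata" as their parent class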
src/sas/sascalc/dataloader/readers/xml_reader.py
(rfafe52a -> rcd57c7d4)

@@ -134,4 +134,7 @@
            first_error = schema.assertValid(self.xmldoc)
        except etree.DocumentInvalid as err:
+           # Suppress errors for <'any'> elements
+           if "##other" in str(err):
+               return first_error
            first_error = str(err)
        return first_error
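The added guard returns early when lxml's validation message mentions "##other", presumably content matched by an <xs:any namespace="##other"> wildcard, instead of reporting it as invalid XML. A self-contained sketch of the same check using lxml directly; the schema and document below are made up for illustration and are not part of SasView:

from lxml import etree

# Made-up schema and document, just to exercise the DocumentInvalid path.
XSD = b"""<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
  <xs:element name="root">
    <xs:complexType>
      <xs:sequence>
        <xs:element name="known" type="xs:string"/>
      </xs:sequence>
    </xs:complexType>
  </xs:element>
</xs:schema>"""

schema = etree.XMLSchema(etree.fromstring(XSD))
doc = etree.fromstring(b"<root><unknown>oops</unknown></root>")

first_error = None
try:
    schema.assertValid(doc)
except etree.DocumentInvalid as err:
    # Mirror the patched method: skip messages about "##other" wildcard content,
    # otherwise keep the first validation error.
    if "##other" not in str(err):
        first_error = str(err)

print(first_error)  # the unexpected "unknown" element is reported here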
src/sas/sascalc/dataloader/readers/__init__.py
(r7a5d066 -> r488f3a5)

@@ -1,11 +1,2 @@
 # Method to associate extensions to default readers
 from associations import read_associations
-
-
-# Method to return the location of the XML settings file
-def get_data_path():
-    """
-    Return the location of the settings file for the data readers.
-    """
-    import os
-    return os.path.dirname(__file__)