Changeset 682c432 in sasview for src/sas/dataloader/readers
- Timestamp: Mar 13, 2015 11:23:24 AM (10 years ago)
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 0ea31ca
- Parents: bb074b3
- Location: src/sas/dataloader/readers
- Files: 2 edited
Legend: in the diffs below, unchanged context lines have no prefix, added lines are prefixed with "+", and removed lines with "-".
src/sas/dataloader/readers/cansas_constants.py
(diff from rc43e875 to r682c432)

     RUN = {"variable" : "{0}.run.append(\"{1}\")",
            "attributes" : {"name" :
-                           {"variable" : "{0}.run_name[node_value] = \"{1}\""}}
+                           {"variable" : "{0}.run_name[\"{3}\"] = \"{1}\""}}
           }
     SASDATA_IDATA_Q = {"variable" : "{0}.x = numpy.append({0}.x, {1})",
…
            "attributes" : {"name" :
                            {"variable" :
-                            "{0}.run_name[node_value] = \"{1}\""}},
+                            "{0}.run_name[\"{3}\"] = \"{1}\""}},
            "children" : {"Title" : TITLE,
                          "Run" : RUN,
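These CansasConstants entries are format-string templates that the reader expands and then runs with exec. Before this change the RUN template referenced the bare name node_value and relied on a variable of that name being in scope when the generated statement executed; substituting the node's text as the fourth positional placeholder ({3}) makes the generated assignment self-contained. A minimal sketch of how such a template gets expanded (FakeData1D and the sample values are stand-ins for illustration, not SasView's actual classes or data):

# Sketch only: expanding one of the constants templates.
# Placeholders: {0} = name of the target object, {1} = attribute value,
# {2} = attribute key, {3} = text content of the current XML node.
template = "{0}.run_name[\"{3}\"] = \"{1}\""

class FakeData1D(object):
    # Stand-in for the real Data1D object held by the reader.
    def __init__(self):
        self.run = []
        self.run_name = {}

data1d = FakeData1D()
attrib_value = "Background run"   # value of the name="..." attribute on <Run>
key = "name"
node_value = "42"                 # text inside the <Run> element

store_attr = template.format("data1d", attrib_value, key, node_value)
exec(store_attr)                  # executes: data1d.run_name["42"] = "Background run"
print(data1d.run_name)            # {'42': 'Background run'}

The matching reader-side change below passes node_value as that fourth argument when formatting the attribute templates.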
src/sas/dataloader/readers/cansas_reader.py
(diff from rd26dea0 to r682c432)

 from sas.dataloader.data_info import Process
 from sas.dataloader.data_info import Aperture
-# Both imports used. Do not remove either.
-from xml.dom.minidom import parseString
 import sas.dataloader.readers.xml_reader as xml_reader
 from sas.dataloader.readers.xml_reader import XMLreader
 from sas.dataloader.readers.cansas_constants import CansasConstants
+
+# The following 2 imports *ARE* used. Do not remove either.
+import xml.dom.minidom
+from xml.dom.minidom import parseString

 _ZERO = 1e-16
…
 ALLOW_ALL = True

-# DO NOT REMOVE
-# Called by outside packages:
+# DO NOT REMOVE Called by outside packages:
 #     sas.perspectives.invariant.invariant_state
 #     sas.perspectives.fitting.pagestate
…
     nodes = node.xpath(location,
                        namespaces={'ns': CANSAS_NS.get("1.0").get("ns")})
-
     if len(nodes) > 0:
         return nodes[0]
…
         return None

-
-# DO NOT REMOVE
-# Called by outside packages:
+# DO NOT REMOVE Called by outside packages:
 #     sas.perspectives.fitting.pagestate
 def write_node(doc, parent, name, value, attr=None):
…
         return False
-

 class Reader(XMLreader):
     """
…
     cansas_version = "1.0"
     base_ns = "{cansas1d/1.0}"
-
     logging = None
     errors = None
-
     type_name = "canSAS"
     ## Wildcards
     ## List of allowed extensions
     ext = ['.xml', '.XML', '.svs', '.SVS']
-
     ## Flag to bypass extension check
     allow_all = True
-

     def __init__(self):
…
         self.logging = []
         self.encoding = None
-

     def is_cansas(self, ext="xml"):
…
         return False
-

     def load_file_and_schema(self, xml_file):
…
         return cansas_defaults
-

     def read(self, xml_file):
…
         :param xml_file: A canSAS file path in proper XML format
         """
-
         # output - Final list of Data1D objects
         output = []
…
         return None
-

     def _final_cleanup(self, data1d):
…
         size_dxl = data1d.dxl.size
         size_dxw = data1d.dxw.size
+        if data1d._xunit != data1d.x_unit:
+            data1d.x_unit = data1d._xunit
+        if data1d._yunit != data1d.y_unit:
+            data1d.y_unit = data1d._yunit
         if size_dxl == 0 and size_dxw == 0:
             data1d.dxl = None
…
         return name
-

     def _unit_conversion(self, node, new_current_level, data1d, \
                          tagname, node_value):
…
         value_unit = ''
         err_msg = None
+        default_unit = None
         if 'unit' in attr and new_current_level.get('unit') is not None:
             try:
…
                 if isinstance(node_value, float) is False:
                     exec("node_value = float({0})".format(node_value))
-                default_unit = None
                 unitname = new_current_level.get("unit")
                 exec "default_unit = data1d.{0}".format(unitname)
…
                     data_conv_q = Converter(local_unit)
                     value_unit = default_unit
-                    i_string = "node_value = data_conv_q"
-                    i_string += "(node_value, units=data1d.{0})"
-                    exec i_string.format(unitname)
+                    node_value = data_conv_q(node_value, units=default_unit)
                 else:
                     value_unit = local_unit
…
                     err_msg += "\"{0}\" unit [{1}]; "
                     err_msg = err_msg.format(tagname, local_unit)
-                    intermediate = "err_msg += " + \
-                                   "\"expecting [{1}]\"" + \
-                                   ".format(data1d.{0})"
-                    exec intermediate.format(unitname, "{0}", "{1}")
+                    err_msg += "expecting [{0}]".format(default_unit)
                     value_unit = local_unit
             except:
…
             node_value = "float({0})".format(node_value)
         return node_value, value_unit
-

     def _check_for_empty_data(self, data1d):
…
                     else:
                         attrib_value = node.attrib[key]
-                    store_attr = attrib_variable.format("data1d", \
-                                                        attrib_value, key)
+                    store_attr = attrib_variable.format("data1d",
+                                                        attrib_value,
+                                                        key,
+                                                        node_value)
                     exec store_attr
                 except AttributeError:
…
             node.append(point)
             self.write_node(point, "Q", datainfo.x[i],
-                            {'unit': datainfo._xunit})
+                            {'unit': datainfo.x_unit})
             if len(datainfo.y) >= i:
                 self.write_node(point, "I", datainfo.y[i],
-                                {'unit': datainfo._yunit})
+                                {'unit': datainfo.y_unit})
             if datainfo.dy != None and len(datainfo.dy) > i:
                 self.write_node(point, "Idev", datainfo.dy[i],
-                                {'unit': datainfo._yunit})
+                                {'unit': datainfo.y_unit})
             if datainfo.dx != None and len(datainfo.dx) > i:
                 self.write_node(point, "Qdev", datainfo.dx[i],
-                                {'unit': datainfo._xunit})
+                                {'unit': datainfo.x_unit})
             if datainfo.dxw != None and len(datainfo.dxw) > i:
                 self.write_node(point, "dQw", datainfo.dxw[i],
-                                {'unit': datainfo._xunit})
+                                {'unit': datainfo.x_unit})
             if datainfo.dxl != None and len(datainfo.dxl) > i:
                 self.write_node(point, "dQl", datainfo.dxl[i],
-                                {'unit': datainfo._xunit})
+                                {'unit': datainfo.x_unit})

     def _write_trans_spectrum(self, datainfo, entry_node):
…
                             {"unit": aperture.distance_unit})

-
     def _write_detectors(self, datainfo, instr):
…
         # Get PIs and create root element
         pi_string = self._get_pi_string()
-
         # Define namespaces and create SASroot object
         main_node = self._create_main_node()
-
         # Create ElementTree, append SASroot and apply processing instructions
         base_string = pi_string + self.to_string(main_node)
         base_element = self.create_element_from_string(base_string)
         doc = self.create_tree(base_element)
-
         # Create SASentry Element
         entry_node = self.create_element("SASentry")
…
         # Add Title to SASentry
         self.write_node(entry_node, "Title", datainfo.title)
-
         # Add Run to SASentry
         self._write_run_names(datainfo, entry_node)
-
         # Add Data info to SASEntry
         self._write_data(datainfo, entry_node)
-
         # Transmission Spectrum Info
         self._write_trans_spectrum(datainfo, entry_node)
-
         # Sample info
         self._write_sample_info(datainfo, entry_node)
-
         # Instrument info
         instr = self._write_instrument(datainfo, entry_node)
-
         # Source
         self._write_source(datainfo, instr)
-
         # Collimation
         self._write_collimation(datainfo, instr)
-
         # Detectors
         self._write_detectors(datainfo, instr)
-
         # Processes info
         self._write_process_notes(datainfo, entry_node)
-
         # Note info
         self._write_notes(datainfo, entry_node)
-
         # Return the document, and the SASentry node associated with
         # the data we just wrote
…
         # object rather than an lxml object.
         entry_node = self._check_origin(entry_node, doc)
-
         return doc, entry_node
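Two patterns recur in the cansas_reader.py hunks: exec on strings assembled at runtime is replaced with ordinary calls (the unit conversion now calls the converter object directly and builds the error message with str.format), and the writer reads the public x_unit/y_unit attributes instead of the private _xunit/_yunit ones. Below is a rough sketch of the conversion change; the Converter class here is a toy stand-in, since the real converter's import is not shown in this hunk, and the only assumption taken from the diff is the call pattern Converter(source_unit)(value, units=target_unit).

# Toy stand-in for the unit converter used by the reader; only the call
# pattern matters: Converter(source_unit)(value, units=target_unit).
class Converter(object):
    _scale = {("1/m", "1/cm"): 0.01, ("1/cm", "1/m"): 100.0}

    def __init__(self, from_unit):
        self.from_unit = from_unit

    def __call__(self, value, units):
        # Return the value expressed in the requested units.
        if units == self.from_unit:
            return value
        return value * self._scale[(self.from_unit, units)]

local_unit = "1/m"      # unit attribute found on the XML node
default_unit = "1/cm"   # unit the matching Data1D attribute expects
node_value = 250.0

# Old code built the statement
#     "node_value = data_conv_q(node_value, units=data1d.<attr>)"
# as a string and exec'd it; the changeset calls the converter directly.
data_conv_q = Converter(local_unit)
node_value = data_conv_q(node_value, units=default_unit)
print(node_value)       # 2.5

Calling the converter directly avoids assembling and re-parsing code at runtime, and it lets the already-resolved default_unit be reused both for the conversion and for the "expecting [unit]" error message.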