Changeset a40be8c in sasview for src/sas/sascalc/dataloader/readers/cansas_reader.py
- Timestamp:
- Sep 15, 2016 8:33:57 AM (8 years ago)
- Branches:
- ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc
- Children:
- 158cb9e
- Parents:
- 2a08cb6
- git-author:
- Jeff Krzywon <krzywon@…> (08/18/16 12:52:59)
- git-committer:
- Piotr Rozyczko <rozyczko@…> (09/15/16 08:33:57)
- File:
-
- 1 edited: src/sas/sascalc/dataloader/readers/cansas_reader.py
Legend:
- Unmodified
- Added
- Removed
-
src/sas/sascalc/dataloader/readers/cansas_reader.py
ref51b63 ra40be8c 14 14 15 15 import logging 16 import numpy 16 import numpy as np 17 17 import os 18 18 import sys … … 20 20 import inspect 21 21 # For saving individual sections of data 22 from sas.sascalc.dataloader.data_info import Data1D 23 from sas.sascalc.dataloader.data_info import Collimation 24 from sas.sascalc.dataloader.data_info import TransmissionSpectrum 25 from sas.sascalc.dataloader.data_info import Detector 26 from sas.sascalc.dataloader.data_info import Process 27 from sas.sascalc.dataloader.data_info import Aperture 22 from sas.sascalc.dataloader.data_info import Data1D, DataInfo, plottable_1D 23 from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector, Process, Aperture 24 from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable as combine_data 28 25 import sas.sascalc.dataloader.readers.xml_reader as xml_reader 29 26 from sas.sascalc.dataloader.readers.xml_reader import XMLreader 30 from sas.sascalc.dataloader.readers.cansas_constants import CansasConstants 27 from sas.sascalc.dataloader.readers.cansas_constants import CansasConstants, CurrentLevel 31 28 32 29 # The following 2 imports *ARE* used. Do not remove either. … … 34 31 from xml.dom.minidom import parseString 35 32 36 ## TODO: Refactor to load multiple <SASData> as separate Data1D objects37 ## TODO: Refactor to allow invalid XML, but give a useful warning when loaded38 39 _ZERO = 1e-1640 33 PREPROCESS = "xmlpreprocess" 41 34 ENCODING = "encoding" 42 35 RUN_NAME_DEFAULT = "None" 36 INVALID_SCHEMA_PATH_1_1 = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd" 37 INVALID_SCHEMA_PATH_1_0 = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd" 38 INVALID_XML = "\n\nThe loaded xml file, {0} does not fully meet the CanSAS v1.x specification. 
SasView loaded " + \ 39 "as much of the data as possible.\n\n" 43 40 HAS_CONVERTER = True 44 41 try: … … 52 49 ALLOW_ALL = True 53 50 54 # DO NOT REMOVE Called by outside packages:55 # sas.sasgui.perspectives.invariant.invariant_state56 # sas.sasgui.perspectives.fitting.pagestate57 def get_content(location, node):58 """59 Get the first instance of the content of a xpath location.60 61 :param location: xpath location62 :param node: node to start at63 64 :return: Element, or None65 """66 nodes = node.xpath(location,67 namespaces={'ns': CANSAS_NS.get("1.0").get("ns")})68 if len(nodes) > 0:69 return nodes[0]70 else:71 return None72 73 # DO NOT REMOVE Called by outside packages:74 # sas.sasgui.perspectives.fitting.pagestate75 def write_node(doc, parent, name, value, attr=None):76 """77 :param doc: document DOM78 :param parent: parent node79 :param name: tag of the element80 :param value: value of the child text node81 :param attr: attribute dictionary82 83 :return: True if something was appended, otherwise False84 """85 if attr is None:86 attr = {}87 if value is not None:88 node = doc.createElement(name)89 node.appendChild(doc.createTextNode(str(value)))90 for item in attr:91 node.setAttribute(item, attr[item])92 parent.appendChild(node)93 return True94 return False95 96 51 class Reader(XMLreader): 97 52 """ … … 101 56 The CanSAS reader requires PyXML 0.8.4 or later. 
102 57 """ 103 ## CanSAS version - defaults to version 1.058 ## CanSAS version - defaults to version 1.0 104 59 cansas_version = "1.0" 105 60 base_ns = "{cansas1d/1.0}" 61 cansas_defaults = None 62 type_name = "canSAS" 63 invalid = True 64 ## Log messages and errors 106 65 logging = None 107 errors = None 108 type_name = "canSAS" 66 errors = set() 67 ## Namespace hierarchy for current xml_file object 68 names = None 69 ns_list = None 70 ## Temporary storage location for loading multiple data sets in a single file 71 current_datainfo = None 72 current_dataset = None 73 current_data1d = None 74 data = None 75 ## List of data1D objects to be sent back to SasView 76 output = None 109 77 ## Wildcards 110 78 type = ["XML files (*.xml)|*.xml", "SasView Save Files (*.svs)|*.svs"] … … 114 82 allow_all = True 115 83 116 def __init__(self): 117 ## List of errors 118 self.errors = set() 84 def reset_state(self): 85 """ 86 Resets the class state to a base case when loading a new data file so previous 87 data files do not appear a second time 88 """ 89 self.current_datainfo = None 90 self.current_dataset = None 91 self.current_data1d = None 92 self.data = [] 93 self.process = Process() 94 self.transspectrum = TransmissionSpectrum() 95 self.aperture = Aperture() 96 self.collimation = Collimation() 97 self.detector = Detector() 98 self.names = [] 99 self.cansas_defaults = {} 100 self.output = [] 101 self.ns_list = None 119 102 self.logging = [] 120 103 self.encoding = None 104 105 def read(self, xml_file, schema_path="", invalid=True): 106 """ 107 Validate and read in an xml_file file in the canSAS format. 
108 109 :param xml_file: A canSAS file path in proper XML format 110 :param schema_path: A file path to an XML schema to validate the xml_file against 111 """ 112 # For every file loaded, reset everything to a base state 113 self.reset_state() 114 self.invalid = invalid 115 # Check that the file exists 116 if os.path.isfile(xml_file): 117 basename, extension = os.path.splitext(os.path.basename(xml_file)) 118 # If the file type is not allowed, return nothing 119 if extension in self.ext or self.allow_all: 120 # Get the file location of 121 self.load_file_and_schema(xml_file, schema_path) 122 self.add_data_set() 123 # Try to load the file, but raise an error if unable to. 124 # Check the file matches the XML schema 125 try: 126 self.is_cansas(extension) 127 self.invalid = False 128 # Get each SASentry from XML file and add it to a list. 129 entry_list = self.xmlroot.xpath( 130 '/ns:SASroot/ns:SASentry', 131 namespaces={'ns': self.cansas_defaults.get("ns")}) 132 self.names.append("SASentry") 133 134 # Get all preprocessing events and encoding 135 self.set_processing_instructions() 136 137 # Parse each <SASentry> item 138 for entry in entry_list: 139 # Create a new DataInfo object for every <SASentry> 140 141 142 # Set the file name and then parse the entry. 
143 self.current_datainfo.filename = basename + extension 144 self.current_datainfo.meta_data["loader"] = "CanSAS XML 1D" 145 self.current_datainfo.meta_data[PREPROCESS] = \ 146 self.processing_instructions 147 148 # Parse the XML SASentry 149 self._parse_entry(entry) 150 # Combine datasets with datainfo 151 self.add_data_set() 152 except RuntimeError: 153 # If the file does not match the schema, raise this error 154 invalid_xml = self.find_invalid_xml() 155 invalid_xml = INVALID_XML.format(basename + extension) + invalid_xml 156 self.errors.add(invalid_xml) 157 # Try again with an invalid CanSAS schema, that requires only a data set in each 158 base_name = xml_reader.__file__ 159 base_name = base_name.replace("\\", "/") 160 base = base_name.split("/sas/")[0] 161 if self.cansas_version == "1.1": 162 invalid_schema = INVALID_SCHEMA_PATH_1_1.format(base, self.cansas_defaults.get("schema")) 163 else: 164 invalid_schema = INVALID_SCHEMA_PATH_1_0.format(base, self.cansas_defaults.get("schema")) 165 self.set_schema(invalid_schema) 166 try: 167 if self.invalid: 168 if self.is_cansas(): 169 self.output = self.read(xml_file, invalid_schema, False) 170 else: 171 raise RuntimeError 172 else: 173 raise RuntimeError 174 except RuntimeError: 175 x = np.zeros(1) 176 y = np.zeros(1) 177 self.current_data1d = Data1D(x,y) 178 self.current_data1d.errors = self.errors 179 return [self.current_data1d] 180 else: 181 self.output.append("Not a valid file path.") 182 # Return a list of parsed entries that dataloader can manage 183 return self.output 184 185 def _parse_entry(self, dom): 186 """ 187 Parse a SASEntry - new recursive method for parsing the dom of 188 the CanSAS data format. This will allow multiple data files 189 and extra nodes to be read in simultaneously. 
190 191 :param dom: dom object with a namespace base of names 192 """ 193 194 self._check_for_empty_data() 195 self.base_ns = "{0}{1}{2}".format("{", \ 196 CANSAS_NS.get(self.cansas_version).get("ns"), "}") 197 tagname = '' 198 tagname_original = '' 199 200 # Go through each child in the parent element 201 for node in dom: 202 # Get the element name and set the current names level 203 tagname = node.tag.replace(self.base_ns, "") 204 tagname_original = tagname 205 # Skip this iteration when loading in save state information 206 if tagname == "fitting_plug_in" or tagname == "pr_inversion" or tagname == "invariant": 207 continue 208 209 # Get where to store content 210 self.names.append(tagname_original) 211 self.ns_list = CONSTANTS.iterate_namespace(self.names) 212 # If the element is a child element, recurse 213 if len(node.getchildren()) > 0: 214 self.parent_class = tagname_original 215 if tagname == 'SASdata': 216 self._initialize_new_data_set() 217 ## Recursion step to access data within the group 218 self._parse_entry(node) 219 self.add_intermediate() 220 else: 221 data_point, unit = self._get_node_value(node, tagname) 222 223 ## If this is a dataset, store the data appropriately 224 if tagname == 'Run': 225 self.current_datainfo.run.append(data_point) 226 elif tagname == 'Title': 227 self.current_datainfo.title = data_point 228 elif tagname == 'SASnote': 229 self.current_datainfo.notes.append(data_point) 230 231 ## I and Q Data 232 elif tagname == 'I': 233 self.current_dataset.yaxis("Intensity", unit) 234 self.current_dataset.y = np.append(self.current_dataset.y, data_point) 235 elif tagname == 'Idev': 236 self.current_dataset.dy = np.append(self.current_dataset.dy, data_point) 237 elif tagname == 'Q': 238 self.current_dataset.xaxis("Q", unit) 239 self.current_dataset.x = np.append(self.current_dataset.x, data_point) 240 elif tagname == 'Qdev': 241 self.current_dataset.dx = np.append(self.current_dataset.dx, data_point) 242 elif tagname == 'dQw': 243 
self.current_dataset.dxw = np.append(self.current_dataset.dxw, data_point) 244 elif tagname == 'dQl': 245 self.current_dataset.dxl = np.append(self.current_dataset.dxl, data_point) 246 247 ## Sample Information 248 elif tagname == 'ID' and self.parent_class == 'SASsample': 249 self.current_datainfo.sample.ID = data_point 250 elif tagname == 'Title' and self.parent_class == 'SASsample': 251 self.current_datainfo.sample.name = data_point 252 elif tagname == 'thickness' and self.parent_class == 'SASsample': 253 self.current_datainfo.sample.thickness = data_point 254 self.current_datainfo.sample.thickness_unit = unit 255 elif tagname == 'transmission' and self.parent_class == 'SASsample': 256 self.current_datainfo.sample.transmission = data_point 257 elif tagname == 'temperature' and self.parent_class == 'SASsample': 258 self.current_datainfo.sample.temperature = data_point 259 self.current_datainfo.sample.temperature_unit = unit 260 elif tagname == 'details' and self.parent_class == 'SASsample': 261 self.current_datainfo.sample.details.append(data_point) 262 elif tagname == 'x' and self.parent_class == 'position': 263 self.current_datainfo.sample.position.x = data_point 264 self.current_datainfo.sample.position_unit = unit 265 elif tagname == 'y' and self.parent_class == 'position': 266 self.current_datainfo.sample.position.y = data_point 267 self.current_datainfo.sample.position_unit = unit 268 elif tagname == 'z' and self.parent_class == 'position': 269 self.current_datainfo.sample.position.z = data_point 270 self.current_datainfo.sample.position_unit = unit 271 elif tagname == 'roll' and self.parent_class == 'orientation' and 'SASsample' in self.names: 272 self.current_datainfo.sample.orientation.x = data_point 273 self.current_datainfo.sample.orientation_unit = unit 274 elif tagname == 'pitch' and self.parent_class == 'orientation' and 'SASsample' in self.names: 275 self.current_datainfo.sample.orientation.y = data_point 276 
self.current_datainfo.sample.orientation_unit = unit 277 elif tagname == 'yaw' and self.parent_class == 'orientation' and 'SASsample' in self.names: 278 self.current_datainfo.sample.orientation.z = data_point 279 self.current_datainfo.sample.orientation_unit = unit 280 281 ## Instrumental Information 282 elif tagname == 'name' and self.parent_class == 'SASinstrument': 283 self.current_datainfo.instrument = data_point 284 ## Detector Information 285 elif tagname == 'name' and self.parent_class == 'SASdetector': 286 self.detector.name = data_point 287 elif tagname == 'SDD' and self.parent_class == 'SASdetector': 288 self.detector.distance = data_point 289 self.detector.distance_unit = unit 290 elif tagname == 'slit_length' and self.parent_class == 'SASdetector': 291 self.detector.slit_length = data_point 292 self.detector.slit_length_unit = unit 293 elif tagname == 'x' and self.parent_class == 'offset': 294 self.detector.offset.x = data_point 295 self.detector.offset_unit = unit 296 elif tagname == 'y' and self.parent_class == 'offset': 297 self.detector.offset.y = data_point 298 self.detector.offset_unit = unit 299 elif tagname == 'z' and self.parent_class == 'offset': 300 self.detector.offset.z = data_point 301 self.detector.offset_unit = unit 302 elif tagname == 'x' and self.parent_class == 'beam_center': 303 self.detector.beam_center.x = data_point 304 self.detector.beam_center_unit = unit 305 elif tagname == 'y' and self.parent_class == 'beam_center': 306 self.detector.beam_center.y = data_point 307 self.detector.beam_center_unit = unit 308 elif tagname == 'z' and self.parent_class == 'beam_center': 309 self.detector.beam_center.z = data_point 310 self.detector.beam_center_unit = unit 311 elif tagname == 'x' and self.parent_class == 'pixel_size': 312 self.detector.pixel_size.x = data_point 313 self.detector.pixel_size_unit = unit 314 elif tagname == 'y' and self.parent_class == 'pixel_size': 315 self.detector.pixel_size.y = data_point 316 
self.detector.pixel_size_unit = unit 317 elif tagname == 'z' and self.parent_class == 'pixel_size': 318 self.detector.pixel_size.z = data_point 319 self.detector.pixel_size_unit = unit 320 elif tagname == 'roll' and self.parent_class == 'orientation' and 'SASdetector' in self.names: 321 self.detector.orientation.x = data_point 322 self.detector.orientation_unit = unit 323 elif tagname == 'pitch' and self.parent_class == 'orientation' and 'SASdetector' in self.names: 324 self.detector.orientation.y = data_point 325 self.detector.orientation_unit = unit 326 elif tagname == 'yaw' and self.parent_class == 'orientation' and 'SASdetector' in self.names: 327 self.detector.orientation.z = data_point 328 self.detector.orientation_unit = unit 329 ## Collimation and Aperture 330 elif tagname == 'length' and self.parent_class == 'SAScollimation': 331 self.collimation.length = data_point 332 self.collimation.length_unit = unit 333 elif tagname == 'name' and self.parent_class == 'SAScollimation': 334 self.collimation.name = data_point 335 elif tagname == 'distance' and self.parent_class == 'aperture': 336 self.aperture.distance = data_point 337 self.aperture.distance_unit = unit 338 elif tagname == 'x' and self.parent_class == 'size': 339 self.aperture.size.x = data_point 340 self.collimation.size_unit = unit 341 elif tagname == 'y' and self.parent_class == 'size': 342 self.aperture.size.y = data_point 343 self.collimation.size_unit = unit 344 elif tagname == 'z' and self.parent_class == 'size': 345 self.aperture.size.z = data_point 346 self.collimation.size_unit = unit 347 348 ## Process Information 349 elif tagname == 'name' and self.parent_class == 'SASprocess': 350 self.process.name = data_point 351 elif tagname == 'description' and self.parent_class == 'SASprocess': 352 self.process.description = data_point 353 elif tagname == 'date' and self.parent_class == 'SASprocess': 354 try: 355 self.process.date = datetime.datetime.fromtimestamp(data_point) 356 except: 357 
self.process.date = data_point 358 elif tagname == 'SASprocessnote': 359 self.process.notes.append(data_point) 360 elif tagname == 'term' and self.parent_class == 'SASprocess': 361 self.process.term.append(data_point) 362 363 ## Transmission Spectrum 364 elif tagname == 'T' and self.parent_class == 'Tdata': 365 self.transspectrum.transmission = np.append(self.transspectrum.transmission, data_point) 366 self.transspectrum.transmission_unit = unit 367 elif tagname == 'Tdev' and self.parent_class == 'Tdata': 368 self.transspectrum.transmission_deviation = np.append(self.transspectrum.transmission_deviation, data_point) 369 self.transspectrum.transmission_deviation_unit = unit 370 elif tagname == 'Lambda' and self.parent_class == 'Tdata': 371 self.transspectrum.wavelength = np.append(self.transspectrum.wavelength, data_point) 372 self.transspectrum.wavelength_unit = unit 373 374 ## Source Information 375 elif tagname == 'wavelength' and (self.parent_class == 'SASsource' or self.parent_class == 'SASData'): 376 self.current_datainfo.source.wavelength = data_point 377 self.current_datainfo.source.wavelength_unit = unit 378 elif tagname == 'wavelength_min' and self.parent_class == 'SASsource': 379 self.current_datainfo.source.wavelength_min = data_point 380 self.current_datainfo.source.wavelength_min_unit = unit 381 elif tagname == 'wavelength_max' and self.parent_class == 'SASsource': 382 self.current_datainfo.source.wavelength_max = data_point 383 self.current_datainfo.source.wavelength_max_unit = unit 384 elif tagname == 'wavelength_spread' and self.parent_class == 'SASsource': 385 self.current_datainfo.source.wavelength_spread = data_point 386 self.current_datainfo.source.wavelength_spread_unit = unit 387 elif tagname == 'x' and self.parent_class == 'beam_size': 388 self.current_datainfo.source.beam_size.x = data_point 389 self.current_datainfo.source.beam_size_unit = unit 390 elif tagname == 'y' and self.parent_class == 'beam_size': 391 
self.current_datainfo.source.beam_size.y = data_point 392 self.current_datainfo.source.beam_size_unit = unit 393 elif tagname == 'z' and self.parent_class == 'pixel_size': 394 self.current_datainfo.source.data_point.z = data_point 395 self.current_datainfo.source.beam_size_unit = unit 396 elif tagname == 'radiation' and self.parent_class == 'SASsource': 397 self.current_datainfo.source.radiation = data_point 398 elif tagname == 'beam_shape' and self.parent_class == 'SASsource': 399 self.current_datainfo.source.beam_shape = data_point 400 401 ## Everything else goes in meta_data 402 else: 403 new_key = self._create_unique_key(self.current_datainfo.meta_data, tagname) 404 self.current_datainfo.meta_data[new_key] = data_point 405 406 self.names.remove(tagname_original) 407 length = 0 408 if len(self.names) > 1: 409 length = len(self.names) - 1 410 self.parent_class = self.names[length] 411 121 412 122 413 def is_cansas(self, ext="xml"): … … 134 425 if ext == "svs": 135 426 return True 136 r eturn False427 raise RuntimeError 137 428 138 429 def load_file_and_schema(self, xml_file, schema_path=""): 139 430 """ 140 Loads the file and associates a schema, if a known schemaexists431 Loads the file and associates a schema, if a schema is passed in or if one already exists 141 432 142 433 :param xml_file: The xml file path sent to Reader.read 434 :param schema_path: The path to a schema associated with the xml_file, or find one based on the file 143 435 """ 144 436 base_name = xml_reader.__file__ … … 151 443 152 444 # Generic values for the cansas file based on the version 153 cansas_defaults = CANSAS_NS.get(self.cansas_version, "1.0")445 self.cansas_defaults = CANSAS_NS.get(self.cansas_version, "1.0") 154 446 if schema_path == "": 155 schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format \156 (base, cansas_defaults.get("schema")).replace("\\", "/")447 schema_path = "{0}/sas/sascalc/dataloader/readers/schema/{1}".format \ 448 (base, 
self.cansas_defaults.get("schema")).replace("\\", "/") 157 449 158 450 # Link a schema to the XML file. 159 451 self.set_schema(schema_path) 160 return cansas_defaults 161 162 ## TODO: Test loading invalid CanSAS XML files and see if this works 163 ## TODO: Once works, try adding a warning that the data is invalid 164 def read(self, xml_file, schema_path=""): 165 """ 166 Validate and read in an xml_file file in the canSAS format. 167 168 :param xml_file: A canSAS file path in proper XML format 169 """ 170 # output - Final list of Data1D objects 171 output = [] 172 # ns - Namespace hierarchy for current xml_file object 173 ns_list = [] 174 175 # Check that the file exists 176 if os.path.isfile(xml_file): 177 basename = os.path.basename(xml_file) 178 _, extension = os.path.splitext(basename) 179 # If the file type is not allowed, return nothing 180 if extension in self.ext or self.allow_all: 181 # Get the file location of 182 cansas_defaults = self.load_file_and_schema(xml_file, schema_path) 183 184 # Try to load the file, but raise an error if unable to. 185 # Check the file matches the XML schema 186 try: 187 if self.is_cansas(extension): 188 # Get each SASentry from XML file and add it to a list. 189 entry_list = self.xmlroot.xpath( 190 '/ns:SASroot/ns:SASentry', 191 namespaces={'ns': cansas_defaults.get("ns")}) 192 ns_list.append("SASentry") 193 194 # If multiple files, modify the name for each is unique 195 increment = 0 196 # Parse each SASentry item 197 for entry in entry_list: 198 # Define a new Data1D object with zeroes for 199 # x_vals and y_vals 200 data1d = Data1D(numpy.empty(0), numpy.empty(0), 201 numpy.empty(0), numpy.empty(0)) 202 data1d.dxl = numpy.empty(0) 203 data1d.dxw = numpy.empty(0) 204 205 # If more than one SASentry, increment each in order 206 name = basename 207 if len(entry_list) - 1 > 0: 208 name += "_{0}".format(increment) 209 increment += 1 210 211 # Set the Data1D name and then parse the entry. 
212 # The entry is appended to a list of entry values 213 data1d.filename = name 214 data1d.meta_data["loader"] = "CanSAS 1D" 215 216 # Get all preprocessing events and encoding 217 self.set_processing_instructions() 218 data1d.meta_data[PREPROCESS] = \ 219 self.processing_instructions 220 221 # Parse the XML file 222 return_value, extras = \ 223 self._parse_entry(entry, ns_list, data1d) 224 del extras[:] 225 226 return_value = self._final_cleanup(return_value) 227 output.append(return_value) 228 else: 229 raise RuntimeError, "Invalid XML at: {0}".format(\ 230 self.find_invalid_xml()) 231 except: 232 # If the file does not match the schema, raise this error 233 schema_path = "{0}/sas/sascalc/dataloader/readers/schema/cansas1d_invalid.xsd" 234 invalid_xml = self.find_invalid_xml() 235 invalid_xml = "\n\nThe loaded xml file does not fully meet the CanSAS v1.x specification. SasView " + \ 236 "loaded as much of the data as possible.\n\n" + invalid_xml 237 self.errors.add(invalid_xml) 238 self.set_schema(schema_path) 239 if self.is_cansas(): 240 output = self.read(xml_file, schema_path) 241 else: 242 raise RuntimeError, "%s cannot be read" % xml_file 243 return output 244 # Return a list of parsed entries that dataloader can manage 245 return None 246 247 def _final_cleanup(self, data1d): 452 453 def add_data_set(self): 454 """ 455 Adds the current_dataset to the list of outputs after preforming final processing on the data and then calls a 456 private method to generate a new data set. 457 458 :param key: NeXus group name for current tree level 459 """ 460 461 if self.current_datainfo and self.current_dataset: 462 self._final_cleanup() 463 self.data = [] 464 self.current_datainfo = DataInfo() 465 466 def _initialize_new_data_set(self, parent_list=None): 467 """ 468 A private class method to generate a new 1D data object. 469 Outside methods should call add_data_set() to be sure any existing data is stored properly. 
470 471 :param parent_list: List of names of parent elements 472 """ 473 474 if parent_list is None: 475 parent_list = [] 476 x = np.array(0) 477 y = np.array(0) 478 self.current_dataset = plottable_1D(x, y) 479 480 def add_intermediate(self): 481 """ 482 This method stores any intermediate objects within the final data set after fully reading the set. 483 484 :param parent: The NXclass name for the h5py Group object that just finished being processed 485 """ 486 487 if self.parent_class == 'SASprocess': 488 self.current_datainfo.process.append(self.process) 489 self.process = Process() 490 elif self.parent_class == 'SASdetector': 491 self.current_datainfo.detector.append(self.detector) 492 self.detector = Detector() 493 elif self.parent_class == 'SAStransmission_spectrum': 494 self.current_datainfo.trans_spectrum.append(self.transspectrum) 495 self.transspectrum = TransmissionSpectrum() 496 elif self.parent_class == 'SAScollimation': 497 self.current_datainfo.collimation.append(self.collimation) 498 self.collimation = Collimation() 499 elif self.parent_class == 'SASaperture': 500 self.collimation.aperture.append(self.aperture) 501 self.aperture = Aperture() 502 elif self.parent_class == 'SASdata': 503 self._check_for_empty_resolution() 504 self.data.append(self.current_dataset) 505 506 def _final_cleanup(self): 248 507 """ 249 508 Final cleanup of the Data1D object to be sure it has all the 250 509 appropriate information needed for perspectives 251 252 :param data1d: Data1D object that has been populated 253 """ 254 # Final cleanup 255 # Remove empty nodes, verify array sizes are correct 510 """ 511 512 ## Append errors to dataset and reset class errors 513 self.current_datainfo.errors = set() 256 514 for error in self.errors: 257 data1d.errors.append(error)515 self.current_datainfo.errors.add(error) 258 516 self.errors.clear() 259 numpy.trim_zeros(data1d.x) 260 numpy.trim_zeros(data1d.y) 261 numpy.trim_zeros(data1d.dy) 262 size_dx = data1d.dx.size 263 size_dxl = 
data1d.dxl.size 264 size_dxw = data1d.dxw.size 265 if data1d._xunit != data1d.x_unit: 266 data1d.x_unit = data1d._xunit 267 if data1d._yunit != data1d.y_unit: 268 data1d.y_unit = data1d._yunit 269 if size_dxl == 0 and size_dxw == 0: 270 data1d.dxl = None 271 data1d.dxw = None 272 numpy.trim_zeros(data1d.dx) 273 elif size_dx == 0: 274 data1d.dx = None 275 size_dx = size_dxl 276 numpy.trim_zeros(data1d.dxl) 277 numpy.trim_zeros(data1d.dxw) 278 return data1d 517 518 ## Combine all plottables with datainfo and append each to output 519 ## Type cast data arrays to float64 and find min/max as appropriate 520 for dataset in self.data: 521 if dataset.x is not None: 522 dataset.x = np.delete(dataset.x, [0]) 523 dataset.x = dataset.x.astype(np.float64) 524 dataset.xmin = np.min(dataset.x) 525 dataset.xmax = np.max(dataset.x) 526 if dataset.y is not None: 527 dataset.y = np.delete(dataset.y, [0]) 528 dataset.y = dataset.y.astype(np.float64) 529 dataset.ymin = np.min(dataset.y) 530 dataset.ymax = np.max(dataset.y) 531 if dataset.dx is not None: 532 dataset.dx = np.delete(dataset.dx, [0]) 533 dataset.dx = dataset.dx.astype(np.float64) 534 if dataset.dxl is not None: 535 dataset.dxl = np.delete(dataset.dxl, [0]) 536 dataset.dxl = dataset.dxl.astype(np.float64) 537 if dataset.dxw is not None: 538 dataset.dxw = np.delete(dataset.dxw, [0]) 539 dataset.dxw = dataset.dxw.astype(np.float64) 540 if dataset.dy is not None: 541 dataset.dy = np.delete(dataset.dy, [0]) 542 dataset.dy = dataset.dy.astype(np.float64) 543 np.trim_zeros(dataset.x) 544 np.trim_zeros(dataset.y) 545 np.trim_zeros(dataset.dy) 546 final_dataset = combine_data(dataset, self.current_datainfo) 547 self.output.append(final_dataset) 279 548 280 549 def _create_unique_key(self, dictionary, name, numb=0): 281 550 """ 282 551 Create a unique key value for any dictionary to prevent overwriting 283 Recurse suntil a unique key value is found.552 Recurse until a unique key value is found. 
284 553 285 554 :param dictionary: A dictionary with any number of entries … … 294 563 return name 295 564 296 def _unit_conversion(self, node, new_current_level, data1d, \ 297 tagname, node_value): 565 def _get_node_value(self, node, tagname): 566 """ 567 Get the value of a node and any applicable units 568 569 :param node: The XML node to get the value of 570 :param tagname: The tagname of the node 571 """ 572 #Get the text from the node and convert all whitespace to spaces 573 units = '' 574 node_value = node.text 575 if node_value is not None: 576 node_value = ' '.join(node_value.split()) 577 else: 578 node_value = "" 579 580 # If the value is a float, compile with units. 581 if self.ns_list.ns_datatype == "float": 582 # If an empty value is given, set as zero. 583 if node_value is None or node_value.isspace() \ 584 or node_value.lower() == "nan": 585 node_value = "0.0" 586 #Convert the value to the base units 587 node_value, units = self._unit_conversion(node, tagname, node_value) 588 589 # If the value is a timestamp, convert to a datetime object 590 elif self.ns_list.ns_datatype == "timestamp": 591 if node_value is None or node_value.isspace(): 592 pass 593 else: 594 try: 595 node_value = \ 596 datetime.datetime.fromtimestamp(node_value) 597 except ValueError: 598 node_value = None 599 return node_value, units 600 601 def _unit_conversion(self, node, tagname, node_value): 298 602 """ 299 603 A unit converter method used to convert the data included in the file 300 604 to the default units listed in data_info 301 605 302 :param new_current_level: cansas_constants level as returned by 303 iterate_namespace 304 :param attr: The attributes of the node 305 :param data1d: Where the values will be saved 606 :param node: XML node 607 :param tagname: name of the node 306 608 :param node_value: The value of the current dom node 307 609 """ … … 310 612 err_msg = None 311 613 default_unit = None 312 if 'unit' in attr and new_current_level.get('unit') is not None:614 if 
'unit' in attr and attr.get('unit') is not None and not self.ns_list.ns_optional: 313 615 try: 314 616 local_unit = attr['unit'] 315 if isinstance(node_value, float) is False: 316 exec("node_value = float({0})".format(node_value)) 317 unitname = new_current_level.get("unit") 318 exec "default_unit = data1d.{0}".format(unitname) 319 if local_unit is not None and default_unit is not None and \ 320 local_unit.lower() != default_unit.lower() \ 617 if not isinstance(node_value, float): 618 node_value = float(node_value) 619 unitname = self.ns_list.current_level.get("unit", "") 620 if "SASdetector" in self.names: 621 save_in = "detector" 622 elif "aperture" in self.names: 623 save_in = "aperture" 624 elif "SAScollimation" in self.names: 625 save_in = "collimation" 626 elif "SAStransmission_spectrum" in self.names: 627 save_in = "transspectrum" 628 elif "SASdata" in self.names: 629 x = np.zeros(1) 630 y = np.zeros(1) 631 self.current_data1d = Data1D(x, y) 632 save_in = "current_data1d" 633 elif "SASsource" in self.names: 634 save_in = "current_datainfo.source" 635 elif "SASsample" in self.names: 636 save_in = "current_datainfo.sample" 637 elif "SASprocess" in self.names: 638 save_in = "process" 639 else: 640 save_in = "current_datainfo" 641 exec "default_unit = self.{0}.{1}".format(save_in, unitname) 642 if local_unit and default_unit and local_unit.lower() != default_unit.lower() \ 321 643 and local_unit.lower() != "none": 322 644 if HAS_CONVERTER == True: … … 345 667 if err_msg: 346 668 self.errors.add(err_msg) 347 node_value = "float({0})".format(node_value)348 669 return node_value, value_unit 349 670 350 def _check_for_empty_data(self , data1d):671 def _check_for_empty_data(self): 351 672 """ 352 673 Creates an empty data set if no data is passed to the reader … … 354 675 :param data1d: presumably a Data1D object 355 676 """ 356 if data1d == None: 357 self.errors = set() 358 x_vals = numpy.empty(0) 359 y_vals = numpy.empty(0) 360 dx_vals = numpy.empty(0) 361 dy_vals 
= numpy.empty(0) 362 dxl = numpy.empty(0) 363 dxw = numpy.empty(0) 364 data1d = Data1D(x_vals, y_vals, dx_vals, dy_vals) 365 data1d.dxl = dxl 366 data1d.dxw = dxw 367 return data1d 368 369 def _handle_special_cases(self, tagname, data1d, children): 370 """ 371 Handle cases where the data type in Data1D is a dictionary or list 372 373 :param tagname: XML tagname in use 374 :param data1d: The original Data1D object 375 :param children: Child nodes of node 376 :param node: existing node with tag name 'tagname' 377 """ 378 if tagname == "SASdetector": 379 data1d = Detector() 380 elif tagname == "SAScollimation": 381 data1d = Collimation() 382 elif tagname == "SAStransmission_spectrum": 383 data1d = TransmissionSpectrum() 384 elif tagname == "SASprocess": 385 data1d = Process() 386 for child in children: 387 if child.tag.replace(self.base_ns, "") == "term": 388 term_attr = {} 389 for attr in child.keys(): 390 term_attr[attr] = \ 391 ' '.join(child.get(attr).split()) 392 if child.text is not None: 393 term_attr['value'] = \ 394 ' '.join(child.text.split()) 395 data1d.term.append(term_attr) 396 elif tagname == "aperture": 397 data1d = Aperture() 398 if tagname == "Idata" and children is not None: 399 data1d = self._check_for_empty_resolution(data1d, children) 400 return data1d 401 402 def _check_for_empty_resolution(self, data1d, children): 677 if self.current_dataset == None: 678 x_vals = np.empty(0) 679 y_vals = np.empty(0) 680 dx_vals = np.empty(0) 681 dy_vals = np.empty(0) 682 dxl = np.empty(0) 683 dxw = np.empty(0) 684 self.current_dataset = plottable_1D(x_vals, y_vals, dx_vals, dy_vals) 685 self.current_dataset.dxl = dxl 686 self.current_dataset.dxw = dxw 687 688 def _check_for_empty_resolution(self): 403 689 """ 404 690 A method to check all resolution data sets are the same size as I and Q … … 408 694 dq_exists = False 409 695 di_exists = False 410 for child in children: 411 tag = child.tag.replace(self.base_ns, "") 412 if tag == "dQl": 413 dql_exists = True 414 
if tag == "dQw": 415 dqw_exists = True 416 if tag == "Qdev": 417 dq_exists = True 418 if tag == "Idev": 419 di_exists = True 420 if dqw_exists and dql_exists == False: 421 data1d.dxl = numpy.append(data1d.dxl, 0.0) 422 elif dql_exists and dqw_exists == False: 423 data1d.dxw = numpy.append(data1d.dxw, 0.0) 424 elif dql_exists == False and dqw_exists == False \ 425 and dq_exists == False: 426 data1d.dx = numpy.append(data1d.dx, 0.0) 427 if di_exists == False: 428 data1d.dy = numpy.append(data1d.dy, 0.0) 429 return data1d 430 431 def _restore_original_case(self, 432 tagname_original, 433 tagname, 434 save_data1d, 435 data1d): 436 """ 437 Save the special case data to the appropriate location and restore 438 the original Data1D object 439 440 :param tagname_original: Unmodified tagname for the node 441 :param tagname: modified tagname for the node 442 :param save_data1d: The original Data1D object 443 :param data1d: If a special case was handled, an object of that type 444 """ 445 if tagname_original == "SASdetector": 446 save_data1d.detector.append(data1d) 447 elif tagname_original == "SAScollimation": 448 save_data1d.collimation.append(data1d) 449 elif tagname == "SAStransmission_spectrum": 450 save_data1d.trans_spectrum.append(data1d) 451 elif tagname_original == "SASprocess": 452 save_data1d.process.append(data1d) 453 elif tagname_original == "aperture": 454 save_data1d.aperture.append(data1d) 455 else: 456 save_data1d = data1d 457 return save_data1d 458 459 def _handle_attributes(self, node, data1d, cs_values, tagname): 460 """ 461 Process all of the attributes for a node 462 """ 463 attr = node.attrib 464 if attr is not None: 465 for key in node.keys(): 466 try: 467 node_value, unit = self._get_node_value(node, cs_values, \ 468 data1d, tagname) 469 cansas_attrib = \ 470 cs_values.current_level.get("attributes").get(key) 471 attrib_variable = cansas_attrib.get("variable") 472 if key == 'unit' and unit != '': 473 attrib_value = unit 474 else: 475 attrib_value = 
node.attrib[key] 476 store_attr = attrib_variable.format("data1d", 477 attrib_value, 478 key, 479 node_value) 480 exec store_attr 481 except AttributeError: 482 pass 483 return data1d 484 485 def _get_node_value(self, node, cs_values, data1d, tagname): 486 """ 487 Get the value of a node and any applicable units 488 489 :param node: The XML node to get the value of 490 :param cs_values: A CansasConstants.CurrentLevel object 491 :param attr: The node attributes 492 :param dataid: The working object to be modified 493 :param tagname: The tagname of the node 494 """ 495 #Get the text from the node and convert all whitespace to spaces 496 units = '' 497 node_value = node.text 498 if node_value == "": 499 node_value = None 500 if node_value is not None: 501 node_value = ' '.join(node_value.split()) 502 503 # If the value is a float, compile with units. 504 if cs_values.ns_datatype == "float": 505 # If an empty value is given, set as zero. 506 if node_value is None or node_value.isspace() \ 507 or node_value.lower() == "nan": 508 node_value = "0.0" 509 #Convert the value to the base units 510 node_value, units = self._unit_conversion(node, \ 511 cs_values.current_level, data1d, tagname, node_value) 512 513 # If the value is a timestamp, convert to a datetime object 514 elif cs_values.ns_datatype == "timestamp": 515 if node_value is None or node_value.isspace(): 516 pass 517 else: 518 try: 519 node_value = \ 520 datetime.datetime.fromtimestamp(node_value) 521 except ValueError: 522 node_value = None 523 return node_value, units 524 525 def _parse_entry(self, dom, names=None, data1d=None, extras=None): 526 """ 527 Parse a SASEntry - new recursive method for parsing the dom of 528 the CanSAS data format. This will allow multiple data files 529 and extra nodes to be read in simultaneously. 
530 531 :param dom: dom object with a namespace base of names 532 :param names: A list of element names that lead up to the dom object 533 :param data1d: The data1d object that will be modified 534 :param extras: Any values that should go into meta_data when data1d 535 is not a Data1D object 536 """ 537 538 if extras is None: 539 extras = [] 540 if names is None or names == []: 541 names = ["SASentry"] 542 543 data1d = self._check_for_empty_data(data1d) 544 545 self.base_ns = "{0}{1}{2}".format("{", \ 546 CANSAS_NS.get(self.cansas_version).get("ns"), "}") 547 tagname = '' 548 tagname_original = '' 549 550 # Go through each child in the parent element 551 for node in dom: 552 try: 553 # Get the element name and set the current names level 554 tagname = node.tag.replace(self.base_ns, "") 555 tagname_original = tagname 556 if tagname == "fitting_plug_in" or tagname == "pr_inversion" or\ 557 tagname == "invariant": 558 continue 559 names.append(tagname) 560 children = node.getchildren() 561 if len(children) == 0: 562 children = None 563 save_data1d = data1d 564 565 # Look for special cases 566 data1d = self._handle_special_cases(tagname, data1d, children) 567 568 # Get where to store content 569 cs_values = CONSTANTS.iterate_namespace(names) 570 # If the element is a child element, recurse 571 if children is not None: 572 # Returned value is new Data1D object with all previous and 573 # new values in it. 
574 data1d, extras = self._parse_entry(node, 575 names, data1d, extras) 576 577 #Get the information from the node 578 node_value, _ = self._get_node_value(node, cs_values, \ 579 data1d, tagname) 580 581 # If appending to a dictionary (meta_data | run_name) 582 # make sure the key is unique 583 if cs_values.ns_variable == "{0}.meta_data[\"{2}\"] = \"{1}\"": 584 # If we are within a Process, Detector, Collimation or 585 # Aperture instance, pull out old data1d 586 tagname = self._create_unique_key(data1d.meta_data, \ 587 tagname, 0) 588 if isinstance(data1d, Data1D) == False: 589 store_me = cs_values.ns_variable.format("data1d", \ 590 node_value, tagname) 591 extras.append(store_me) 592 cs_values.ns_variable = None 593 if cs_values.ns_variable == "{0}.run_name[\"{2}\"] = \"{1}\"": 594 tagname = self._create_unique_key(data1d.run_name, \ 595 tagname, 0) 596 597 # Check for Data1D object and any extra commands to save 598 if isinstance(data1d, Data1D): 599 for item in extras: 600 exec item 601 # Don't bother saving empty information unless it is a float 602 if cs_values.ns_variable is not None and \ 603 node_value is not None and \ 604 node_value.isspace() == False: 605 # Format a string and then execute it. 
606 store_me = cs_values.ns_variable.format("data1d", \ 607 node_value, tagname) 608 exec store_me 609 # Get attributes and process them 610 data1d = self._handle_attributes(node, data1d, cs_values, \ 611 tagname) 612 613 except TypeError: 614 pass 615 except Exception as excep: 616 exc_type, exc_obj, exc_tb = sys.exc_info() 617 fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] 618 print(excep, exc_type, fname, exc_tb.tb_lineno, \ 619 tagname, exc_obj) 620 finally: 621 # Save special cases in original data1d object 622 # then restore the data1d 623 save_data1d = self._restore_original_case(tagname_original, \ 624 tagname, save_data1d, data1d) 625 if tagname_original == "fitting_plug_in" or \ 626 tagname_original == "invariant" or \ 627 tagname_original == "pr_inversion": 628 pass 629 else: 630 data1d = save_data1d 631 # Remove tagname from names to restore original base 632 names.remove(tagname_original) 633 return data1d, extras 634 635 def _get_pi_string(self): 636 """ 637 Creates the processing instructions header for writing to file 638 """ 639 pis = self.return_processing_instructions() 640 if len(pis) > 0: 641 pi_tree = self.create_tree(pis[0]) 642 i = 1 643 for i in range(1, len(pis) - 1): 644 pi_tree = self.append(pis[i], pi_tree) 645 pi_string = self.to_string(pi_tree) 646 else: 647 pi_string = "" 648 return pi_string 649 650 def _create_main_node(self): 651 """ 652 Creates the primary xml header used when writing to file 653 """ 654 xsi = "http://www.w3.org/2001/XMLSchema-instance" 655 version = self.cansas_version 656 n_s = CANSAS_NS.get(version).get("ns") 657 if version == "1.1": 658 url = "http://www.cansas.org/formats/1.1/" 659 else: 660 url = "http://svn.smallangles.net/svn/canSAS/1dwg/trunk/" 661 schema_location = "{0} {1}cansas1d.xsd".format(n_s, url) 662 attrib = {"{" + xsi + "}schemaLocation" : schema_location, 663 "version" : version} 664 nsmap = {'xsi' : xsi, None: n_s} 665 666 main_node = self.create_element("{" + n_s + "}SASroot", 
667 attrib=attrib, nsmap=nsmap) 668 return main_node 669 670 def _write_run_names(self, datainfo, entry_node): 671 """ 672 Writes the run names to the XML file 673 674 :param datainfo: The Data1D object the information is coming from 675 :param entry_node: lxml node ElementTree object to be appended to 676 """ 677 if datainfo.run == None or datainfo.run == []: 678 datainfo.run.append(RUN_NAME_DEFAULT) 679 datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT 680 for item in datainfo.run: 681 runname = {} 682 if item in datainfo.run_name and \ 683 len(str(datainfo.run_name[item])) > 1: 684 runname = {'name': datainfo.run_name[item]} 685 self.write_node(entry_node, "Run", item, runname) 686 687 def _write_data(self, datainfo, entry_node): 688 """ 689 Writes the I and Q data to the XML file 690 691 :param datainfo: The Data1D object the information is coming from 692 :param entry_node: lxml node ElementTree object to be appended to 693 """ 694 node = self.create_element("SASdata") 695 self.append(node, entry_node) 696 697 for i in range(len(datainfo.x)): 698 point = self.create_element("Idata") 699 node.append(point) 700 self.write_node(point, "Q", datainfo.x[i], 701 {'unit': datainfo.x_unit}) 702 if len(datainfo.y) >= i: 703 self.write_node(point, "I", datainfo.y[i], 704 {'unit': datainfo.y_unit}) 705 if datainfo.dy != None and len(datainfo.dy) > i: 706 self.write_node(point, "Idev", datainfo.dy[i], 707 {'unit': datainfo.y_unit}) 708 if datainfo.dx != None and len(datainfo.dx) > i: 709 self.write_node(point, "Qdev", datainfo.dx[i], 710 {'unit': datainfo.x_unit}) 711 if datainfo.dxw != None and len(datainfo.dxw) > i: 712 self.write_node(point, "dQw", datainfo.dxw[i], 713 {'unit': datainfo.x_unit}) 714 if datainfo.dxl != None and len(datainfo.dxl) > i: 715 self.write_node(point, "dQl", datainfo.dxl[i], 716 {'unit': datainfo.x_unit}) 717 718 def _write_trans_spectrum(self, datainfo, entry_node): 719 """ 720 Writes the transmission spectrum data to the XML file 721 722 
:param datainfo: The Data1D object the information is coming from 723 :param entry_node: lxml node ElementTree object to be appended to 724 """ 725 for i in range(len(datainfo.trans_spectrum)): 726 spectrum = datainfo.trans_spectrum[i] 727 node = self.create_element("SAStransmission_spectrum", 728 {"name" : spectrum.name}) 729 self.append(node, entry_node) 730 if isinstance(spectrum.timestamp, datetime.datetime): 731 node.setAttribute("timestamp", spectrum.timestamp) 732 for i in range(len(spectrum.wavelength)): 733 point = self.create_element("Tdata") 734 node.append(point) 735 self.write_node(point, "Lambda", spectrum.wavelength[i], 736 {'unit': spectrum.wavelength_unit}) 737 self.write_node(point, "T", spectrum.transmission[i], 738 {'unit': spectrum.transmission_unit}) 739 if spectrum.transmission_deviation != None \ 740 and len(spectrum.transmission_deviation) >= i: 741 self.write_node(point, "Tdev", 742 spectrum.transmission_deviation[i], 743 {'unit': 744 spectrum.transmission_deviation_unit}) 745 746 def _write_sample_info(self, datainfo, entry_node): 747 """ 748 Writes the sample information to the XML file 749 750 :param datainfo: The Data1D object the information is coming from 751 :param entry_node: lxml node ElementTree object to be appended to 752 """ 753 sample = self.create_element("SASsample") 754 if datainfo.sample.name is not None: 755 self.write_attribute(sample, "name", 756 str(datainfo.sample.name)) 757 self.append(sample, entry_node) 758 self.write_node(sample, "ID", str(datainfo.sample.ID)) 759 self.write_node(sample, "thickness", datainfo.sample.thickness, 760 {"unit": datainfo.sample.thickness_unit}) 761 self.write_node(sample, "transmission", datainfo.sample.transmission) 762 self.write_node(sample, "temperature", datainfo.sample.temperature, 763 {"unit": datainfo.sample.temperature_unit}) 764 765 pos = self.create_element("position") 766 written = self.write_node(pos, 767 "x", 768 datainfo.sample.position.x, 769 {"unit": 
datainfo.sample.position_unit}) 770 written = written | self.write_node( \ 771 pos, "y", datainfo.sample.position.y, 772 {"unit": datainfo.sample.position_unit}) 773 written = written | self.write_node( \ 774 pos, "z", datainfo.sample.position.z, 775 {"unit": datainfo.sample.position_unit}) 776 if written == True: 777 self.append(pos, sample) 778 779 ori = self.create_element("orientation") 780 written = self.write_node(ori, "roll", 781 datainfo.sample.orientation.x, 782 {"unit": datainfo.sample.orientation_unit}) 783 written = written | self.write_node( \ 784 ori, "pitch", datainfo.sample.orientation.y, 785 {"unit": datainfo.sample.orientation_unit}) 786 written = written | self.write_node( \ 787 ori, "yaw", datainfo.sample.orientation.z, 788 {"unit": datainfo.sample.orientation_unit}) 789 if written == True: 790 self.append(ori, sample) 791 792 for item in datainfo.sample.details: 793 self.write_node(sample, "details", item) 794 795 def _write_instrument(self, datainfo, entry_node): 796 """ 797 Writes the instrumental information to the XML file 798 799 :param datainfo: The Data1D object the information is coming from 800 :param entry_node: lxml node ElementTree object to be appended to 801 """ 802 instr = self.create_element("SASinstrument") 803 self.append(instr, entry_node) 804 self.write_node(instr, "name", datainfo.instrument) 805 return instr 806 807 def _write_source(self, datainfo, instr): 808 """ 809 Writes the source information to the XML file 810 811 :param datainfo: The Data1D object the information is coming from 812 :param instr: instrument node to be appended to 813 """ 814 source = self.create_element("SASsource") 815 if datainfo.source.name is not None: 816 self.write_attribute(source, "name", 817 str(datainfo.source.name)) 818 self.append(source, instr) 819 if datainfo.source.radiation == None or datainfo.source.radiation == '': 820 datainfo.source.radiation = "neutron" 821 self.write_node(source, "radiation", datainfo.source.radiation) 822 823 
size = self.create_element("beam_size") 824 if datainfo.source.beam_size_name is not None: 825 self.write_attribute(size, "name", 826 str(datainfo.source.beam_size_name)) 827 written = self.write_node( \ 828 size, "x", datainfo.source.beam_size.x, 829 {"unit": datainfo.source.beam_size_unit}) 830 written = written | self.write_node( \ 831 size, "y", datainfo.source.beam_size.y, 832 {"unit": datainfo.source.beam_size_unit}) 833 written = written | self.write_node( \ 834 size, "z", datainfo.source.beam_size.z, 835 {"unit": datainfo.source.beam_size_unit}) 836 if written == True: 837 self.append(size, source) 838 839 self.write_node(source, "beam_shape", datainfo.source.beam_shape) 840 self.write_node(source, "wavelength", 841 datainfo.source.wavelength, 842 {"unit": datainfo.source.wavelength_unit}) 843 self.write_node(source, "wavelength_min", 844 datainfo.source.wavelength_min, 845 {"unit": datainfo.source.wavelength_min_unit}) 846 self.write_node(source, "wavelength_max", 847 datainfo.source.wavelength_max, 848 {"unit": datainfo.source.wavelength_max_unit}) 849 self.write_node(source, "wavelength_spread", 850 datainfo.source.wavelength_spread, 851 {"unit": datainfo.source.wavelength_spread_unit}) 852 853 def _write_collimation(self, datainfo, instr): 854 """ 855 Writes the collimation information to the XML file 856 857 :param datainfo: The Data1D object the information is coming from 858 :param instr: lxml node ElementTree object to be appended to 859 """ 860 if datainfo.collimation == [] or datainfo.collimation == None: 861 coll = Collimation() 862 datainfo.collimation.append(coll) 863 for item in datainfo.collimation: 864 coll = self.create_element("SAScollimation") 865 if item.name is not None: 866 self.write_attribute(coll, "name", str(item.name)) 867 self.append(coll, instr) 868 869 self.write_node(coll, "length", item.length, 870 {"unit": item.length_unit}) 871 872 for aperture in item.aperture: 873 apert = self.create_element("aperture") 874 if 
aperture.name is not None: 875 self.write_attribute(apert, "name", str(aperture.name)) 876 if aperture.type is not None: 877 self.write_attribute(apert, "type", str(aperture.type)) 878 self.append(apert, coll) 879 880 size = self.create_element("size") 881 if aperture.size_name is not None: 882 self.write_attribute(size, "name", 883 str(aperture.size_name)) 884 written = self.write_node(size, "x", aperture.size.x, 885 {"unit": aperture.size_unit}) 886 written = written | self.write_node( \ 887 size, "y", aperture.size.y, 888 {"unit": aperture.size_unit}) 889 written = written | self.write_node( \ 890 size, "z", aperture.size.z, 891 {"unit": aperture.size_unit}) 892 if written == True: 893 self.append(size, apert) 894 895 self.write_node(apert, "distance", aperture.distance, 896 {"unit": aperture.distance_unit}) 897 898 def _write_detectors(self, datainfo, instr): 899 """ 900 Writes the detector information to the XML file 901 902 :param datainfo: The Data1D object the information is coming from 903 :param inst: lxml instrument node to be appended to 904 """ 905 if datainfo.detector == None or datainfo.detector == []: 906 det = Detector() 907 det.name = "" 908 datainfo.detector.append(det) 909 910 for item in datainfo.detector: 911 det = self.create_element("SASdetector") 912 written = self.write_node(det, "name", item.name) 913 written = written | self.write_node(det, "SDD", item.distance, 914 {"unit": item.distance_unit}) 915 if written == True: 916 self.append(det, instr) 917 918 off = self.create_element("offset") 919 written = self.write_node(off, "x", item.offset.x, 920 {"unit": item.offset_unit}) 921 written = written | self.write_node(off, "y", item.offset.y, 922 {"unit": item.offset_unit}) 923 written = written | self.write_node(off, "z", item.offset.z, 924 {"unit": item.offset_unit}) 925 if written == True: 926 self.append(off, det) 927 928 ori = self.create_element("orientation") 929 written = self.write_node(ori, "roll", item.orientation.x, 930 {"unit": 
item.orientation_unit}) 931 written = written | self.write_node(ori, "pitch", 932 item.orientation.y, 933 {"unit": item.orientation_unit}) 934 written = written | self.write_node(ori, "yaw", 935 item.orientation.z, 936 {"unit": item.orientation_unit}) 937 if written == True: 938 self.append(ori, det) 939 940 center = self.create_element("beam_center") 941 written = self.write_node(center, "x", item.beam_center.x, 942 {"unit": item.beam_center_unit}) 943 written = written | self.write_node(center, "y", 944 item.beam_center.y, 945 {"unit": item.beam_center_unit}) 946 written = written | self.write_node(center, "z", 947 item.beam_center.z, 948 {"unit": item.beam_center_unit}) 949 if written == True: 950 self.append(center, det) 951 952 pix = self.create_element("pixel_size") 953 written = self.write_node(pix, "x", item.pixel_size.x, 954 {"unit": item.pixel_size_unit}) 955 written = written | self.write_node(pix, "y", item.pixel_size.y, 956 {"unit": item.pixel_size_unit}) 957 written = written | self.write_node(pix, "z", item.pixel_size.z, 958 {"unit": item.pixel_size_unit}) 959 written = written | self.write_node(det, "slit_length", 960 item.slit_length, 961 {"unit": item.slit_length_unit}) 962 if written == True: 963 self.append(pix, det) 964 965 def _write_process_notes(self, datainfo, entry_node): 966 """ 967 Writes the process notes to the XML file 968 969 :param datainfo: The Data1D object the information is coming from 970 :param entry_node: lxml node ElementTree object to be appended to 971 972 """ 973 for item in datainfo.process: 974 node = self.create_element("SASprocess") 975 self.append(node, entry_node) 976 self.write_node(node, "name", item.name) 977 self.write_node(node, "date", item.date) 978 self.write_node(node, "description", item.description) 979 for term in item.term: 980 value = term['value'] 981 del term['value'] 982 self.write_node(node, "term", value, term) 983 for note in item.notes: 984 self.write_node(node, "SASprocessnote", note) 985 if 
len(item.notes) == 0: 986 self.write_node(node, "SASprocessnote", "") 987 988 def _write_notes(self, datainfo, entry_node): 989 """ 990 Writes the notes to the XML file and creates an empty note if none 991 exist 992 993 :param datainfo: The Data1D object the information is coming from 994 :param entry_node: lxml node ElementTree object to be appended to 995 996 """ 997 if len(datainfo.notes) == 0: 998 node = self.create_element("SASnote") 999 self.append(node, entry_node) 1000 else: 1001 for item in datainfo.notes: 1002 node = self.create_element("SASnote") 1003 self.write_text(node, item) 1004 self.append(node, entry_node) 1005 1006 def _check_origin(self, entry_node, doc, frm): 1007 """ 1008 Return the document, and the SASentry node associated with 1009 the data we just wrote. 1010 If the calling function was not the cansas reader, return a minidom 1011 object rather than an lxml object. 1012 1013 :param entry_node: lxml node ElementTree object to be appended to 1014 :param doc: entire xml tree 1015 """ 1016 if not frm: 1017 frm = inspect.stack()[1] 1018 mod_name = frm[1].replace("\\", "/").replace(".pyc", "") 1019 mod_name = mod_name.replace(".py", "") 1020 mod = mod_name.split("sas/") 1021 mod_name = mod[1] 1022 if mod_name != "sascalc/dataloader/readers/cansas_reader": 1023 string = self.to_string(doc, pretty_print=False) 1024 doc = parseString(string) 1025 node_name = entry_node.tag 1026 node_list = doc.getElementsByTagName(node_name) 1027 entry_node = node_list.item(0) 1028 return doc, entry_node 696 if self.current_dataset.dxl is not None: 697 dql_exists = True 698 if self.current_dataset.dxw is not None: 699 dqw_exists = True 700 if self.current_dataset.dx is not None: 701 dq_exists = True 702 if self.current_dataset.dy is not None: 703 di_exists = True 704 if dqw_exists and not dql_exists: 705 array_size = self.current_dataset.dxw.size - 1 706 self.current_dataset.dxl = np.append(self.current_dataset.dxl, np.zeros([array_size])) 707 elif dql_exists and 
not dqw_exists: 708 array_size = self.current_dataset.dxl.size - 1 709 self.current_dataset.dxw = np.append(self.current_dataset.dxw, np.zeros([array_size])) 710 elif not dql_exists and not dqw_exists and not dq_exists: 711 array_size = self.current_dataset.x.size - 1 712 self.current_dataset.dx = np.append(self.current_dataset.dx, np.zeros([array_size])) 713 if not di_exists: 714 array_size = self.current_dataset.y.size - 1 715 self.current_dataset.dy = np.append(self.current_dataset.dy, np.zeros([array_size])) 716 717 718 ####### All methods below are for writing CanSAS XML files ####### 719 720 721 def write(self, filename, datainfo): 722 """ 723 Write the content of a Data1D as a CanSAS XML file 724 725 :param filename: name of the file to write 726 :param datainfo: Data1D object 727 """ 728 # Create XML document 729 doc, _ = self._to_xml_doc(datainfo) 730 # Write the file 731 file_ref = open(filename, 'w') 732 if self.encoding == None: 733 self.encoding = "UTF-8" 734 doc.write(file_ref, encoding=self.encoding, 735 pretty_print=True, xml_declaration=True) 736 file_ref.close() 1029 737 1030 738 def _to_xml_doc(self, datainfo): … … 1095 803 return False 1096 804 1097 def write(self, filename, datainfo): 1098 """ 1099 Write the content of a Data1D as a CanSAS XML file 1100 1101 :param filename: name of the file to write 1102 :param datainfo: Data1D object 1103 """ 1104 # Create XML document 1105 doc, _ = self._to_xml_doc(datainfo) 1106 # Write the file 1107 file_ref = open(filename, 'w') 1108 if self.encoding == None: 1109 self.encoding = "UTF-8" 1110 doc.write(file_ref, encoding=self.encoding, 1111 pretty_print=True, xml_declaration=True) 1112 file_ref.close() 805 def _get_pi_string(self): 806 """ 807 Creates the processing instructions header for writing to file 808 """ 809 pis = self.return_processing_instructions() 810 if len(pis) > 0: 811 pi_tree = self.create_tree(pis[0]) 812 i = 1 813 for i in range(1, len(pis) - 1): 814 pi_tree = self.append(pis[i], 
pi_tree) 815 pi_string = self.to_string(pi_tree) 816 else: 817 pi_string = "" 818 return pi_string 819 820 def _create_main_node(self): 821 """ 822 Creates the primary xml header used when writing to file 823 """ 824 xsi = "http://www.w3.org/2001/XMLSchema-instance" 825 version = self.cansas_version 826 n_s = CANSAS_NS.get(version).get("ns") 827 if version == "1.1": 828 url = "http://www.cansas.org/formats/1.1/" 829 else: 830 url = "http://svn.smallangles.net/svn/canSAS/1dwg/trunk/" 831 schema_location = "{0} {1}cansas1d.xsd".format(n_s, url) 832 attrib = {"{" + xsi + "}schemaLocation" : schema_location, 833 "version" : version} 834 nsmap = {'xsi' : xsi, None: n_s} 835 836 main_node = self.create_element("{" + n_s + "}SASroot", 837 attrib=attrib, nsmap=nsmap) 838 return main_node 839 840 def _write_run_names(self, datainfo, entry_node): 841 """ 842 Writes the run names to the XML file 843 844 :param datainfo: The Data1D object the information is coming from 845 :param entry_node: lxml node ElementTree object to be appended to 846 """ 847 if datainfo.run == None or datainfo.run == []: 848 datainfo.run.append(RUN_NAME_DEFAULT) 849 datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT 850 for item in datainfo.run: 851 runname = {} 852 if item in datainfo.run_name and \ 853 len(str(datainfo.run_name[item])) > 1: 854 runname = {'name': datainfo.run_name[item]} 855 self.write_node(entry_node, "Run", item, runname) 856 857 def _write_data(self, datainfo, entry_node): 858 """ 859 Writes the I and Q data to the XML file 860 861 :param datainfo: The Data1D object the information is coming from 862 :param entry_node: lxml node ElementTree object to be appended to 863 """ 864 node = self.create_element("SASdata") 865 self.append(node, entry_node) 866 867 for i in range(len(datainfo.x)): 868 point = self.create_element("Idata") 869 node.append(point) 870 self.write_node(point, "Q", datainfo.x[i], 871 {'unit': datainfo.x_unit}) 872 if len(datainfo.y) >= i: 873 
# NOTE(review): reconstructed from a line-number-mangled diff rendering of
# sas/sascalc/dataloader/readers/cansas_reader.py. The tail of the <Idata>
# point-writing loop that preceded _write_trans_spectrum, and the
# _store_content/_store_float context fragments, are incomplete in this view
# and are intentionally omitted here.

def _write_trans_spectrum(self, datainfo, entry_node):
    """
    Writes the transmission spectrum data to the XML file.

    :param datainfo: The Data1D object the information is coming from
    :param entry_node: lxml node ElementTree object to be appended to
    """
    for spectrum in datainfo.trans_spectrum:
        node = self.create_element("SAStransmission_spectrum",
                                   {"name": spectrum.name})
        self.append(node, entry_node)
        if isinstance(spectrum.timestamp, datetime.datetime):
            # BUG FIX: the original called node.setAttribute(...), which is
            # the minidom API -- lxml elements have no setAttribute, and
            # attribute values must be strings, so this raised
            # AttributeError whenever a timestamp was present.
            self.write_attribute(node, "timestamp", str(spectrum.timestamp))
        for i in range(len(spectrum.wavelength)):
            # The original reused the outer loop index `i` here, shadowing
            # it; the spectra are now iterated directly instead.
            point = self.create_element("Tdata")
            node.append(point)
            self.write_node(point, "Lambda", spectrum.wavelength[i],
                            {'unit': spectrum.wavelength_unit})
            self.write_node(point, "T", spectrum.transmission[i],
                            {'unit': spectrum.transmission_unit})
            # BUG FIX: the original tested `len(...) >= i`, which allows
            # i == len(...) and raises IndexError; use `> i`, matching the
            # dy/dx guards in the <Idata> point writer.
            if spectrum.transmission_deviation is not None \
                    and len(spectrum.transmission_deviation) > i:
                self.write_node(point, "Tdev",
                                spectrum.transmission_deviation[i],
                                {'unit':
                                 spectrum.transmission_deviation_unit})

def _write_sample_info(self, datainfo, entry_node):
    """
    Writes the sample information to the XML file.

    :param datainfo: The Data1D object the information is coming from
    :param entry_node: lxml node ElementTree object to be appended to
    """
    sample = self.create_element("SASsample")
    if datainfo.sample.name is not None:
        self.write_attribute(sample, "name", str(datainfo.sample.name))
    self.append(sample, entry_node)
    self.write_node(sample, "ID", str(datainfo.sample.ID))
    self.write_node(sample, "thickness", datainfo.sample.thickness,
                    {"unit": datainfo.sample.thickness_unit})
    self.write_node(sample, "transmission", datainfo.sample.transmission)
    self.write_node(sample, "temperature", datainfo.sample.temperature,
                    {"unit": datainfo.sample.temperature_unit})

    # Only append <position>/<orientation> if at least one child was
    # actually written (write_node returns True when it appends).
    pos = self.create_element("position")
    written = self.write_node(pos, "x", datainfo.sample.position.x,
                              {"unit": datainfo.sample.position_unit})
    written = written | self.write_node(
        pos, "y", datainfo.sample.position.y,
        {"unit": datainfo.sample.position_unit})
    written = written | self.write_node(
        pos, "z", datainfo.sample.position.z,
        {"unit": datainfo.sample.position_unit})
    if written:
        self.append(pos, sample)

    ori = self.create_element("orientation")
    written = self.write_node(ori, "roll", datainfo.sample.orientation.x,
                              {"unit": datainfo.sample.orientation_unit})
    written = written | self.write_node(
        ori, "pitch", datainfo.sample.orientation.y,
        {"unit": datainfo.sample.orientation_unit})
    written = written | self.write_node(
        ori, "yaw", datainfo.sample.orientation.z,
        {"unit": datainfo.sample.orientation_unit})
    if written:
        self.append(ori, sample)

    for item in datainfo.sample.details:
        self.write_node(sample, "details", item)

def _write_instrument(self, datainfo, entry_node):
    """
    Writes the instrumental information to the XML file.

    :param datainfo: The Data1D object the information is coming from
    :param entry_node: lxml node ElementTree object to be appended to
    :return: the created SASinstrument node, for further population
    """
    instr = self.create_element("SASinstrument")
    self.append(instr, entry_node)
    self.write_node(instr, "name", datainfo.instrument)
    return instr

def _write_source(self, datainfo, instr):
    """
    Writes the source information to the XML file.

    :param datainfo: The Data1D object the information is coming from
    :param instr: instrument node to be appended to
    """
    source = self.create_element("SASsource")
    if datainfo.source.name is not None:
        self.write_attribute(source, "name", str(datainfo.source.name))
    self.append(source, instr)
    # The CanSAS schema requires a radiation type; default to neutron.
    if datainfo.source.radiation is None or datainfo.source.radiation == '':
        datainfo.source.radiation = "neutron"
    self.write_node(source, "radiation", datainfo.source.radiation)

    size = self.create_element("beam_size")
    if datainfo.source.beam_size_name is not None:
        self.write_attribute(size, "name",
                             str(datainfo.source.beam_size_name))
    written = self.write_node(
        size, "x", datainfo.source.beam_size.x,
        {"unit": datainfo.source.beam_size_unit})
    written = written | self.write_node(
        size, "y", datainfo.source.beam_size.y,
        {"unit": datainfo.source.beam_size_unit})
    written = written | self.write_node(
        size, "z", datainfo.source.beam_size.z,
        {"unit": datainfo.source.beam_size_unit})
    if written:
        self.append(size, source)

    self.write_node(source, "beam_shape", datainfo.source.beam_shape)
    self.write_node(source, "wavelength",
                    datainfo.source.wavelength,
                    {"unit": datainfo.source.wavelength_unit})
    self.write_node(source, "wavelength_min",
                    datainfo.source.wavelength_min,
                    {"unit": datainfo.source.wavelength_min_unit})
    self.write_node(source, "wavelength_max",
                    datainfo.source.wavelength_max,
                    {"unit": datainfo.source.wavelength_max_unit})
    self.write_node(source, "wavelength_spread",
                    datainfo.source.wavelength_spread,
                    {"unit": datainfo.source.wavelength_spread_unit})

def _write_collimation(self, datainfo, instr):
    """
    Writes the collimation information to the XML file.

    :param datainfo: The Data1D object the information is coming from
    :param instr: lxml node ElementTree object to be appended to
    """
    # Guarantee at least one (possibly empty) SAScollimation element,
    # as required by the CanSAS schema.
    if not datainfo.collimation:
        coll = Collimation()
        datainfo.collimation.append(coll)
    for item in datainfo.collimation:
        coll = self.create_element("SAScollimation")
        if item.name is not None:
            self.write_attribute(coll, "name", str(item.name))
        self.append(coll, instr)

        self.write_node(coll, "length", item.length,
                        {"unit": item.length_unit})

        for aperture in item.aperture:
            apert = self.create_element("aperture")
            if aperture.name is not None:
                self.write_attribute(apert, "name", str(aperture.name))
            if aperture.type is not None:
                self.write_attribute(apert, "type", str(aperture.type))
            self.append(apert, coll)

            size = self.create_element("size")
            if aperture.size_name is not None:
                self.write_attribute(size, "name",
                                     str(aperture.size_name))
            written = self.write_node(size, "x", aperture.size.x,
                                      {"unit": aperture.size_unit})
            written = written | self.write_node(
                size, "y", aperture.size.y,
                {"unit": aperture.size_unit})
            written = written | self.write_node(
                size, "z", aperture.size.z,
                {"unit": aperture.size_unit})
            if written:
                self.append(size, apert)

            self.write_node(apert, "distance", aperture.distance,
                            {"unit": aperture.distance_unit})

def _write_detectors(self, datainfo, instr):
    """
    Writes the detector information to the XML file.

    :param datainfo: The Data1D object the information is coming from
    :param instr: lxml instrument node to be appended to
    """
    # Guarantee at least one (possibly empty) SASdetector element,
    # as required by the CanSAS schema.
    if not datainfo.detector:
        det = Detector()
        det.name = ""
        datainfo.detector.append(det)

    for item in datainfo.detector:
        det = self.create_element("SASdetector")
        written = self.write_node(det, "name", item.name)
        written = written | self.write_node(det, "SDD", item.distance,
                                            {"unit": item.distance_unit})
        if written:
            self.append(det, instr)

        off = self.create_element("offset")
        written = self.write_node(off, "x", item.offset.x,
                                  {"unit": item.offset_unit})
        written = written | self.write_node(off, "y", item.offset.y,
                                            {"unit": item.offset_unit})
        written = written | self.write_node(off, "z", item.offset.z,
                                            {"unit": item.offset_unit})
        if written:
            self.append(off, det)

        ori = self.create_element("orientation")
        written = self.write_node(ori, "roll", item.orientation.x,
                                  {"unit": item.orientation_unit})
        written = written | self.write_node(
            ori, "pitch", item.orientation.y,
            {"unit": item.orientation_unit})
        written = written | self.write_node(
            ori, "yaw", item.orientation.z,
            {"unit": item.orientation_unit})
        if written:
            self.append(ori, det)

        center = self.create_element("beam_center")
        written = self.write_node(center, "x", item.beam_center.x,
                                  {"unit": item.beam_center_unit})
        written = written | self.write_node(
            center, "y", item.beam_center.y,
            {"unit": item.beam_center_unit})
        written = written | self.write_node(
            center, "z", item.beam_center.z,
            {"unit": item.beam_center_unit})
        if written:
            self.append(center, det)

        pix = self.create_element("pixel_size")
        written = self.write_node(pix, "x", item.pixel_size.x,
                                  {"unit": item.pixel_size_unit})
        written = written | self.write_node(pix, "y", item.pixel_size.y,
                                            {"unit": item.pixel_size_unit})
        written = written | self.write_node(pix, "z", item.pixel_size.z,
                                            {"unit": item.pixel_size_unit})
        if written:
            self.append(pix, det)
        # BUG FIX: slit_length is a child of the detector, not of
        # pixel_size; the original folded its write_node result into the
        # pixel_size `written` flag, so an empty <pixel_size/> element was
        # appended whenever only slit_length was present.
        self.write_node(det, "slit_length", item.slit_length,
                        {"unit": item.slit_length_unit})

def _write_process_notes(self, datainfo, entry_node):
    """
    Writes the process notes to the XML file.

    :param datainfo: The Data1D object the information is coming from
    :param entry_node: lxml node ElementTree object to be appended to
    """
    for item in datainfo.process:
        node = self.create_element("SASprocess")
        self.append(node, entry_node)
        self.write_node(node, "name", item.name)
        self.write_node(node, "date", item.date)
        self.write_node(node, "description", item.description)
        for term in item.term:
            # BUG FIX: the original did `del term['value']`, mutating the
            # caller's Data1D object so a second save of the same object
            # lost every term value. Build the attribute dict without
            # mutating the input instead.
            attrs = {key: val for key, val in term.items()
                     if key != 'value'}
            self.write_node(node, "term", term['value'], attrs)
        for note in item.notes:
            self.write_node(node, "SASprocessnote", note)
        if len(item.notes) == 0:
            # Schema requires at least one SASprocessnote per SASprocess.
            self.write_node(node, "SASprocessnote", "")

def _write_notes(self, datainfo, entry_node):
    """
    Writes the notes to the XML file and creates an empty note if none
    exist.

    :param datainfo: The Data1D object the information is coming from
    :param entry_node: lxml node ElementTree object to be appended to
    """
    if len(datainfo.notes) == 0:
        node = self.create_element("SASnote")
        self.append(node, entry_node)
    else:
        for item in datainfo.notes:
            node = self.create_element("SASnote")
            self.write_text(node, item)
            self.append(node, entry_node)

def _check_origin(self, entry_node, doc, frm):
    """
    Return the document, and the SASentry node associated with
    the data we just wrote.
    If the calling function was not the cansas reader, return a minidom
    object rather than an lxml object.

    :param entry_node: lxml node ElementTree object to be appended to
    :param doc: entire xml tree
    :param frm: inspect.stack() frame record of the caller, or a falsy
        value to autodetect the caller here
    :return: (doc, entry_node) -- lxml objects for the cansas reader
        itself, minidom objects for any outside caller
    """
    if not frm:
        frm = inspect.stack()[1]
    # Normalize the caller's path to a module-ish name relative to "sas/".
    # NOTE(review): assumes the caller lives under a "sas/" directory;
    # mod[1] would raise IndexError otherwise -- same as the original.
    mod_name = frm[1].replace("\\", "/").replace(".pyc", "")
    mod_name = mod_name.replace(".py", "")
    mod = mod_name.split("sas/")
    mod_name = mod[1]
    if mod_name != "sascalc/dataloader/readers/cansas_reader":
        # Outside caller: round-trip through a string to hand back
        # minidom objects (legacy panel-state code expects minidom).
        string = self.to_string(doc, pretty_print=False)
        doc = parseString(string)
        node_name = entry_node.tag
        node_list = doc.getElementsByTagName(node_name)
        entry_node = node_list.item(0)
    return doc, entry_node


# DO NOT REMOVE Called by outside packages:
#    sas.sasgui.perspectives.invariant.invariant_state
#    sas.sasgui.perspectives.fitting.pagestate
def get_content(location, node):
    """
    Get the first instance of the content of a xpath location.

    :param location: xpath location
    :param node: node to start at

    :return: Element, or None
    """
    nodes = node.xpath(location,
                       namespaces={'ns': CANSAS_NS.get("1.0").get("ns")})
    if len(nodes) > 0:
        return nodes[0]
    else:
        return None

# DO NOT REMOVE Called by outside packages:
#    sas.sasgui.perspectives.fitting.pagestate
def write_node(doc, parent, name, value, attr=None):
    """
    Append a child element with a text node (and optional attributes)
    to *parent*, but only when *value* is not None.

    :param doc: document DOM
    :param parent: parent node
    :param name: tag of the element
    :param value: value of the child text node
    :param attr: attribute dictionary

    :return: True if something was appended, otherwise False
    """
    if attr is None:
        attr = {}
    if value is not None:
        node = doc.createElement(name)
        node.appendChild(doc.createTextNode(str(value)))
        for item, item_value in attr.items():
            node.setAttribute(item, item_value)
        parent.appendChild(node)
        return True
    return False
Note: See TracChangeset
for help on using the changeset viewer.