[7d6351e] | 1 | """ |
---|
[19b628f] | 2 | CanSAS data reader - new recursive cansasVersion. |
---|
[7d6351e] | 3 | """ |
---|
[0997158f] | 4 | ############################################################################ |
---|
| 5 | #This software was developed by the University of Tennessee as part of the |
---|
| 6 | #Distributed Data Analysis of Neutron Scattering Experiments (DANSE) |
---|
| 7 | #project funded by the US National Science Foundation. |
---|
| 8 | #If you use DANSE applications to do scientific research that leads to |
---|
| 9 | #publication, we ask that you acknowledge the use of the software with the |
---|
| 10 | #following sentence: |
---|
| 11 | #This work benefited from DANSE software developed under NSF award DMR-0520547. |
---|
| 12 | #copyright 2008,2009 University of Tennessee |
---|
| 13 | ############################################################################# |
---|
| 14 | |
---|
[8780e9a] | 15 | import logging |
---|
| 16 | import numpy |
---|
[a7a5886] | 17 | import os |
---|
| 18 | import sys |
---|
[ad8034f] | 19 | from sans.dataloader.data_info import Data1D |
---|
| 20 | from sans.dataloader.data_info import Collimation |
---|
| 21 | from sans.dataloader.data_info import Detector |
---|
| 22 | from sans.dataloader.data_info import Process |
---|
| 23 | from sans.dataloader.data_info import Aperture |
---|
[19b628f] | 24 | import xml_reader |
---|
[b0d0723] | 25 | import xml.dom.minidom |
---|
[19b628f] | 26 | from cansas_constants import cansasConstants |
---|
| 27 | |
---|
[da96629] | 28 | _ZERO = 1e-16 |
---|
[5a0dac1f] | 29 | HAS_CONVERTER = True |
---|
[b39c817] | 30 | try: |
---|
[ffbe487] | 31 | from sans.data_util.nxsunit import Converter |
---|
[b39c817] | 32 | except: |
---|
[5a0dac1f] | 33 | HAS_CONVERTER = False |
---|
[19b628f] | 34 | |
---|
| 35 | CANSAS_FORMAT = cansasConstants.CANSAS_FORMAT |
---|
| 36 | CANSAS_NS = cansasConstants.CANSAS_NS |
---|
[5a0dac1f] | 37 | ALLOW_ALL = True |
---|
[b0d0723] | 38 | |
---|
[7d6351e] | 39 | |
---|
def write_node(doc, parent, name, value, attr=None):
    """
    Append a child element named *name* with text content *value* to *parent*.

    :param doc: document DOM used to create the new nodes
    :param parent: parent node the new element is appended to
    :param name: tag of the element
    :param value: value of the child text node; nothing is written when None
    :param attr: optional attribute dictionary (attribute name -> value)

    :return: True if something was appended, otherwise False
    """
    # A literal {} default would be a mutable default argument shared
    # between calls; use None as the sentinel instead.
    if attr is None:
        attr = {}
    if value is None:
        return False
    node = doc.createElement(name)
    node.appendChild(doc.createTextNode(str(value)))
    for item in attr:
        node.setAttribute(item, attr[item])
    parent.appendChild(node)
    return True
---|
[19b628f] | 58 | |
---|
[7d6351e] | 59 | |
---|
def get_content(location, node):
    """
    Get the first instance of the content of a xpath location.

    :param location: xpath location
    :param node: node to start at

    :return: Element, or None
    """
    matches = node.xpath(location, namespaces={'ns': CANSAS_NS})
    # Empty result list means the location does not exist in this node
    return matches[0] if matches else None
---|
[8780e9a] | 75 | |
---|
[7d6351e] | 76 | |
---|
def get_float(location, node):
    """
    Get the content of a node as a float.

    :param location: xpath location
    :param node: node to start at

    :return: (value, attr) where value is the float content, or None when
        the location is missing or not numeric, and attr carries the
        node's 'unit' attribute when one is present
    """
    nodes = node.xpath(location, namespaces={'ns': CANSAS_NS})

    value = None
    attr = {}
    if len(nodes) > 0:
        try:
            value = float(nodes[0].text)
        # Narrowed from a bare except: float() raises TypeError for a
        # missing (None) text node and ValueError for non-numeric text.
        except (TypeError, ValueError):
            # Could not parse, skip and return None
            msg = "cansas_reader.get_float: could not "
            msg += " convert '%s' to float" % nodes[0].text
            logging.error(msg)
        if nodes[0].get('unit') is not None:
            attr['unit'] = nodes[0].get('unit')
    return value, attr
---|
| 99 | |
---|
[19b628f] | 100 | |
---|
| 101 | |
---|
class CANSASError(Exception):
    """Common base class for all CanSAS reader exceptions."""
---|
| 105 | |
---|
class NotCANSASFileError(CANSASError):
    """Raised when a file handed to the reader is not valid CanSAS XML."""

    def __init__(self):
        # Human-readable description exposed through __str__
        self.value = "This is not a proper CanSAS file."

    def __str__(self):
        return repr(self.value)
---|
| 111 | |
---|
| 112 | class Reader(): |
---|
[8780e9a] | 113 | """ |
---|
[0997158f] | 114 | Class to load cansas 1D XML files |
---|
| 115 | |
---|
| 116 | :Dependencies: |
---|
[19b628f] | 117 | The CanSAS reader requires PyXML 0.8.4 or later. |
---|
[8780e9a] | 118 | """ |
---|
[19b628f] | 119 | ##CanSAS version - defaults to version 1.0 |
---|
| 120 | cansasVersion = "1.0" |
---|
| 121 | ##Data reader |
---|
| 122 | reader = xml_reader.XMLreader() |
---|
| 123 | errors = [] |
---|
| 124 | |
---|
| 125 | type_name = "canSAS" |
---|
| 126 | |
---|
[28caa03] | 127 | ## Wildcards |
---|
[19b628f] | 128 | type = ["XML files (*.xml)|*.xml"] |
---|
[8780e9a] | 129 | ## List of allowed extensions |
---|
[19b628f] | 130 | ext = ['.xml', '.XML'] |
---|
| 131 | |
---|
| 132 | ## Flag to bypass extension check |
---|
| 133 | allow_all = True |
---|
[8780e9a] | 134 | |
---|
def __init__(self):
    """Set up a reader instance with its own empty error list."""
    ## Errors accumulated while parsing, drained by read()
    self.errors = list()
---|
[19b628f] | 138 | |
---|
def isCansas(self):
    """
    Checks to see if the xml file is a CanSAS file.

    The document must validate against the schema, and the namespace
    declared on the root element must match the namespace expected for
    self.cansasVersion.
    """
    if not self.reader.validateXML():
        return False
    root = self.reader.xmlroot
    attr_names = root.keys()
    expected_ns = CANSAS_NS.get(self.cansasVersion).get("ns")
    declared_ns = root.get(attr_names[1]).rsplit(" ")[0]
    return expected_ns == declared_ns
---|
[fe78c7b] | 148 | |
---|
def read(self, xml):
    """
    Validate and read in an xml file in the canSAS format.

    :param xml: A canSAS file path in proper XML format

    :return: list of Data1D objects (or "Invalid XML" strings) when the
        path exists and its extension is allowed, otherwise None
    """
    # X - Q value; Y - Intensity (Abs)
    x = numpy.empty(0)
    y = numpy.empty(0)
    dx = numpy.empty(0)
    dy = numpy.empty(0)
    dxl = numpy.empty(0)
    dxw = numpy.empty(0)

    # output - Final list of Data1D objects
    output = []
    # ns - Namespace hierarchy for current xml object
    ns = []

    # Check that the file exists
    if os.path.isfile(xml):
        basename = os.path.basename(xml)
        _, extension = os.path.splitext(basename)
        # If the file type is not allowed, return nothing
        if extension in self.ext or self.allow_all:
            # Locate the schema directory relative to the installed
            # xml_reader module (path normalized to forward slashes)
            base_name = xml_reader.__file__
            base_name = base_name.replace("\\","/")
            base = base_name.split("/sans/")[0]

            # Load in the xml file and get the cansas version from the header
            self.reader.setXMLFile(xml)
            root = self.reader.xmlroot
            if root is None:
                root = {}
            self.cansasVersion = root.get("version", "1.0")

            # Generic values for the cansas file based on the version
            # NOTE(review): "1.0" here is CANSAS_NS.get()'s *default value*,
            # not a fallback key - an unknown version yields the string
            # "1.0", whose .get("schema") would fail. Confirm intent.
            cansas_defaults = CANSAS_NS.get(self.cansasVersion, "1.0")
            schema_path = "{0}/sans/dataloader/readers/schema/{1}".format(base, cansas_defaults.get("schema")).replace("\\", "/")

            # Link a schema to the XML file.
            self.reader.setSchema(schema_path)

            # Try to load the file, but raise an error if unable to.
            # Check the file matches the XML schema
            try:
                if self.isCansas():
                    # Get each SASentry from the XML file and add it to a list.
                    entry_list = root.xpath('/ns:SASroot/ns:SASentry',
                                            namespaces={'ns': cansas_defaults.get("ns")})
                    ns.append("SASentry")

                    # If there are multiple files, modify the name for each is unique
                    multipleFiles = len(entry_list) - 1
                    n = 0
                    name = basename
                    # Parse each SASentry item
                    for entry in entry_list:

                        # Define a new Data1D object with zeroes for x and y
                        data1D = Data1D(x,y,dx,dy)
                        data1D.dxl = dxl
                        data1D.dxw = dxw

                        # If more than one SASentry, number each in order
                        if multipleFiles:
                            name += "_{0}".format(n)
                            n += 1

                        # Set the Data1D name and then parse the entry.
                        # The entry is appended to a list of entry values
                        data1D.filename = name
                        data1D.meta_data["loader"] = "CanSAS 1D"
                        return_value, extras = self._parse_entry(entry, ns, data1D)
                        del extras[:]

                        # Final cleanup - Remove empty nodes, verify array sizes are correct
                        for error in self.errors:
                            return_value.errors.append(error)
                        del self.errors[:]
                        # NOTE(review): numpy.trim_zeros returns a trimmed
                        # *copy* and does not modify its argument, so these
                        # bare calls are no-ops - confirm whether the results
                        # were meant to be assigned back
                        numpy.trim_zeros(return_value.x)
                        numpy.trim_zeros(return_value.y)
                        numpy.trim_zeros(return_value.dy)
                        size_dx = return_value.dx.size
                        size_dxl = return_value.dxl.size
                        size_dxw = return_value.dxw.size
                        # Slit-smeared and pinhole resolution data are
                        # mutually exclusive; drop whichever pair is empty
                        if size_dxl == 0 and size_dxw == 0:
                            return_value.dxl = None
                            return_value.dxw = None
                            numpy.trim_zeros(return_value.dx)
                        elif size_dx == 0:
                            return_value.dx = None
                            size_dx = size_dxl
                            numpy.trim_zeros(return_value.dxl)
                            numpy.trim_zeros(return_value.dxw)

                        output.append(return_value)
                else:
                    # Not valid CanSAS: report where validation failed
                    value = self.reader.findInvalidXML()
                    output.append("Invalid XML at: {0}".format(value))
            except:
                # If the file does not match the schema, raise this error
                # NOTE(review): bare except discards the original exception
                # and traceback; consider narrowing or chaining it
                raise RuntimeError, "%s cannot be read \n" % xml
            # Return a list of parsed entries that dataloader can manage
            return output
    return None
---|
| 254 | |
---|
def _create_unique_key(self, dictionary, name, i):
    """
    Build a key that is not yet present in *dictionary* by replacing any
    existing numeric suffix on *name* with an increasing counter.

    :param dictionary: mapping the key must be unique within
    :param name: base key name (text before the first underscore is kept)
    :param i: starting value of the suffix counter
    :return: a key not currently present in dictionary
    """
    # Iterative form of the original recursive probe: bump the counter
    # until the candidate key is free.
    while dictionary.get(name) is not None:
        i += 1
        name = "{0}_{1}".format(name.split("_")[0], i)
    return name
---|
| 262 | |
---|
def _iterate_namespace(self, ns):
    """
    Walk the CANSAS_FORMAT constants tree along the namespace path *ns*
    and work out how the current element should be stored.

    :param ns: list of element names from "SASentry" down to the
        current element
    :return: tuple (current_level, ns_variable, ns_datatype, ns_optional)
    """
    # The current level to look through in cansas_constants.
    current_level = CANSAS_FORMAT.get("SASentry")
    # Defaults for variable and datatype
    ns_variable = "{0}.meta_data[\"{2}\"] = \"{1}\""
    ns_datatype = "content"
    ns_optional = True
    for name in ns:
        if name != "SASentry":
            children = current_level.get("children")
            current_level = children.get(name, "")
            if current_level == "":
                # Element not listed explicitly; fall back to the
                # catch-all "<any>" entry of the *parent's* children.
                # (The original called .get on the empty string, which
                # raised AttributeError.)
                current_level = children.get("<any>", "")
            cl_variable = current_level.get("variable", "")
            cl_datatype = current_level.get("storeas", "")
            cl_units_optional = current_level.get("units_required", "")
            # Where and how to store the variable for the given namespace
            # The CANSAS_CONSTANTS tree is hierarchical: when a level has
            # no value of its own, inherit the parent's.
            ns_variable = cl_variable if cl_variable != "" else ns_variable
            ns_datatype = cl_datatype if cl_datatype != "" else ns_datatype
            # Compare against "" like the two lines above (the original
            # compared against ns_optional itself, which clobbered the
            # flag with "" whenever units_required was absent)
            ns_optional = cl_units_optional if cl_units_optional != "" else ns_optional
    return current_level, ns_variable, ns_datatype, ns_optional
---|
| 284 | |
---|
def _unit_conversion(self, new_current_level, attr, data1D, node_value, optional = True):
    """
    Convert *node_value* into the default unit that data1D stores for
    this element, using the 'unit' attribute read from the XML node.

    Attribute access on data1D is dynamic: the attribute name comes from
    the constants tree, so reads/writes go through exec-built statements.

    :param new_current_level: constants-tree level for the current element
    :param node_value: raw text content of the node
    :param optional: when True, conversion failures are logged and
        collected in self.errors; when False they raise ValueError
    :return: (node_value, value_unit) - node_value is re-emitted as the
        string "float(...)" for later exec by the caller
    """
    value_unit = ''
    if 'unit' in attr and new_current_level.get('unit') is not None:
        try:
            # exec is used so the (Python 2) local can be rebound from
            # a dynamically built statement
            if isinstance(node_value, float) is False:
                exec("node_value = float({0})".format(node_value))
            default_unit = None
            # Name of the data1D attribute holding the default unit,
            # e.g. "x_unit"; fetched dynamically below
            unitname = new_current_level.get("unit")
            exec "default_unit = data1D.{0}".format(unitname)
            local_unit = attr['unit']
            # Only convert when the file's unit differs from the default
            # and both are meaningful
            if local_unit.lower() != default_unit.lower() and local_unit is not None\
                and local_unit.lower() != "none" and default_unit is not None:
                if HAS_CONVERTER == True:
                    try:
                        # Convert the value into the default unit
                        data_conv_q = Converter(attr['unit'])
                        value_unit = default_unit
                        exec "node_value = data_conv_q(node_value, units=data1D.{0})".format(unitname)
                    except:
                        # Conversion failed: record (and possibly raise)
                        err_msg = "CanSAS reader: could not convert "
                        err_msg += "Q unit {0}; ".format(local_unit)
                        intermediate = "err_msg += \"expecting [{1}] {2}\".format(data1D.{0}, sys.exc_info()[1])".format(unitname, "{0}", "{1}")
                        exec intermediate
                        self.errors.append(err_msg)
                        if optional:
                            logging.info(err_msg)
                        else:
                            raise ValueError, err_msg
                else:
                    # No unit converter available: keep the file's unit
                    # and record the mismatch
                    value_unit = local_unit
                    err_msg = "CanSAS reader: unrecognized %s unit [%s];"\
                    % (node_value, default_unit)
                    err_msg += " expecting [%s]" % local_unit
                    self.errors.append(err_msg)
                    if optional:
                        logging.info(err_msg)
                    else:
                        raise ValueError, err_msg
            else:
                # Units already agree (or are not meaningful): no conversion
                value_unit = local_unit
        except:
            err_msg = "CanSAS reader: could not convert "
            # NOTE(review): the trailing comma below makes the right-hand
            # side a tuple, so this str += raises TypeError inside the
            # error path - confirm the comma should be removed
            err_msg += "Q unit [%s]; " % attr['unit'],
            exec "err_msg += \"expecting [%s]\n %s\" % (data1D.{0}, sys.exc_info()[1])".format(unitname)
            self.errors.append(err_msg)
            if optional:
                logging.info(err_msg)
            else:
                raise ValueError, err_msg
    elif 'unit' in attr:
        # No default unit to convert to: keep the file's unit verbatim
        value_unit = attr['unit']
    # Re-emit the value as "float(...)" so the caller's exec'd assignment
    # statement coerces it on execution
    node_value = "float({0})".format(node_value)
    return node_value, value_unit
---|
| 337 | |
---|
| 338 | def _parse_entry(self, dom, ns, data1D, extras = []): |
---|
| 339 | """ |
---|
| 340 | Parse a SASEntry - new recursive method for parsing the dom of |
---|
| 341 | the CanSAS data format. This will allow multiple data files |
---|
| 342 | and extra nodes to be read in simultaneously. |
---|
[8780e9a] | 343 | |
---|
[19b628f] | 344 | :param dom: dom object with a namespace base of ns |
---|
| 345 | :param ns: A list of element names that lead up to the dom object |
---|
| 346 | :param data1D: The data1D object that will be modified |
---|
| 347 | """ |
---|
| 348 | |
---|
| 349 | # A portion of every namespace entry |
---|
| 350 | base_ns = "{0}{1}{2}".format("{", CANSAS_NS.get(self.cansasVersion).get("ns"), "}") |
---|
| 351 | unit = '' |
---|
[b0d0723] | 352 | |
---|
[19b628f] | 353 | # Go through each child in the parent element |
---|
| 354 | for node in dom: |
---|
[8780e9a] | 355 | try: |
---|
[19b628f] | 356 | # Get the element name and set the current ns level |
---|
| 357 | tagname = node.tag.replace(base_ns, "") |
---|
| 358 | tagname_original = tagname |
---|
| 359 | ns.append(tagname) |
---|
| 360 | attr = node.attrib |
---|
[579ba85] | 361 | |
---|
[19b628f] | 362 | # Look for special cases |
---|
| 363 | save_data1D = data1D |
---|
| 364 | if tagname == "SASdetector": |
---|
| 365 | data1D = Detector() |
---|
| 366 | elif tagname == "SAScollimation": |
---|
| 367 | data1D = Collimation() |
---|
| 368 | elif tagname == "SASprocess": |
---|
| 369 | data1D = Process() |
---|
| 370 | for child in node: |
---|
| 371 | if child.tag.replace(base_ns, "") == "term": |
---|
| 372 | term_attr = {} |
---|
| 373 | for attr in child.keys(): |
---|
| 374 | term_attr[attr] = ' '.join(child.get(attr).split()) |
---|
| 375 | if child.text is not None: |
---|
| 376 | term_attr['value'] = ' '.join(child.text.split()) |
---|
| 377 | data1D.term.append(term_attr) |
---|
| 378 | elif tagname == "aperture": |
---|
| 379 | data1D = Aperture() |
---|
[8780e9a] | 380 | |
---|
[19b628f] | 381 | # Get where to store content |
---|
| 382 | new_current_level, ns_variable, ns_datatype, optional = self._iterate_namespace(ns) |
---|
| 383 | # If the element is a child element, recurse |
---|
| 384 | if node.getchildren() is not None: |
---|
| 385 | # Returned value is new Data1D object with all previous and new values in it. |
---|
| 386 | data1D, extras = self._parse_entry(node, ns, data1D, extras) |
---|
| 387 | |
---|
| 388 | #Get the information from the node |
---|
| 389 | node_value = node.text |
---|
| 390 | if node_value == "": |
---|
| 391 | node_value = None |
---|
| 392 | if node_value is not None: |
---|
| 393 | node_value = ' '.join(node_value.split()) |
---|
[8780e9a] | 394 | |
---|
[19b628f] | 395 | # If the value is a float, compile with units. |
---|
| 396 | if ns_datatype == "float": |
---|
| 397 | # If an empty value is given, store as zero. |
---|
| 398 | if node_value is None or node_value.isspace() or node_value.lower() == "nan": |
---|
| 399 | node_value = "0.0" |
---|
| 400 | node_value, unit = self._unit_conversion(new_current_level, attr, data1D, node_value, optional) |
---|
| 401 | |
---|
| 402 | # If appending to a dictionary (meta_data | run_name), name sure the key is unique |
---|
| 403 | if ns_variable == "{0}.meta_data[\"{2}\"] = \"{1}\"": |
---|
| 404 | # If we are within a Process, Detector, Collimation or Aperture instance, pull out old data1D |
---|
| 405 | tagname = self._create_unique_key(data1D.meta_data, tagname, 0) |
---|
| 406 | if isinstance(data1D, Data1D) == False: |
---|
| 407 | store_me = ns_variable.format("data1D", node_value, tagname) |
---|
| 408 | extras.append(store_me) |
---|
| 409 | ns_variable = None |
---|
| 410 | if ns_variable == "{0}.run_name[\"{2}\"] = \"{1}\"": |
---|
| 411 | tagname = self._create_unique_key(data1D.run_name, tagname, 0) |
---|
[8780e9a] | 412 | |
---|
[19b628f] | 413 | # Check for Data1D object and any extra commands to save |
---|
| 414 | if isinstance(data1D, Data1D): |
---|
| 415 | for item in extras: |
---|
| 416 | exec item |
---|
| 417 | # Don't bother saving empty information unless it is a float |
---|
| 418 | if ns_variable is not None and node_value is not None and node_value.isspace() == False: |
---|
| 419 | # Format a string and then execute it. |
---|
| 420 | store_me = ns_variable.format("data1D", node_value, tagname) |
---|
| 421 | exec store_me |
---|
| 422 | # Get attributes and process them |
---|
| 423 | if attr is not None: |
---|
| 424 | for key in node.keys(): |
---|
| 425 | try: |
---|
| 426 | cansas_attrib = new_current_level.get("attributes").get(key) |
---|
| 427 | attrib_variable = cansas_attrib.get("variable") |
---|
| 428 | if key == 'unit' and unit != '': |
---|
| 429 | attrib_value = unit |
---|
| 430 | else: |
---|
| 431 | attrib_value = node.attrib[key] |
---|
| 432 | store_attr = attrib_variable.format("data1D", attrib_value, key) |
---|
| 433 | exec store_attr |
---|
| 434 | except AttributeError as e: |
---|
[7d6351e] | 435 | pass |
---|
[19b628f] | 436 | |
---|
| 437 | |
---|
| 438 | except Exception as e: |
---|
| 439 | exc_type, exc_obj, exc_tb = sys.exc_info() |
---|
| 440 | fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] |
---|
| 441 | print(e, exc_type, fname, exc_tb.tb_lineno, tagname, exc_obj) |
---|
| 442 | finally: |
---|
| 443 | # Save special cases in original data1D object and then restore the data1D |
---|
| 444 | if tagname_original == "SASdetector": |
---|
| 445 | save_data1D.detector.append(data1D) |
---|
| 446 | elif tagname_original == "SAScollimation": |
---|
| 447 | save_data1D.collimation.append(data1D) |
---|
| 448 | elif tagname_original == "SASprocess": |
---|
| 449 | save_data1D.process.append(data1D) |
---|
| 450 | elif tagname_original == "aperture": |
---|
| 451 | save_data1D.aperture.append(data1D) |
---|
[e390933] | 452 | else: |
---|
[19b628f] | 453 | save_data1D = data1D |
---|
| 454 | data1D = save_data1D |
---|
| 455 | # Remove tagname from ns to restore original base |
---|
| 456 | ns.remove(tagname_original) |
---|
[d6513cd] | 457 | |
---|
[19b628f] | 458 | return data1D, extras |
---|
[d6513cd] | 459 | |
---|
[b3de3a45] | 460 | def _to_xml_doc(self, datainfo): |
---|
[4c00964] | 461 | """ |
---|
[0997158f] | 462 | Create an XML document to contain the content of a Data1D |
---|
| 463 | |
---|
| 464 | :param datainfo: Data1D object |
---|
[4c00964] | 465 | """ |
---|
| 466 | |
---|
[7d8094b] | 467 | if not issubclass(datainfo.__class__, Data1D): |
---|
[4c00964] | 468 | raise RuntimeError, "The cansas writer expects a Data1D instance" |
---|
| 469 | |
---|
[19b628f] | 470 | ns = CANSAS_NS.get(self.cansasVersion).get("ns") |
---|
[4c00964] | 471 | doc = xml.dom.minidom.Document() |
---|
| 472 | main_node = doc.createElement("SASroot") |
---|
[19b628f] | 473 | main_node.setAttribute("version", self.cansasVersion) |
---|
| 474 | main_node.setAttribute("xmlns", ns) |
---|
[a7a5886] | 475 | main_node.setAttribute("xmlns:xsi", |
---|
| 476 | "http://www.w3.org/2001/XMLSchema-instance") |
---|
| 477 | main_node.setAttribute("xsi:schemaLocation", |
---|
[19b628f] | 478 | "{0} http://svn.smallangles.net/svn/canSAS/1dwg/trunk/cansas1d.xsd".format(ns)) |
---|
[fee780b] | 479 | |
---|
[4c00964] | 480 | doc.appendChild(main_node) |
---|
| 481 | |
---|
| 482 | entry_node = doc.createElement("SASentry") |
---|
| 483 | main_node.appendChild(entry_node) |
---|
| 484 | |
---|
[579ba85] | 485 | write_node(doc, entry_node, "Title", datainfo.title) |
---|
| 486 | for item in datainfo.run: |
---|
| 487 | runname = {} |
---|
[7d6351e] | 488 | if item in datainfo.run_name and \ |
---|
| 489 | len(str(datainfo.run_name[item])) > 1: |
---|
| 490 | runname = {'name': datainfo.run_name[item]} |
---|
[579ba85] | 491 | write_node(doc, entry_node, "Run", item, runname) |
---|
[4c00964] | 492 | |
---|
| 493 | # Data info |
---|
| 494 | node = doc.createElement("SASdata") |
---|
| 495 | entry_node.appendChild(node) |
---|
| 496 | |
---|
[579ba85] | 497 | for i in range(len(datainfo.x)): |
---|
| 498 | pt = doc.createElement("Idata") |
---|
| 499 | node.appendChild(pt) |
---|
[7d6351e] | 500 | write_node(doc, pt, "Q", datainfo.x[i], {'unit': datainfo.x_unit}) |
---|
| 501 | if len(datainfo.y) >= i: |
---|
[a7a5886] | 502 | write_node(doc, pt, "I", datainfo.y[i], |
---|
[7d6351e] | 503 | {'unit': datainfo.y_unit}) |
---|
[19b628f] | 504 | if datainfo.dy != None and len(datainfo.dy) >= i: |
---|
| 505 | write_node(doc, pt, "Idev", datainfo.dy[i], |
---|
| 506 | {'unit': datainfo.y_unit}) |
---|
[0d8642c9] | 507 | if datainfo.dx != None and len(datainfo.dx) >= i: |
---|
[a7a5886] | 508 | write_node(doc, pt, "Qdev", datainfo.dx[i], |
---|
[7d6351e] | 509 | {'unit': datainfo.x_unit}) |
---|
[0d8642c9] | 510 | if datainfo.dxw != None and len(datainfo.dxw) >= i: |
---|
| 511 | write_node(doc, pt, "dQw", datainfo.dxw[i], |
---|
[7d6351e] | 512 | {'unit': datainfo.x_unit}) |
---|
[19b628f] | 513 | if datainfo.dxl != None and len(datainfo.dxl) >= i: |
---|
| 514 | write_node(doc, pt, "dQl", datainfo.dxl[i], |
---|
| 515 | {'unit': datainfo.x_unit}) |
---|
| 516 | |
---|
| 517 | # Transmission Spectrum Info |
---|
| 518 | if len(datainfo.trans_spectrum.wavelength) > 0: |
---|
| 519 | node = doc.createElement("SAStransmission_spectrum") |
---|
| 520 | entry_node.appendChild(node) |
---|
| 521 | for i in range(len(datainfo.trans_spectrum.wavelength)): |
---|
| 522 | pt = doc.createElement("Tdata") |
---|
| 523 | node.appendChild(pt) |
---|
| 524 | write_node(doc, pt, "Lambda", datainfo.trans_spectrum.wavelength[i], |
---|
| 525 | {'unit': datainfo.trans_spectrum.wavelength_unit}) |
---|
| 526 | write_node(doc, pt, "T", datainfo.trans_spectrum.transmission[i], |
---|
| 527 | {'unit': datainfo.trans_spectrum.transmission_unit}) |
---|
| 528 | if datainfo.trans_spectrum.transmission_deviation != None \ |
---|
| 529 | and len(datainfo.trans_spectrum.transmission_deviation) >= i: |
---|
| 530 | write_node(doc, pt, "Tdev", datainfo.trans_spectrum.transmission_deviation[i], |
---|
| 531 | {'unit': datainfo.trans_spectrum.transmission_deviation_unit}) |
---|
[579ba85] | 532 | |
---|
[4c00964] | 533 | # Sample info |
---|
| 534 | sample = doc.createElement("SASsample") |
---|
[579ba85] | 535 | if datainfo.sample.name is not None: |
---|
| 536 | sample.setAttribute("name", str(datainfo.sample.name)) |
---|
[4c00964] | 537 | entry_node.appendChild(sample) |
---|
[579ba85] | 538 | write_node(doc, sample, "ID", str(datainfo.sample.ID)) |
---|
[a7a5886] | 539 | write_node(doc, sample, "thickness", datainfo.sample.thickness, |
---|
[7d6351e] | 540 | {"unit": datainfo.sample.thickness_unit}) |
---|
[4c00964] | 541 | write_node(doc, sample, "transmission", datainfo.sample.transmission) |
---|
[a7a5886] | 542 | write_node(doc, sample, "temperature", datainfo.sample.temperature, |
---|
[7d6351e] | 543 | {"unit": datainfo.sample.temperature_unit}) |
---|
[4c00964] | 544 | |
---|
| 545 | pos = doc.createElement("position") |
---|
[a7a5886] | 546 | written = write_node(doc, pos, "x", datainfo.sample.position.x, |
---|
[7d6351e] | 547 | {"unit": datainfo.sample.position_unit}) |
---|
[a7a5886] | 548 | written = written | write_node(doc, pos, "y", |
---|
| 549 | datainfo.sample.position.y, |
---|
[7d6351e] | 550 | {"unit": datainfo.sample.position_unit}) |
---|
[a7a5886] | 551 | written = written | write_node(doc, pos, "z", |
---|
| 552 | datainfo.sample.position.z, |
---|
[7d6351e] | 553 | {"unit": datainfo.sample.position_unit}) |
---|
[4c00964] | 554 | if written == True: |
---|
| 555 | sample.appendChild(pos) |
---|
| 556 | |
---|
| 557 | ori = doc.createElement("orientation") |
---|
[a7a5886] | 558 | written = write_node(doc, ori, "roll", |
---|
| 559 | datainfo.sample.orientation.x, |
---|
[7d6351e] | 560 | {"unit": datainfo.sample.orientation_unit}) |
---|
[a7a5886] | 561 | written = written | write_node(doc, ori, "pitch", |
---|
| 562 | datainfo.sample.orientation.y, |
---|
[7d6351e] | 563 | {"unit": datainfo.sample.orientation_unit}) |
---|
[a7a5886] | 564 | written = written | write_node(doc, ori, "yaw", |
---|
| 565 | datainfo.sample.orientation.z, |
---|
[7d6351e] | 566 | {"unit": datainfo.sample.orientation_unit}) |
---|
[4c00964] | 567 | if written == True: |
---|
| 568 | sample.appendChild(ori) |
---|
| 569 | |
---|
[19b628f] | 570 | for item in datainfo.sample.details: |
---|
| 571 | write_node(doc, sample, "details", item) |
---|
| 572 | |
---|
[4c00964] | 573 | # Instrument info |
---|
| 574 | instr = doc.createElement("SASinstrument") |
---|
| 575 | entry_node.appendChild(instr) |
---|
| 576 | |
---|
| 577 | write_node(doc, instr, "name", datainfo.instrument) |
---|
| 578 | |
---|
| 579 | # Source |
---|
| 580 | source = doc.createElement("SASsource") |
---|
[579ba85] | 581 | if datainfo.source.name is not None: |
---|
| 582 | source.setAttribute("name", str(datainfo.source.name)) |
---|
[4c00964] | 583 | instr.appendChild(source) |
---|
| 584 | write_node(doc, source, "radiation", datainfo.source.radiation) |
---|
[19b628f] | 585 | |
---|
[579ba85] | 586 | size = doc.createElement("beam_size") |
---|
| 587 | if datainfo.source.beam_size_name is not None: |
---|
| 588 | size.setAttribute("name", str(datainfo.source.beam_size_name)) |
---|
[a7a5886] | 589 | written = write_node(doc, size, "x", datainfo.source.beam_size.x, |
---|
[7d6351e] | 590 | {"unit": datainfo.source.beam_size_unit}) |
---|
[a7a5886] | 591 | written = written | write_node(doc, size, "y", |
---|
| 592 | datainfo.source.beam_size.y, |
---|
[7d6351e] | 593 | {"unit": datainfo.source.beam_size_unit}) |
---|
[a7a5886] | 594 | written = written | write_node(doc, size, "z", |
---|
| 595 | datainfo.source.beam_size.z, |
---|
[7d6351e] | 596 | {"unit": datainfo.source.beam_size_unit}) |
---|
[579ba85] | 597 | if written == True: |
---|
| 598 | source.appendChild(size) |
---|
| 599 | |
---|
[19b628f] | 600 | write_node(doc, source, "beam_shape", datainfo.source.beam_shape) |
---|
[a7a5886] | 601 | write_node(doc, source, "wavelength", |
---|
| 602 | datainfo.source.wavelength, |
---|
[7d6351e] | 603 | {"unit": datainfo.source.wavelength_unit}) |
---|
[a7a5886] | 604 | write_node(doc, source, "wavelength_min", |
---|
| 605 | datainfo.source.wavelength_min, |
---|
[7d6351e] | 606 | {"unit": datainfo.source.wavelength_min_unit}) |
---|
[a7a5886] | 607 | write_node(doc, source, "wavelength_max", |
---|
| 608 | datainfo.source.wavelength_max, |
---|
[7d6351e] | 609 | {"unit": datainfo.source.wavelength_max_unit}) |
---|
[a7a5886] | 610 | write_node(doc, source, "wavelength_spread", |
---|
| 611 | datainfo.source.wavelength_spread, |
---|
[7d6351e] | 612 | {"unit": datainfo.source.wavelength_spread_unit}) |
---|
[4c00964] | 613 | |
---|
| 614 | # Collimation |
---|
| 615 | for item in datainfo.collimation: |
---|
| 616 | coll = doc.createElement("SAScollimation") |
---|
[579ba85] | 617 | if item.name is not None: |
---|
| 618 | coll.setAttribute("name", str(item.name)) |
---|
[4c00964] | 619 | instr.appendChild(coll) |
---|
| 620 | |
---|
[a7a5886] | 621 | write_node(doc, coll, "length", item.length, |
---|
[7d6351e] | 622 | {"unit": item.length_unit}) |
---|
[4c00964] | 623 | |
---|
| 624 | for apert in item.aperture: |
---|
[579ba85] | 625 | ap = doc.createElement("aperture") |
---|
| 626 | if apert.name is not None: |
---|
| 627 | ap.setAttribute("name", str(apert.name)) |
---|
| 628 | if apert.type is not None: |
---|
| 629 | ap.setAttribute("type", str(apert.type)) |
---|
| 630 | coll.appendChild(ap) |
---|
[4c00964] | 631 | |
---|
| 632 | size = doc.createElement("size") |
---|
[579ba85] | 633 | if apert.size_name is not None: |
---|
| 634 | size.setAttribute("name", str(apert.size_name)) |
---|
[a7a5886] | 635 | written = write_node(doc, size, "x", apert.size.x, |
---|
[7d6351e] | 636 | {"unit": apert.size_unit}) |
---|
[a7a5886] | 637 | written = written | write_node(doc, size, "y", apert.size.y, |
---|
[7d6351e] | 638 | {"unit": apert.size_unit}) |
---|
[a7a5886] | 639 | written = written | write_node(doc, size, "z", apert.size.z, |
---|
[7d6351e] | 640 | {"unit": apert.size_unit}) |
---|
[579ba85] | 641 | if written == True: |
---|
| 642 | ap.appendChild(size) |
---|
[19b628f] | 643 | |
---|
| 644 | write_node(doc, ap, "distance", apert.distance, |
---|
| 645 | {"unit": apert.distance_unit}) |
---|
[4c00964] | 646 | |
---|
| 647 | # Detectors |
---|
| 648 | for item in datainfo.detector: |
---|
| 649 | det = doc.createElement("SASdetector") |
---|
[579ba85] | 650 | written = write_node(doc, det, "name", item.name) |
---|
[a7a5886] | 651 | written = written | write_node(doc, det, "SDD", item.distance, |
---|
[7d6351e] | 652 | {"unit": item.distance_unit}) |
---|
[579ba85] | 653 | if written == True: |
---|
| 654 | instr.appendChild(det) |
---|
[4c00964] | 655 | |
---|
| 656 | off = doc.createElement("offset") |
---|
[a7a5886] | 657 | written = write_node(doc, off, "x", item.offset.x, |
---|
[7d6351e] | 658 | {"unit": item.offset_unit}) |
---|
[a7a5886] | 659 | written = written | write_node(doc, off, "y", item.offset.y, |
---|
[7d6351e] | 660 | {"unit": item.offset_unit}) |
---|
[a7a5886] | 661 | written = written | write_node(doc, off, "z", item.offset.z, |
---|
[7d6351e] | 662 | {"unit": item.offset_unit}) |
---|
[579ba85] | 663 | if written == True: |
---|
| 664 | det.appendChild(off) |
---|
[19b628f] | 665 | |
---|
| 666 | ori = doc.createElement("orientation") |
---|
| 667 | written = write_node(doc, ori, "roll", item.orientation.x, |
---|
| 668 | {"unit": item.orientation_unit}) |
---|
| 669 | written = written | write_node(doc, ori, "pitch", |
---|
| 670 | item.orientation.y, |
---|
| 671 | {"unit": item.orientation_unit}) |
---|
| 672 | written = written | write_node(doc, ori, "yaw", |
---|
| 673 | item.orientation.z, |
---|
| 674 | {"unit": item.orientation_unit}) |
---|
| 675 | if written == True: |
---|
| 676 | det.appendChild(ori) |
---|
[4c00964] | 677 | |
---|
| 678 | center = doc.createElement("beam_center") |
---|
[a7a5886] | 679 | written = write_node(doc, center, "x", item.beam_center.x, |
---|
[7d6351e] | 680 | {"unit": item.beam_center_unit}) |
---|
[a7a5886] | 681 | written = written | write_node(doc, center, "y", |
---|
| 682 | item.beam_center.y, |
---|
[7d6351e] | 683 | {"unit": item.beam_center_unit}) |
---|
[a7a5886] | 684 | written = written | write_node(doc, center, "z", |
---|
| 685 | item.beam_center.z, |
---|
[7d6351e] | 686 | {"unit": item.beam_center_unit}) |
---|
[579ba85] | 687 | if written == True: |
---|
| 688 | det.appendChild(center) |
---|
| 689 | |
---|
[4c00964] | 690 | pix = doc.createElement("pixel_size") |
---|
[a7a5886] | 691 | written = write_node(doc, pix, "x", item.pixel_size.x, |
---|
[7d6351e] | 692 | {"unit": item.pixel_size_unit}) |
---|
[a7a5886] | 693 | written = written | write_node(doc, pix, "y", item.pixel_size.y, |
---|
[7d6351e] | 694 | {"unit": item.pixel_size_unit}) |
---|
[a7a5886] | 695 | written = written | write_node(doc, pix, "z", item.pixel_size.z, |
---|
[7d6351e] | 696 | {"unit": item.pixel_size_unit}) |
---|
[579ba85] | 697 | if written == True: |
---|
| 698 | det.appendChild(pix) |
---|
[19b628f] | 699 | written = written | write_node(doc, det, "slit_length", |
---|
| 700 | item.slit_length, |
---|
| 701 | {"unit": item.slit_length_unit}) |
---|
| 702 | |
---|
[579ba85] | 703 | # Processes info |
---|
[4c00964] | 704 | for item in datainfo.process: |
---|
| 705 | node = doc.createElement("SASprocess") |
---|
| 706 | entry_node.appendChild(node) |
---|
| 707 | |
---|
[579ba85] | 708 | write_node(doc, node, "name", item.name) |
---|
| 709 | write_node(doc, node, "date", item.date) |
---|
| 710 | write_node(doc, node, "description", item.description) |
---|
| 711 | for term in item.term: |
---|
| 712 | value = term['value'] |
---|
| 713 | del term['value'] |
---|
| 714 | write_node(doc, node, "term", value, term) |
---|
| 715 | for note in item.notes: |
---|
| 716 | write_node(doc, node, "SASprocessnote", note) |
---|
[19b628f] | 717 | if len(item.notes) == 0: |
---|
| 718 | write_node(doc, node, "SASprocessnote", "") |
---|
| 719 | |
---|
| 720 | # Note info |
---|
| 721 | if len(datainfo.notes) == 0: |
---|
| 722 | node = doc.createElement("SASnote") |
---|
| 723 | entry_node.appendChild(node) |
---|
| 724 | if node.hasChildNodes(): |
---|
| 725 | for child in node.childNodes: |
---|
| 726 | node.removeChild(child) |
---|
| 727 | else: |
---|
| 728 | for item in datainfo.notes: |
---|
| 729 | node = doc.createElement("SASnote") |
---|
| 730 | entry_node.appendChild(node) |
---|
| 731 | node.appendChild(doc.createTextNode(item)) |
---|
| 732 | |
---|
[b3de3a45] | 733 | # Return the document, and the SASentry node associated with |
---|
| 734 | # the data we just wrote |
---|
| 735 | return doc, entry_node |
---|
| 736 | |
---|
| 737 | def write(self, filename, datainfo): |
---|
| 738 | """ |
---|
[0997158f] | 739 | Write the content of a Data1D as a CanSAS XML file |
---|
| 740 | |
---|
| 741 | :param filename: name of the file to write |
---|
| 742 | :param datainfo: Data1D object |
---|
[b3de3a45] | 743 | """ |
---|
| 744 | # Create XML document |
---|
[7d6351e] | 745 | doc, _ = self._to_xml_doc(datainfo) |
---|
[4c00964] | 746 | # Write the file |
---|
| 747 | fd = open(filename, 'w') |
---|
| 748 | fd.write(doc.toprettyxml()) |
---|
| 749 | fd.close() |
---|