"""
CanSAS data reader
"""
############################################################################
# This software was developed by the University of Tennessee as part of the
# Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
# project funded by the US National Science Foundation.
# If you use DANSE applications to do scientific research that leads to
# publication, we ask that you acknowledge the use of the software with the
# following sentence:
# This work benefited from DANSE software developed under NSF award DMR-0520547.
# Copyright 2008, 2009 University of Tennessee
############################################################################

# Known issue: reader not compatible with multiple SASdata entries
# within a single SASentry. Will raise a runtime error.

# TODO: check that all vectors are written only if they have at
#       least one non-empty value
# TODO: Writing only allows one SASentry per file.
#       Would be best to allow multiple entries.
# TODO: Store error list
# TODO: Allow for additional meta data for each section
# TODO: Notes need to be implemented. They can be any XML
#       structure in version 1.0.
#       Process notes have the same problem.
# TODO: Unit conversion is not complete (temperature units are missing)

import logging
import numpy
import os
import sys
from sans.dataloader.data_info import Data1D
from sans.dataloader.data_info import Collimation
from sans.dataloader.data_info import Detector
from sans.dataloader.data_info import Process
from sans.dataloader.data_info import Aperture
from lxml import etree
import xml.dom.minidom

_ZERO = 1e-16
HAS_CONVERTER = True
try:
    from data_util.nxsunit import Converter
except ImportError:
    HAS_CONVERTER = False

CANSAS_NS = "cansas1d/1.0"
ALLOW_ALL = True

def write_node(doc, parent, name, value, attr={}):
    """
    :param doc: document DOM
    :param parent: parent node
    :param name: tag of the element
    :param value: value of the child text node
    :param attr: attribute dictionary

    :return: True if something was appended, otherwise False
    """
    if value is not None:
        node = doc.createElement(name)
        node.appendChild(doc.createTextNode(str(value)))
        for item in attr:
            node.setAttribute(item, attr[item])
        parent.appendChild(node)
        return True
    return False

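# Illustrative sketch (editor-added, not part of the original module): how
# write_node is used to grow a CanSAS tree with xml.dom.minidom.  The element
# values and the attribute shown below are hypothetical.
#
#     >>> doc = xml.dom.minidom.Document()
#     >>> root = doc.createElement("SASroot")
#     >>> _ = doc.appendChild(root)
#     >>> write_node(doc, root, "Run", "run54321", {"name": "test run"})
#     True
#     >>> write_node(doc, root, "Title", None)    # nothing appended for None
#     False
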
def get_content(location, node):
    """
    Get the first instance of the content of an xpath location.

    :param location: xpath location
    :param node: node to start at

    :return: Element, or None
    """
    nodes = node.xpath(location, namespaces={'ns': CANSAS_NS})

    if len(nodes) > 0:
        return nodes[0]
    else:
        return None

def get_float(location, node):
    """
    Get the content of a node as a float

    :param location: xpath location
    :param node: node to start at

    :return: (value, attribute dictionary); value is None if the node is
        missing or its text cannot be converted to a float
    """
    nodes = node.xpath(location, namespaces={'ns': CANSAS_NS})

    value = None
    attr = {}
    if len(nodes) > 0:
        try:
            value = float(nodes[0].text)
        except (TypeError, ValueError):
            # Could not parse the text as a float; skip and return None
            msg = "cansas_reader.get_float: could not "
            msg += "convert '%s' to float" % nodes[0].text
            logging.error(msg)
        if nodes[0].get('unit') is not None:
            attr['unit'] = nodes[0].get('unit')
    return value, attr

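# Illustrative sketch (editor-added): querying a parsed CanSAS file with the
# helpers above.  The file name and element contents are hypothetical; the
# 'ns' prefix is bound to CANSAS_NS inside get_content and get_float.
#
#     >>> tree = etree.parse("example_cansas.xml",
#     ...                    parser=etree.ETCompatXMLParser())
#     >>> entry = get_content('ns:SASentry', tree.getroot())
#     >>> thickness, attrs = get_float('ns:SASsample/ns:thickness', entry)
#     >>> attrs.get('unit')    # e.g. 'mm' when the file declares a unit
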
class Reader:
    """
    Class to load CanSAS 1D XML files

    :Dependencies:
        The CanSAS reader requires the lxml package.
    """
    ## CanSAS version
    version = '1.0'
    ## File type
    type_name = "CanSAS 1D"
    ## Wildcards
    type = ["CanSAS 1D files (*.xml)|*.xml",
            "CanSAS 1D AVE files (*.AVEx)|*.AVEx",
            "CanSAS 1D ABS files (*.ABSx)|*.ABSx"]

    ## List of allowed extensions
    ext = ['.xml', '.XML', '.avex', '.AVEx', '.absx', '.ABSx']

    def __init__(self):
        ## List of errors
        self.errors = []

    def read(self, path):
        """
        Load the SASentry blocks found in a CanSAS XML file.

        :param path: file path
        :return: list of entry contents
        """
        output = []
        ns = []
        if os.path.isfile(path):
            basename = os.path.basename(path)
            _, extension = os.path.splitext(basename)
            if ALLOW_ALL or extension.lower() in self.ext:
                try:
                    tree = etree.parse(path, parser=etree.ETCompatXMLParser())
                    # Check the format version number.
                    # Specifying the namespace will take care of the file
                    # format version.
                    root = tree.getroot()
                    base_ns = '/ns:SASroot/ns:SASentry'
                    ns.append('SASroot')
                    ns.append('SASentry')
                    entry_list = root.xpath(base_ns,
                                            namespaces={'ns': CANSAS_NS})
                    for entry in entry_list:
                        output.append(etree.tostring(entry))
                        new_name_entry = self._parse_entry(entry, ns)
                        output.append(new_name_entry)
                except Exception:
                    exc_type, exc_obj, exc_tb = sys.exc_info()
                    fname = os.path.split(
                        exc_tb.tb_frame.f_code.co_filename)[1]
                    logging.error("cansas_reader.read: %s in %s at line %s"
                                  % (exc_type, fname, exc_tb.tb_lineno))
        return output

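    # Illustrative sketch (editor-added): typical use of the reader.  The file
    # name is hypothetical; read() returns an empty list when the path does
    # not point to a readable file.
    #
    #     >>> reader = Reader()
    #     >>> entries = reader.read("example_cansas.xml")
    #     >>> for entry in entries:
    #     ...     print entry
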
    def _parse_entry(self, dom, ns):
        """
        Parse a SASentry - new recursive method for parsing the dom of
        the CanSAS data format. This will allow multiple data files
        and extra nodes to be read in simultaneously.

        :param dom: dom object with a namespace base of ns

        :param ns: A list of namespaces that lead up to the dom object
        """

        # base method variables
        # namespace - the full dom namespace of the current level
        # tag - the name of the tag at the current level
        # text - The text of the current tag
        # attributes - A dictionary of attributes of the current tag
        # level - The recursion level within the DOM. Used for determining
        #         how to apply the data
        namespace = ''
        tag = ''
        text = ''
        attributes = {}
        namespace_dictionary = {}
        dictionary = {}
        level = len(ns)

        for name in ns:
            namespace += 'ns:' + name + "/"
        try:
            nodes = dom.xpath(namespace, namespaces={'ns': CANSAS_NS})

            for node in nodes:
                nodes_in_node = len(node)
                if nodes_in_node > 1:
                    # Branch node: recurse with its tag added to the
                    # namespace path.
                    ns.append(node.tag)
                    self._parse_entry(node, ns)
                else:
                    # Leaf node: record its tag, text and attributes.
                    tag = node.tag
                    text = node.text
                    attributes = {}
                    for name, value in node.items():
                        attributes[name] = value
                    basenamespace = ns.pop().strip('SAS')
                    namespace_dictionary[tag] = (text, attributes)
                    dictionary[namespace] = namespace_dictionary

        except Exception:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            self.errors.append("{!s}, {!s}, {!s}".format(exc_type, fname,
                                                         exc_tb.tb_lineno))

        return dictionary

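    # Editor's note (assumption): with the fixes above, _parse_entry returns a
    # mapping from the xpath prefix that was walked to the leaf tags found
    # under it, for example:
    #
    #     {'ns:SASroot/ns:SASentry/': {'Title': ('test title', {}),
    #                                  'Run': ('run54321', {'name': 'r1'})}}
    #
    # The method is still a work in progress (see the known issue and TODO
    # notes at the top of the module).
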
    def _to_xml_doc(self, datainfo):
        """
        Create an XML document to contain the content of a Data1D

        :param datainfo: Data1D object
        """

        if not issubclass(datainfo.__class__, Data1D):
            raise RuntimeError, "The cansas writer expects a Data1D instance"

        doc = xml.dom.minidom.Document()
        main_node = doc.createElement("SASroot")
        main_node.setAttribute("version", self.version)
        main_node.setAttribute("xmlns", "cansas1d/%s" % self.version)
        main_node.setAttribute("xmlns:xsi",
                               "http://www.w3.org/2001/XMLSchema-instance")
        main_node.setAttribute("xsi:schemaLocation",
                               "cansas1d/%s http://svn.smallangles.net/svn/canSAS/1dwg/trunk/cansas1d.xsd" % self.version)

        doc.appendChild(main_node)

        entry_node = doc.createElement("SASentry")
        main_node.appendChild(entry_node)

        write_node(doc, entry_node, "Title", datainfo.title)
        for item in datainfo.run:
            runname = {}
            if item in datainfo.run_name and \
                    len(str(datainfo.run_name[item])) > 1:
                runname = {'name': datainfo.run_name[item]}
            write_node(doc, entry_node, "Run", item, runname)

        # Data info
        node = doc.createElement("SASdata")
        entry_node.appendChild(node)

        for i in range(len(datainfo.x)):
            pt = doc.createElement("Idata")
            node.appendChild(pt)
            write_node(doc, pt, "Q", datainfo.x[i], {'unit': datainfo.x_unit})
            if len(datainfo.y) > i:
                write_node(doc, pt, "I", datainfo.y[i],
                           {'unit': datainfo.y_unit})
            if datainfo.dx is not None and len(datainfo.dx) > i:
                write_node(doc, pt, "Qdev", datainfo.dx[i],
                           {'unit': datainfo.x_unit})
            if datainfo.dxl is not None and len(datainfo.dxl) > i:
                write_node(doc, pt, "dQl", datainfo.dxl[i],
                           {'unit': datainfo.x_unit})
            if datainfo.dxw is not None and len(datainfo.dxw) > i:
                write_node(doc, pt, "dQw", datainfo.dxw[i],
                           {'unit': datainfo.x_unit})
            if datainfo.dy is not None and len(datainfo.dy) > i:
                write_node(doc, pt, "Idev", datainfo.dy[i],
                           {'unit': datainfo.y_unit})
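
        # Editor's note: each pass through the loop above writes one point of
        # the I(Q) curve.  With the optional resolution/uncertainty columns
        # present, a single point looks roughly like (values hypothetical):
        #
        #     <Idata>
        #         <Q unit="1/A">0.01</Q>
        #         <I unit="1/cm">100.0</I>
        #         <Qdev unit="1/A">0.001</Qdev>
        #         <Idev unit="1/cm">3.0</Idev>
        #     </Idata>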

        # Sample info
        sample = doc.createElement("SASsample")
        if datainfo.sample.name is not None:
            sample.setAttribute("name", str(datainfo.sample.name))
        entry_node.appendChild(sample)
        write_node(doc, sample, "ID", str(datainfo.sample.ID))
        write_node(doc, sample, "thickness", datainfo.sample.thickness,
                   {"unit": datainfo.sample.thickness_unit})
        write_node(doc, sample, "transmission", datainfo.sample.transmission)
        write_node(doc, sample, "temperature", datainfo.sample.temperature,
                   {"unit": datainfo.sample.temperature_unit})

        for item in datainfo.sample.details:
            write_node(doc, sample, "details", item)

        pos = doc.createElement("position")
        written = write_node(doc, pos, "x", datainfo.sample.position.x,
                             {"unit": datainfo.sample.position_unit})
        written = written | write_node(doc, pos, "y",
                                       datainfo.sample.position.y,
                                       {"unit": datainfo.sample.position_unit})
        written = written | write_node(doc, pos, "z",
                                       datainfo.sample.position.z,
                                       {"unit": datainfo.sample.position_unit})
        if written == True:
            sample.appendChild(pos)

        ori = doc.createElement("orientation")
        written = write_node(doc, ori, "roll",
                             datainfo.sample.orientation.x,
                             {"unit": datainfo.sample.orientation_unit})
        written = written | write_node(doc, ori, "pitch",
                                       datainfo.sample.orientation.y,
                                       {"unit": datainfo.sample.orientation_unit})
        written = written | write_node(doc, ori, "yaw",
                                       datainfo.sample.orientation.z,
                                       {"unit": datainfo.sample.orientation_unit})
        if written == True:
            sample.appendChild(ori)

        # Instrument info
        instr = doc.createElement("SASinstrument")
        entry_node.appendChild(instr)

        write_node(doc, instr, "name", datainfo.instrument)

        # Source
        source = doc.createElement("SASsource")
        if datainfo.source.name is not None:
            source.setAttribute("name", str(datainfo.source.name))
        instr.appendChild(source)

        write_node(doc, source, "radiation", datainfo.source.radiation)
        write_node(doc, source, "beam_shape", datainfo.source.beam_shape)
        size = doc.createElement("beam_size")
        if datainfo.source.beam_size_name is not None:
            size.setAttribute("name", str(datainfo.source.beam_size_name))
        written = write_node(doc, size, "x", datainfo.source.beam_size.x,
                             {"unit": datainfo.source.beam_size_unit})
        written = written | write_node(doc, size, "y",
                                       datainfo.source.beam_size.y,
                                       {"unit": datainfo.source.beam_size_unit})
        written = written | write_node(doc, size, "z",
                                       datainfo.source.beam_size.z,
                                       {"unit": datainfo.source.beam_size_unit})
        if written == True:
            source.appendChild(size)

        write_node(doc, source, "wavelength",
                   datainfo.source.wavelength,
                   {"unit": datainfo.source.wavelength_unit})
        write_node(doc, source, "wavelength_min",
                   datainfo.source.wavelength_min,
                   {"unit": datainfo.source.wavelength_min_unit})
        write_node(doc, source, "wavelength_max",
                   datainfo.source.wavelength_max,
                   {"unit": datainfo.source.wavelength_max_unit})
        write_node(doc, source, "wavelength_spread",
                   datainfo.source.wavelength_spread,
                   {"unit": datainfo.source.wavelength_spread_unit})
| 357 | # Collimation |
---|
| 358 | for item in datainfo.collimation: |
---|
| 359 | coll = doc.createElement("SAScollimation") |
---|
| 360 | if item.name is not None: |
---|
| 361 | coll.setAttribute("name", str(item.name)) |
---|
| 362 | instr.appendChild(coll) |
---|
| 363 | |
---|
| 364 | write_node(doc, coll, "length", item.length, |
---|
| 365 | {"unit": item.length_unit}) |
---|
| 366 | |
---|
| 367 | for apert in item.aperture: |
---|
| 368 | ap = doc.createElement("aperture") |
---|
| 369 | if apert.name is not None: |
---|
| 370 | ap.setAttribute("name", str(apert.name)) |
---|
| 371 | if apert.type is not None: |
---|
| 372 | ap.setAttribute("type", str(apert.type)) |
---|
| 373 | coll.appendChild(ap) |
---|
| 374 | |
---|
| 375 | write_node(doc, ap, "distance", apert.distance, |
---|
| 376 | {"unit": apert.distance_unit}) |
---|
| 377 | |
---|
| 378 | size = doc.createElement("size") |
---|
| 379 | if apert.size_name is not None: |
---|
| 380 | size.setAttribute("name", str(apert.size_name)) |
---|
| 381 | written = write_node(doc, size, "x", apert.size.x, |
---|
| 382 | {"unit": apert.size_unit}) |
---|
| 383 | written = written | write_node(doc, size, "y", apert.size.y, |
---|
| 384 | {"unit": apert.size_unit}) |
---|
| 385 | written = written | write_node(doc, size, "z", apert.size.z, |
---|
| 386 | {"unit": apert.size_unit}) |
---|
| 387 | if written == True: |
---|
| 388 | ap.appendChild(size) |
---|
| 389 | |
---|
        # Detectors
        for item in datainfo.detector:
            det = doc.createElement("SASdetector")
            written = write_node(doc, det, "name", item.name)
            written = written | write_node(doc, det, "SDD", item.distance,
                                           {"unit": item.distance_unit})
            written = written | write_node(doc, det, "slit_length",
                                           item.slit_length,
                                           {"unit": item.slit_length_unit})
            if written == True:
                instr.appendChild(det)

            off = doc.createElement("offset")
            written = write_node(doc, off, "x", item.offset.x,
                                 {"unit": item.offset_unit})
            written = written | write_node(doc, off, "y", item.offset.y,
                                           {"unit": item.offset_unit})
            written = written | write_node(doc, off, "z", item.offset.z,
                                           {"unit": item.offset_unit})
            if written == True:
                det.appendChild(off)

            center = doc.createElement("beam_center")
            written = write_node(doc, center, "x", item.beam_center.x,
                                 {"unit": item.beam_center_unit})
            written = written | write_node(doc, center, "y",
                                           item.beam_center.y,
                                           {"unit": item.beam_center_unit})
            written = written | write_node(doc, center, "z",
                                           item.beam_center.z,
                                           {"unit": item.beam_center_unit})
            if written == True:
                det.appendChild(center)

            pix = doc.createElement("pixel_size")
            written = write_node(doc, pix, "x", item.pixel_size.x,
                                 {"unit": item.pixel_size_unit})
            written = written | write_node(doc, pix, "y", item.pixel_size.y,
                                           {"unit": item.pixel_size_unit})
            written = written | write_node(doc, pix, "z", item.pixel_size.z,
                                           {"unit": item.pixel_size_unit})
            if written == True:
                det.appendChild(pix)

            ori = doc.createElement("orientation")
            written = write_node(doc, ori, "roll", item.orientation.x,
                                 {"unit": item.orientation_unit})
            written = written | write_node(doc, ori, "pitch",
                                           item.orientation.y,
                                           {"unit": item.orientation_unit})
            written = written | write_node(doc, ori, "yaw",
                                           item.orientation.z,
                                           {"unit": item.orientation_unit})
            if written == True:
                det.appendChild(ori)

        # Processes info
        for item in datainfo.process:
            node = doc.createElement("SASprocess")
            entry_node.appendChild(node)

            write_node(doc, node, "name", item.name)
            write_node(doc, node, "date", item.date)
            write_node(doc, node, "description", item.description)
            for term in item.term:
                # Work on a copy so the caller's term dictionary is not
                # modified by the write.
                term_attr = dict(term)
                value = term_attr.pop('value', None)
                write_node(doc, node, "term", value, term_attr)
            for note in item.notes:
                write_node(doc, node, "SASprocessnote", note)

        # Return the document, and the SASentry node associated with
        # the data we just wrote
        return doc, entry_node

    def write(self, filename, datainfo):
        """
        Write the content of a Data1D as a CanSAS XML file

        :param filename: name of the file to write
        :param datainfo: Data1D object
        """
        # Create XML document
        doc, _ = self._to_xml_doc(datainfo)
        # Write the file
        fd = open(filename, 'w')
        fd.write(doc.toprettyxml())
        fd.close()

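    # Illustrative sketch (editor-added): writing a Data1D out as CanSAS XML.
    # The Data1D constructor arguments are assumptions about
    # sans.dataloader.data_info.Data1D, and the output file name is
    # hypothetical.
    #
    #     >>> x = numpy.linspace(0.001, 0.5, 100)
    #     >>> data = Data1D(x=x, y=numpy.ones(100), dy=0.1 * numpy.ones(100))
    #     >>> data.title = "Test data"
    #     >>> Reader().write("output_cansas.xml", data)
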
    def _store_float(self, location, node, variable, storage, optional=True):
        """
        Get the content of an xpath location and store
        the result. Check that the units are compatible
        with the destination. The value is expected to
        be a float.

        The xpath location might or might not exist.
        If it does not exist, nothing is done.

        :param location: xpath location to fetch
        :param node: node to read the data from
        :param variable: name of the data member to store it in [string]
        :param storage: data object that has the 'variable' data member
        :param optional: if True, no exception will be raised
            if unit conversion can't be done

        :raise ValueError: raised when the units are not recognized
        """
        entry = get_content(location, node)
        try:
            value = float(entry.text)
        except (TypeError, ValueError, AttributeError):
            # Missing node or non-numeric text
            value = None

        if value is not None:
            # If the entry has units, check to see that they are
            # compatible with what we currently have in the data object
            units = entry.get('unit')
            if units is not None:
                toks = variable.split('.')
                local_unit = None
                exec "local_unit = storage.%s_unit" % toks[0]
                if local_unit is not None and units.lower() != local_unit.lower():
                    if HAS_CONVERTER == True:
                        try:
                            conv = Converter(units)
                            exec "storage.%s = %g" % (variable,
                                                      conv(value, units=local_unit))
                        except:
                            err_mess = "CanSAS reader: could not convert"
                            err_mess += " %s unit [%s]; expecting [%s]\n %s" \
                                % (variable, units, local_unit, sys.exc_value)
                            self.errors.append(err_mess)
                            if optional:
                                logging.info(err_mess)
                            else:
                                raise ValueError, err_mess
                    else:
                        err_mess = "CanSAS reader: unrecognized %s unit [%s];" \
                            % (variable, units)
                        err_mess += " expecting [%s]" % local_unit
                        self.errors.append(err_mess)
                        if optional:
                            logging.info(err_mess)
                        else:
                            raise ValueError, err_mess
                else:
                    exec "storage.%s = value" % variable
            else:
                exec "storage.%s = value" % variable

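    # Illustrative sketch (editor-added): the unit-conversion branch above
    # builds a Converter for the unit string found in the file and converts
    # into the unit already declared on the storage object, i.e. the same
    # pattern as (values hypothetical):
    #
    #     >>> conv = Converter('mm')      # unit read from the XML attribute
    #     >>> conv(25.4, units='cm')      # -> 2.54, the value that is stored
    #     2.54
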
    def _store_content(self, location, node, variable, storage):
        """
        Get the content of an xpath location and store
        the result. The value is treated as a string.

        The xpath location might or might not exist.
        If it does not exist, nothing is done.

        :param location: xpath location to fetch
        :param node: node to read the data from
        :param variable: name of the data member to store it in [string]
        :param storage: data object that has the 'variable' data member
        """
        entry = get_content(location, node)
        if entry is not None and entry.text is not None:
            exec "storage.%s = entry.text.strip()" % variable