"""
    This software was developed by the University of Tennessee as part of the
    Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
    project funded by the US National Science Foundation.

    See the license text in license.txt

    copyright 2008, University of Tennessee
"""
---|
# Known issue: reader not compatible with multiple SASdata entries
# within a single SASentry. Will raise a runtime error.

#TODO: check that all vectors are written only if they have at least one non-empty value
#TODO: Writing only allows one SASentry per file. Would be best to allow multiple entries.
#TODO: Store error list
#TODO: Allow for additional meta data for each section
#TODO: Notes need to be implemented. They can be any XML structure in version 1.0
#      Process notes have the same problem.
---|
| 20 | |
---|
| 21 | import logging |
---|
| 22 | import numpy |
---|
| 23 | import os, sys |
---|
[d6513cd] | 24 | from DataLoader.data_info import Data1D, Collimation, Detector, Process, Aperture |
---|
[8780e9a] | 25 | from xml import xpath |
---|
[4c00964] | 26 | import xml.dom.minidom |
---|
| 27 | |
---|
[8780e9a] | 28 | |
---|
# Check whether the optional unit-conversion package is available.
# Callers fall back to strict unit matching when it is not.
has_converter = True
try:
    from data_util.nxsunit import Converter
except ImportError:
    # Only a missing package should disable conversion; anything else
    # (e.g. a syntax error in nxsunit) should surface to the caller.
    has_converter = False
---|
| 34 | |
---|
def write_node(doc, parent, name, value, attr=None):
    """
    Append a child element holding a text node to *parent*.

    Nothing is written when *value* is None, so optional data-info
    entries can be passed straight through.

    @param doc: document DOM
    @param parent: parent node
    @param name: tag of the element
    @param value: value of the child text node
    @param attr: attribute dictionary [optional]
    @return: True if something was appended, otherwise False
    """
    # Avoid a mutable default argument: a shared {} default would be
    # visible across calls if a callee ever mutated it.
    if attr is None:
        attr = {}
    if value is not None:
        node = doc.createElement(name)
        node.appendChild(doc.createTextNode(str(value)))
        for item in attr:
            node.setAttribute(item, attr[item])
        parent.appendChild(node)
        return True
    return False
---|
| 52 | |
---|
def get_node_text(node):
    """
    Get the text content of a node

    @param node: node to read from
    @return: content, attribute dictionary
    """
    content = None
    attr = {}

    # The first non-blank text child provides the content
    for child in node.childNodes:
        if child.nodeName.find('text') >= 0 and len(child.nodeValue.strip()) > 0:
            content = child.nodeValue.strip()
            break

    # Collect the node's attributes as a name -> value dictionary
    if node.hasAttributes():
        attributes = node.attributes
        for i in range(attributes.length):
            entry = attributes.item(i)
            attr[entry.nodeName] = entry.nodeValue

    return content, attr
---|
| 74 | |
---|
def get_content(location, node):
    """
    Get the first instance of the content of a xpath location

    @param location: xpath location
    @param node: node to start at
    @return: content string (or None), attribute dictionary
    """
    value = None
    attr = {}
    nodes = xpath.Evaluate(location, node)
    if len(nodes) > 0:
        found = nodes[0]
        try:
            # Skip comments and empty lines: take the first
            # non-blank text child as the content
            for child in found.childNodes:
                if child.nodeName.find('text') >= 0 \
                        and len(child.nodeValue.strip()) > 0:
                    value = child.nodeValue.strip()
                    break

            # Copy the attributes of the matched node, if any
            if found.hasAttributes():
                for i in range(found.attributes.length):
                    entry = found.attributes.item(i)
                    attr[entry.nodeName] = entry.nodeValue
        except:
            # problem reading the node. Skip it and return that
            # nothing was found
            logging.error("cansas_reader.get_content: %s\n %s" % (location, sys.exc_value))

    return value, attr
---|
| 104 | |
---|
def get_float(location, node):
    """
    Get the content of a node as a float

    @param location: xpath location
    @param node: node to start at
    @return: value as a float (or None if missing/unparsable),
        attribute dictionary of the matched node
    """
    value = None
    content, attr = get_content(location, node)
    if content is not None:
        try:
            value = float(content)
        except (TypeError, ValueError):
            # Could not parse the content as a number: skip and return None.
            # Catch only conversion errors, not everything.
            logging.error("cansas_reader.get_float: could not convert '%s' to float" % content)

    return value, attr
---|
| 123 | |
---|
def _store_float(location, node, variable, storage):
    """
    Get the content of a xpath location and store
    the result. Check that the units are compatible
    with the destination. The value is expected to
    be a float.

    The xpath location might or might not exist.
    If it does not exist, nothing is done

    @param location: xpath location to fetch
    @param node: node to read the data from
    @param variable: name of the data member to store it in [string],
        may be dotted (e.g. 'position.x')
    @param storage: data object that has the 'variable' data member

    @raise ValueError: raised when the units are not recognized
    """
    value, attr = get_float(location, node)
    if value is not None:
        # Resolve dotted names such as 'position.x' by walking down to
        # the object that owns the final attribute (replaces the former
        # exec-based dynamic assignment).
        toks = variable.split('.')
        obj = storage
        for tok in toks[:-1]:
            obj = getattr(obj, tok)

        # If the entry has units, check to see that they are
        # compatible with what we currently have in the data object
        if 'unit' in attr:
            # Units live on the top-level member (e.g. 'position_unit')
            local_unit = getattr(storage, "%s_unit" % toks[0]).lower()
            if attr['unit'].lower() != local_unit:
                if has_converter:
                    try:
                        conv = Converter(attr['unit'])
                        # Store the converted value at full precision
                        # (the old exec used %g, truncating to 6 digits)
                        setattr(obj, toks[-1], conv(value, units=local_unit))
                    except Exception:
                        raise ValueError("CanSAS reader: could not convert %s unit [%s]; expecting [%s]\n %s" \
                            % (variable, attr['unit'], local_unit, sys.exc_info()[1]))
                else:
                    raise ValueError("CanSAS reader: unrecognized %s unit [%s]; expecting [%s]" \
                        % (variable, attr['unit'], local_unit))
            else:
                setattr(obj, toks[-1], value)
        else:
            setattr(obj, toks[-1], value)
---|
| 163 | |
---|
[8780e9a] | 164 | |
---|
def _store_content(location, node, variable, storage):
    """
    Get the content of a xpath location and store
    the result. The value is treated as a string.

    The xpath location might or might not exist.
    If it does not exist, nothing is done

    @param location: xpath location to fetch
    @param node: node to read the data from
    @param variable: name of the data member to store it in [string],
        may be dotted (e.g. 'sample.ID')
    @param storage: data object that has the 'variable' data member
    """
    value, attr = get_content(location, node)
    if value is not None:
        # setattr instead of exec: same dynamic assignment without
        # compiling code from a string.
        toks = variable.split('.')
        obj = storage
        for tok in toks[:-1]:
            obj = getattr(obj, tok)
        setattr(obj, toks[-1], value)
---|
| 181 | |
---|
| 182 | |
---|
class Reader:
    """
    Class to load cansas 1D XML files

    Dependencies:
        The CanSas reader requires PyXML 0.8.4 or later.
    """
    ## CanSAS version
    version = '1.0'
    ## File type
    type = ["CanSAS 1D files (*.xml)|*.xml"]
    ## List of allowed extensions
    ext = ['.xml', '.XML']

    def read(self, path):
        """
        Load data file

        @param path: file path
        @return: Data1D object if a single SASentry was found,
            or a list of Data1D objects if multiple entries were found,
            or None if nothing was found
        @raise RuntimeError: when the file can't be opened
        @raise ValueError: when the length of the data vectors are inconsistent
        """
        from xml.dom.minidom import parse

        output = []

        if os.path.isfile(path):
            basename = os.path.basename(path)
            root, extension = os.path.splitext(basename)
            if extension.lower() in self.ext:

                dom = parse(path)

                # Check the format version number
                # (guard against a document with no SASroot node, which
                # previously raised an IndexError here)
                nodes = xpath.Evaluate('SASroot', dom)
                if len(nodes) > 0 and nodes[0].hasAttributes():
                    for i in range(nodes[0].attributes.length):
                        if nodes[0].attributes.item(i).nodeName == 'version':
                            if nodes[0].attributes.item(i).nodeValue != self.version:
                                raise ValueError("cansas_reader: unrecognized version number %s" % \
                                    nodes[0].attributes.item(i).nodeValue)

                entry_list = xpath.Evaluate('SASroot/SASentry', dom)
                for entry in entry_list:
                    sas_entry = self._parse_entry(entry)
                    sas_entry.filename = basename
                    output.append(sas_entry)

        else:
            raise RuntimeError("%s is not a file" % path)

        # Return output consistent with the loader's api
        if len(output) == 0:
            return None
        elif len(output) == 1:
            return output[0]
        else:
            return output

    def _parse_entry(self, dom):
        """
        Parse a SASentry

        @param dom: SASentry node
        @return: Data1D object
        """
        x = numpy.zeros(0)
        y = numpy.zeros(0)

        data_info = Data1D(x, y)

        # Look up title
        _store_content('Title', dom, 'title', data_info)
        # Look up run number
        nodes = xpath.Evaluate('Run', dom)
        for item in nodes:
            value, attr = get_node_text(item)
            if value is not None:
                data_info.run.append(value)
                if 'name' in attr:
                    data_info.run_name[value] = attr['name']

        # Look up instrument name
        _store_content('SASinstrument/name', dom, 'instrument', data_info)

        note_list = xpath.Evaluate('SASnote', dom)
        for note in note_list:
            try:
                note_value, note_attr = get_node_text(note)
                if note_value is not None:
                    data_info.notes.append(note_value)
            except Exception:
                logging.error("cansas_reader.read: error processing entry notes\n %s" % sys.exc_info()[1])

        # Sample info ###################
        value, attr = get_content('SASsample', dom)
        if 'name' in attr:
            data_info.sample.name = attr['name']

        _store_content('SASsample/ID',
                     dom, 'ID', data_info.sample)
        _store_float('SASsample/thickness',
                     dom, 'thickness', data_info.sample)
        _store_float('SASsample/transmission',
                     dom, 'transmission', data_info.sample)
        _store_float('SASsample/temperature',
                     dom, 'temperature', data_info.sample)
        nodes = xpath.Evaluate('SASsample/details', dom)
        for item in nodes:
            try:
                detail_value, detail_attr = get_node_text(item)
                if detail_value is not None:
                    data_info.sample.details.append(detail_value)
            except Exception:
                logging.error("cansas_reader.read: error processing sample details\n %s" % sys.exc_info()[1])

        # Position (as a vector)
        _store_float('SASsample/position/x',
                     dom, 'position.x', data_info.sample)
        _store_float('SASsample/position/y',
                     dom, 'position.y', data_info.sample)
        _store_float('SASsample/position/z',
                     dom, 'position.z', data_info.sample)

        # Orientation (as a vector)
        _store_float('SASsample/orientation/roll',
                     dom, 'orientation.x', data_info.sample)
        _store_float('SASsample/orientation/pitch',
                     dom, 'orientation.y', data_info.sample)
        _store_float('SASsample/orientation/yaw',
                     dom, 'orientation.z', data_info.sample)

        # Source info ###################
        value, attr = get_content('SASinstrument/SASsource', dom)
        if 'name' in attr:
            data_info.source.name = attr['name']

        _store_content('SASinstrument/SASsource/radiation',
                     dom, 'radiation', data_info.source)
        _store_content('SASinstrument/SASsource/beam_shape',
                     dom, 'beam_shape', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength',
                     dom, 'wavelength', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_min',
                     dom, 'wavelength_min', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_max',
                     dom, 'wavelength_max', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_spread',
                     dom, 'wavelength_spread', data_info.source)

        # Beam size (as a vector)
        value, attr = get_content('SASinstrument/SASsource/beam_size', dom)
        if 'name' in attr:
            data_info.source.beam_size_name = attr['name']

        _store_float('SASinstrument/SASsource/beam_size/x',
                     dom, 'beam_size.x', data_info.source)
        _store_float('SASinstrument/SASsource/beam_size/y',
                     dom, 'beam_size.y', data_info.source)
        _store_float('SASinstrument/SASsource/beam_size/z',
                     dom, 'beam_size.z', data_info.source)

        # Collimation info ###################
        nodes = xpath.Evaluate('SASinstrument/SAScollimation', dom)
        for item in nodes:
            collim = Collimation()
            value, attr = get_node_text(item)
            if 'name' in attr:
                collim.name = attr['name']
            _store_float('length', item, 'length', collim)

            # Look for apertures
            apert_list = xpath.Evaluate('aperture', item)
            for apert in apert_list:
                aperture = Aperture()

                # Get the name and type of the aperture
                ap_value, ap_attr = get_node_text(apert)
                if 'name' in ap_attr:
                    aperture.name = ap_attr['name']
                if 'type' in ap_attr:
                    aperture.type = ap_attr['type']

                _store_float('distance', apert, 'distance', aperture)

                value, attr = get_content('size', apert)
                if 'name' in attr:
                    aperture.size_name = attr['name']

                _store_float('size/x', apert, 'size.x', aperture)
                _store_float('size/y', apert, 'size.y', aperture)
                _store_float('size/z', apert, 'size.z', aperture)

                collim.aperture.append(aperture)

            data_info.collimation.append(collim)

        # Detector info ######################
        nodes = xpath.Evaluate('SASinstrument/SASdetector', dom)
        for item in nodes:

            detector = Detector()

            _store_content('name', item, 'name', detector)
            _store_float('SDD', item, 'distance', detector)

            # Detector offset (as a vector)
            _store_float('offset/x', item, 'offset.x', detector)
            _store_float('offset/y', item, 'offset.y', detector)
            _store_float('offset/z', item, 'offset.z', detector)

            # Detector orientation (as a vector)
            _store_float('orientation/roll', item, 'orientation.x', detector)
            _store_float('orientation/pitch', item, 'orientation.y', detector)
            _store_float('orientation/yaw', item, 'orientation.z', detector)

            # Beam center (as a vector)
            _store_float('beam_center/x', item, 'beam_center.x', detector)
            _store_float('beam_center/y', item, 'beam_center.y', detector)
            _store_float('beam_center/z', item, 'beam_center.z', detector)

            # Pixel size (as a vector)
            _store_float('pixel_size/x', item, 'pixel_size.x', detector)
            _store_float('pixel_size/y', item, 'pixel_size.y', detector)
            _store_float('pixel_size/z', item, 'pixel_size.z', detector)

            _store_float('slit_length', item, 'slit_length', detector)

            data_info.detector.append(detector)

        # Processes info ######################
        nodes = xpath.Evaluate('SASprocess', dom)
        for item in nodes:
            process = Process()
            _store_content('name', item, 'name', process)
            _store_content('date', item, 'date', process)
            _store_content('description', item, 'description', process)

            term_list = xpath.Evaluate('term', item)
            for term in term_list:
                try:
                    term_value, term_attr = get_node_text(term)
                    term_attr['value'] = term_value
                    if term_value is not None:
                        process.term.append(term_attr)
                except Exception:
                    logging.error("cansas_reader.read: error processing process term\n %s" % sys.exc_info()[1])

            note_list = xpath.Evaluate('SASprocessnote', item)
            for note in note_list:
                try:
                    note_value, note_attr = get_node_text(note)
                    if note_value is not None:
                        process.notes.append(note_value)
                except Exception:
                    logging.error("cansas_reader.read: error processing process notes\n %s" % sys.exc_info()[1])

            data_info.process.append(process)

        # Data info ######################
        nodes = xpath.Evaluate('SASdata', dom)
        if len(nodes) > 1:
            raise RuntimeError("CanSAS reader is not compatible with multiple SASdata entries")

        nodes = xpath.Evaluate('SASdata/Idata', dom)
        x = numpy.zeros(0)
        y = numpy.zeros(0)
        dx = numpy.zeros(0)
        dy = numpy.zeros(0)

        for item in nodes:
            _x, attr = get_float('Q', item)
            _dx, attr = get_float('Qdev', item)
            if _dx is None:
                _dx = 0.0
            # BUG FIX: the original raise referenced the undefined names
            # 'variable' and 'local_unit' (NameError); report the real ones.
            # Note: as in the original, the unit checked here is the one
            # attached to the last-read node (Qdev if present).
            if 'unit' in attr and attr['unit'].lower() != data_info.x_unit.lower():
                raise ValueError("CanSAS reader: unrecognized Q unit [%s]; expecting [%s]" \
                    % (attr['unit'], data_info.x_unit))

            _y, attr = get_float('I', item)
            _dy, attr = get_float('Idev', item)
            if _dy is None:
                _dy = 0.0
            # BUG FIX: same undefined-name problem as the Q check above
            if 'unit' in attr and attr['unit'].lower() != data_info.y_unit.lower():
                raise ValueError("CanSAS reader: unrecognized I unit [%s]; expecting [%s]" \
                    % (attr['unit'], data_info.y_unit))

            if _x is not None and _y is not None:
                x = numpy.append(x, _x)
                y = numpy.append(y, _y)
                dx = numpy.append(dx, _dx)
                dy = numpy.append(dy, _dy)

        data_info.x = x
        data_info.y = y
        data_info.dx = dx
        data_info.dy = dy

        data_conv_q = None
        data_conv_i = None

        if has_converter == True and data_info.x_unit != '1/A':
            data_conv_q = Converter('1/A')
            # Test it
            # BUG FIX: the original called output.Q_unit, but 'output' is
            # not defined in this method; data_info holds the units here
            data_conv_q(1.0, data_info.x_unit)

        if has_converter == True and data_info.y_unit != '1/cm':
            data_conv_i = Converter('1/cm')
            # Test it (same undefined-'output' fix as above)
            data_conv_i(1.0, data_info.y_unit)

        if data_conv_q is not None:
            data_info.xaxis("\\rm{Q}", data_info.x_unit)
        else:
            data_info.xaxis("\\rm{Q}", 'A^{-1}')
        if data_conv_i is not None:
            # BUG FIX: label read "\\{I(Q)}"; every other axis label in
            # this method uses the \rm prefix
            data_info.yaxis("\\rm{I(Q)}", data_info.y_unit)
        else:
            data_info.yaxis("\\rm{I(Q)}", "cm^{-1}")

        return data_info

    def write(self, filename, datainfo):
        """
        Write the content of a Data1D as a CanSAS XML file

        @param filename: name of the file to write
        @param datainfo: Data1D object
        @raise RuntimeError: when datainfo is not a Data1D instance
        """

        if not datainfo.__class__ == Data1D:
            raise RuntimeError("The cansas writer expects a Data1D instance")

        doc = xml.dom.minidom.Document()
        main_node = doc.createElement("SASroot")
        main_node.setAttribute("version", "1.0")
        doc.appendChild(main_node)

        entry_node = doc.createElement("SASentry")
        main_node.appendChild(entry_node)

        write_node(doc, entry_node, "Title", datainfo.title)

        for item in datainfo.run:
            runname = {}
            if item in datainfo.run_name and len(str(datainfo.run_name[item])) > 1:
                runname = {'name': datainfo.run_name[item]}
            write_node(doc, entry_node, "Run", item, runname)

        # Data info
        node = doc.createElement("SASdata")
        entry_node.appendChild(node)

        for i in range(len(datainfo.x)):
            pt = doc.createElement("Idata")
            node.appendChild(pt)
            write_node(doc, pt, "Q", datainfo.x[i], {'unit': datainfo.x_unit})
            # BUG FIX: the original bounds checks used >= i, which lets
            # i == len(vector) through and raises IndexError when the
            # vectors are shorter than x
            if len(datainfo.y) > i:
                write_node(doc, pt, "I", datainfo.y[i], {'unit': datainfo.y_unit})
            if len(datainfo.dx) > i:
                write_node(doc, pt, "Qdev", datainfo.dx[i], {'unit': datainfo.x_unit})
            if len(datainfo.dy) > i:
                write_node(doc, pt, "Idev", datainfo.dy[i], {'unit': datainfo.y_unit})

        # Sample info
        sample = doc.createElement("SASsample")
        if datainfo.sample.name is not None:
            sample.setAttribute("name", str(datainfo.sample.name))
        entry_node.appendChild(sample)
        write_node(doc, sample, "ID", str(datainfo.sample.ID))
        write_node(doc, sample, "thickness", datainfo.sample.thickness, {"unit": datainfo.sample.thickness_unit})
        write_node(doc, sample, "transmission", datainfo.sample.transmission)
        write_node(doc, sample, "temperature", datainfo.sample.temperature, {"unit": datainfo.sample.temperature_unit})

        for item in datainfo.sample.details:
            write_node(doc, sample, "details", item)

        # Vector elements are only attached when at least one
        # component was actually written
        pos = doc.createElement("position")
        written = write_node(doc, pos, "x", datainfo.sample.position.x, {"unit": datainfo.sample.position_unit})
        written = written | write_node(doc, pos, "y", datainfo.sample.position.y, {"unit": datainfo.sample.position_unit})
        written = written | write_node(doc, pos, "z", datainfo.sample.position.z, {"unit": datainfo.sample.position_unit})
        if written == True:
            sample.appendChild(pos)

        ori = doc.createElement("orientation")
        written = write_node(doc, ori, "roll", datainfo.sample.orientation.x, {"unit": datainfo.sample.orientation_unit})
        written = written | write_node(doc, ori, "pitch", datainfo.sample.orientation.y, {"unit": datainfo.sample.orientation_unit})
        written = written | write_node(doc, ori, "yaw", datainfo.sample.orientation.z, {"unit": datainfo.sample.orientation_unit})
        if written == True:
            sample.appendChild(ori)

        # Instrument info
        instr = doc.createElement("SASinstrument")
        entry_node.appendChild(instr)

        write_node(doc, instr, "name", datainfo.instrument)

        # Source
        source = doc.createElement("SASsource")
        if datainfo.source.name is not None:
            source.setAttribute("name", str(datainfo.source.name))
        instr.appendChild(source)

        write_node(doc, source, "radiation", datainfo.source.radiation)
        write_node(doc, source, "beam_shape", datainfo.source.beam_shape)
        size = doc.createElement("beam_size")
        if datainfo.source.beam_size_name is not None:
            size.setAttribute("name", str(datainfo.source.beam_size_name))
        written = write_node(doc, size, "x", datainfo.source.beam_size.x, {"unit": datainfo.source.beam_size_unit})
        written = written | write_node(doc, size, "y", datainfo.source.beam_size.y, {"unit": datainfo.source.beam_size_unit})
        written = written | write_node(doc, size, "z", datainfo.source.beam_size.z, {"unit": datainfo.source.beam_size_unit})
        if written == True:
            source.appendChild(size)

        write_node(doc, source, "wavelength", datainfo.source.wavelength, {"unit": datainfo.source.wavelength_unit})
        write_node(doc, source, "wavelength_min", datainfo.source.wavelength_min, {"unit": datainfo.source.wavelength_min_unit})
        write_node(doc, source, "wavelength_max", datainfo.source.wavelength_max, {"unit": datainfo.source.wavelength_max_unit})
        write_node(doc, source, "wavelength_spread", datainfo.source.wavelength_spread, {"unit": datainfo.source.wavelength_spread_unit})

        # Collimation
        for item in datainfo.collimation:
            coll = doc.createElement("SAScollimation")
            if item.name is not None:
                coll.setAttribute("name", str(item.name))
            instr.appendChild(coll)

            write_node(doc, coll, "length", item.length, {"unit": item.length_unit})

            for apert in item.aperture:
                ap = doc.createElement("aperture")
                if apert.name is not None:
                    ap.setAttribute("name", str(apert.name))
                if apert.type is not None:
                    ap.setAttribute("type", str(apert.type))
                coll.appendChild(ap)

                write_node(doc, ap, "distance", apert.distance, {"unit": apert.distance_unit})

                size = doc.createElement("size")
                if apert.size_name is not None:
                    size.setAttribute("name", str(apert.size_name))
                written = write_node(doc, size, "x", apert.size.x, {"unit": apert.size_unit})
                written = written | write_node(doc, size, "y", apert.size.y, {"unit": apert.size_unit})
                written = written | write_node(doc, size, "z", apert.size.z, {"unit": apert.size_unit})
                if written == True:
                    ap.appendChild(size)

        # Detectors
        for item in datainfo.detector:
            det = doc.createElement("SASdetector")
            written = write_node(doc, det, "name", item.name)
            written = written | write_node(doc, det, "SDD", item.distance, {"unit": item.distance_unit})
            written = written | write_node(doc, det, "slit_length", item.slit_length, {"unit": item.slit_length_unit})
            if written == True:
                instr.appendChild(det)

            off = doc.createElement("offset")
            written = write_node(doc, off, "x", item.offset.x, {"unit": item.offset_unit})
            written = written | write_node(doc, off, "y", item.offset.y, {"unit": item.offset_unit})
            written = written | write_node(doc, off, "z", item.offset.z, {"unit": item.offset_unit})
            if written == True:
                det.appendChild(off)

            center = doc.createElement("beam_center")
            written = write_node(doc, center, "x", item.beam_center.x, {"unit": item.beam_center_unit})
            written = written | write_node(doc, center, "y", item.beam_center.y, {"unit": item.beam_center_unit})
            written = written | write_node(doc, center, "z", item.beam_center.z, {"unit": item.beam_center_unit})
            if written == True:
                det.appendChild(center)

            pix = doc.createElement("pixel_size")
            written = write_node(doc, pix, "x", item.pixel_size.x, {"unit": item.pixel_size_unit})
            written = written | write_node(doc, pix, "y", item.pixel_size.y, {"unit": item.pixel_size_unit})
            written = written | write_node(doc, pix, "z", item.pixel_size.z, {"unit": item.pixel_size_unit})
            if written == True:
                det.appendChild(pix)

            ori = doc.createElement("orientation")
            written = write_node(doc, ori, "roll", item.orientation.x, {"unit": item.orientation_unit})
            written = written | write_node(doc, ori, "pitch", item.orientation.y, {"unit": item.orientation_unit})
            written = written | write_node(doc, ori, "yaw", item.orientation.z, {"unit": item.orientation_unit})
            if written == True:
                det.appendChild(ori)

        # Processes info
        for item in datainfo.process:
            node = doc.createElement("SASprocess")
            entry_node.appendChild(node)

            write_node(doc, node, "name", item.name)
            write_node(doc, node, "date", item.date)
            write_node(doc, node, "description", item.description)
            for term in item.term:
                # NOTE(review): this mutates the term dictionary held by
                # datainfo (removes 'value'), as the original code did
                value = term['value']
                del term['value']
                write_node(doc, node, "term", value, term)
            for note in item.notes:
                write_node(doc, node, "SASprocessnote", note)

        # Write the file; close it even if toprettyxml fails
        fd = open(filename, 'w')
        try:
            fd.write(doc.toprettyxml())
        finally:
            fd.close()
---|
| 699 | |
---|
| 700 | |
---|
if __name__ == "__main__":
    # Basic smoke test: parse the reference file and dump the result.
    logging.basicConfig(level=logging.ERROR,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='cansas_reader.log',
                        filemode='w')
    reader = Reader()
    # Single-argument print with parentheses behaves identically under
    # Python 2 and works unchanged under Python 3.
    print(reader.read("../test/cansas1d.xml"))
---|
| 708 | |
---|
| 709 | |
---|
| 710 | |
---|