"""
This software was developed by the University of Tennessee as part of the
Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
project funded by the US National Science Foundation.

See the license text in license.txt

copyright 2008, University of Tennessee
"""
# Known issue: reader not compatible with multiple SASdata entries
# within a single SASentry. Will raise a runtime error.

#TODO: check that all vectors are written only if they have at least one non-empty value
#TODO: Writing only allows one SASentry per file. Would be best to allow multiple entries.
#TODO: Store error list
#TODO: Allow for additional meta data for each section
#TODO: Notes need to be implemented. They can be any XML structure in version 1.0
#      Process notes have the same problem.
#TODO: Unit conversion is not complete (temperature units are missing)


import logging
import numpy
import os, sys
from DataLoader.data_info import Data1D, Collimation, Detector, Process, Aperture
from xml import xpath
import xml.dom.minidom


has_converter = True
try:
    from data_util.nxsunit import Converter
except:
    has_converter = False
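# Note: when data_util is available, Converter maps a value between units,
# e.g. Converter('cm')(1.0, units='mm') should give 10.0 (assuming both
# length units are known to nxsunit). Without it, any unit mismatch in the
# readers below raises a ValueError instead of being converted.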

def write_node(doc, parent, name, value, attr={}):
    """
    @param doc: document DOM
    @param parent: parent node
    @param name: tag of the element
    @param value: value of the child text node
    @param attr: attribute dictionary
    @return: True if something was appended, otherwise False
    """
    if value is not None:
        node = doc.createElement(name)
        node.appendChild(doc.createTextNode(str(value)))
        for item in attr:
            node.setAttribute(item, attr[item])
        parent.appendChild(node)
        return True
    return False
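# Illustration: with a minidom Document `doc` and an element `pt`,
# write_node(doc, pt, "Q", 0.001, {'unit': '1/A'}) appends
# <Q unit="1/A">0.001</Q> to `pt` and returns True; when value is None
# nothing is appended and False is returned.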

def get_node_text(node):
    """
    Get the text content of a node

    @param node: node to read from
    @return: content, attribute list
    """
    content = None
    attr = {}
    for item in node.childNodes:
        if item.nodeName.find('text')>=0 \
           and len(item.nodeValue.strip())>0:
            content = item.nodeValue.strip()
            break

    if node.hasAttributes():
        for i in range(node.attributes.length):
            attr[node.attributes.item(i).nodeName] \
                = node.attributes.item(i).nodeValue

    return content, attr
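# Illustration: for a node parsed from <Run name="a">run1</Run>,
# get_node_text returns ('run1', {'name': 'a'}); if the node has no
# non-empty text child the content comes back as None.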

def get_content(location, node):
    """
    Get the first instance of the content of an xpath location

    @param location: xpath location
    @param node: node to start at
    """
    value = None
    attr = {}
    nodes = xpath.Evaluate(location, node)
    if len(nodes)>0:
        try:
            # Skip comments and empty lines
            for item in nodes[0].childNodes:
                if item.nodeName.find('text')>=0 \
                   and len(item.nodeValue.strip())>0:
                    value = item.nodeValue.strip()
                    break

            if nodes[0].hasAttributes():
                for i in range(nodes[0].attributes.length):
                    attr[nodes[0].attributes.item(i).nodeName] \
                        = nodes[0].attributes.item(i).nodeValue
        except:
            # Problem reading the node. Skip it and return that
            # nothing was found
            logging.error("cansas_reader.get_content: %s\n %s" % (location, sys.exc_value))

    return value, attr

def get_float(location, node):
    """
    Get the content of a node as a float

    @param location: xpath location
    @param node: node to start at
    """
    value = None
    attr = {}
    content, attr = get_content(location, node)
    if content is not None:
        try:
            value = float(content)
        except:
            # Could not parse the content; skip it and return None
            logging.error("cansas_reader.get_float: could not convert '%s' to float" % content)

    return value, attr

def _store_float(location, node, variable, storage):
    """
    Get the content of an xpath location and store
    the result. Check that the units are compatible
    with the destination. The value is expected to
    be a float.

    The xpath location might or might not exist.
    If it does not exist, nothing is done.

    @param location: xpath location to fetch
    @param node: node to read the data from
    @param variable: name of the data member to store it in [string]
    @param storage: data object that has the 'variable' data member

    @raise ValueError: raised when the units are not recognized
    """
    value, attr = get_float(location, node)
    if value is not None:
        # If the entry has units, check to see that they are
        # compatible with what we currently have in the data object
        if attr.has_key('unit'):
            toks = variable.split('.')
            exec "local_unit = storage.%s_unit" % toks[0]
            if attr['unit'].lower()!=local_unit.lower():
                if has_converter==True:
                    try:
                        conv = Converter(attr['unit'])
                        exec "storage.%s = %g" % (variable, conv(value, units=local_unit))
                    except:
                        raise ValueError, "CanSAS reader: could not convert %s unit [%s]; expecting [%s]\n %s" \
                            % (variable, attr['unit'], local_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized %s unit [%s]; expecting [%s]" \
                        % (variable, attr['unit'], local_unit)
            else:
                exec "storage.%s = value" % variable
        else:
            exec "storage.%s = value" % variable
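# Illustration: with storage.thickness_unit == 'mm', an element
# <thickness unit="cm">1.0</thickness> found at the given xpath ends up
# stored as storage.thickness = 10.0 when the unit converter is available;
# without the converter the unit mismatch raises a ValueError.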

def _store_content(location, node, variable, storage):
    """
    Get the content of an xpath location and store
    the result. The value is treated as a string.

    The xpath location might or might not exist.
    If it does not exist, nothing is done.

    @param location: xpath location to fetch
    @param node: node to read the data from
    @param variable: name of the data member to store it in [string]
    @param storage: data object that has the 'variable' data member
    """
    value, attr = get_content(location, node)
    if value is not None:
        exec "storage.%s = value" % variable


class Reader:
    """
    Class to load CanSAS 1D XML files

    Dependencies:
        The CanSAS reader requires PyXML 0.8.4 or later.
    """
    ## CanSAS version
    version = '1.0'
    ## File type
    type = ["CanSAS 1D files (*.xml)|*.xml"]
    ## List of allowed extensions
    ext = ['.xml', '.XML']

    def read(self, path):
        """
        Load data file

        @param path: file path
        @return: Data1D object if a single SASentry was found,
            or a list of Data1D objects if multiple entries were found,
            or None if nothing was found
        @raise RuntimeError: when the file can't be opened
        @raise ValueError: when the lengths of the data vectors are inconsistent
        """
        from xml.dom.minidom import parse

        output = []

        if os.path.isfile(path):
            basename = os.path.basename(path)
            root, extension = os.path.splitext(basename)
            if extension.lower() in self.ext:

                dom = parse(path)

                # Check the format version number
                nodes = xpath.Evaluate('SASroot', dom)
                if nodes[0].hasAttributes():
                    for i in range(nodes[0].attributes.length):
                        if nodes[0].attributes.item(i).nodeName=='version':
                            if nodes[0].attributes.item(i).nodeValue != self.version:
                                raise ValueError, "cansas_reader: unrecognized version number %s" % \
                                    nodes[0].attributes.item(i).nodeValue

                entry_list = xpath.Evaluate('SASroot/SASentry', dom)
                for entry in entry_list:
                    sas_entry = self._parse_entry(entry)
                    sas_entry.filename = basename
                    output.append(sas_entry)

        else:
            raise RuntimeError, "%s is not a file" % path

        # Return output consistent with the loader's API
        if len(output)==0:
            return None
        elif len(output)==1:
            return output[0]
        else:
            return output

    def _parse_entry(self, dom):
        """
        Parse a SASentry

        @param dom: SASentry node
        @return: Data1D object
        """
        x = numpy.zeros(0)
        y = numpy.zeros(0)

        data_info = Data1D(x, y)

        # Look up title
        _store_content('Title', dom, 'title', data_info)
        # Look up run number
        nodes = xpath.Evaluate('Run', dom)
        for item in nodes:
            value, attr = get_node_text(item)
            if value is not None:
                data_info.run.append(value)
                if attr.has_key('name'):
                    data_info.run_name[value] = attr['name']

        # Look up instrument name
        _store_content('SASinstrument/name', dom, 'instrument', data_info)
        #value, attr = get_content('SASinstrument', dom)
        #if attr.has_key('name'):
        #    data_info.instrument = attr['name']

        note_list = xpath.Evaluate('SASnote', dom)
        for note in note_list:
            try:
                note_value, note_attr = get_node_text(note)
                if note_value is not None:
                    data_info.notes.append(note_value)
            except:
                logging.error("cansas_reader.read: error processing entry notes\n %s" % sys.exc_value)

        # Sample info ###################
        value, attr = get_content('SASsample', dom)
        if attr.has_key('name'):
            data_info.sample.name = attr['name']

        _store_content('SASsample/ID',
                       dom, 'ID', data_info.sample)
        _store_float('SASsample/thickness',
                     dom, 'thickness', data_info.sample)
        _store_float('SASsample/transmission',
                     dom, 'transmission', data_info.sample)
        _store_float('SASsample/temperature',
                     dom, 'temperature', data_info.sample)
        nodes = xpath.Evaluate('SASsample/details', dom)
        for item in nodes:
            try:
                detail_value, detail_attr = get_node_text(item)
                if detail_value is not None:
                    data_info.sample.details.append(detail_value)
            except:
                logging.error("cansas_reader.read: error processing sample details\n %s" % sys.exc_value)

        # Position (as a vector)
        _store_float('SASsample/position/x',
                     dom, 'position.x', data_info.sample)
        _store_float('SASsample/position/y',
                     dom, 'position.y', data_info.sample)
        _store_float('SASsample/position/z',
                     dom, 'position.z', data_info.sample)

        # Orientation (as a vector)
        _store_float('SASsample/orientation/roll',
                     dom, 'orientation.x', data_info.sample)
        _store_float('SASsample/orientation/pitch',
                     dom, 'orientation.y', data_info.sample)
        _store_float('SASsample/orientation/yaw',
                     dom, 'orientation.z', data_info.sample)

        # Source info ###################
        value, attr = get_content('SASinstrument/SASsource', dom)
        if attr.has_key('name'):
            data_info.source.name = attr['name']

        _store_content('SASinstrument/SASsource/radiation',
                       dom, 'radiation', data_info.source)
        _store_content('SASinstrument/SASsource/beam_shape',
                       dom, 'beam_shape', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength',
                     dom, 'wavelength', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_min',
                     dom, 'wavelength_min', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_max',
                     dom, 'wavelength_max', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_spread',
                     dom, 'wavelength_spread', data_info.source)

        # Beam size (as a vector)
        value, attr = get_content('SASinstrument/SASsource/beam_size', dom)
        if attr.has_key('name'):
            data_info.source.beam_size_name = attr['name']

        _store_float('SASinstrument/SASsource/beam_size/x',
                     dom, 'beam_size.x', data_info.source)
        _store_float('SASinstrument/SASsource/beam_size/y',
                     dom, 'beam_size.y', data_info.source)
        _store_float('SASinstrument/SASsource/beam_size/z',
                     dom, 'beam_size.z', data_info.source)

        # Collimation info ###################
        nodes = xpath.Evaluate('SASinstrument/SAScollimation', dom)
        for item in nodes:
            collim = Collimation()
            value, attr = get_node_text(item)
            if attr.has_key('name'):
                collim.name = attr['name']
            _store_float('length', item, 'length', collim)

            # Look for apertures
            apert_list = xpath.Evaluate('aperture', item)
            for apert in apert_list:
                aperture = Aperture()

                # Get the name and type of the aperture
                ap_value, ap_attr = get_node_text(apert)
                if ap_attr.has_key('name'):
                    aperture.name = ap_attr['name']
                if ap_attr.has_key('type'):
                    aperture.type = ap_attr['type']

                _store_float('distance', apert, 'distance', aperture)

                value, attr = get_content('size', apert)
                if attr.has_key('name'):
                    aperture.size_name = attr['name']

                _store_float('size/x', apert, 'size.x', aperture)
                _store_float('size/y', apert, 'size.y', aperture)
                _store_float('size/z', apert, 'size.z', aperture)

                collim.aperture.append(aperture)

            data_info.collimation.append(collim)

        # Detector info ######################
        nodes = xpath.Evaluate('SASinstrument/SASdetector', dom)
        for item in nodes:

            detector = Detector()

            _store_content('name', item, 'name', detector)
            _store_float('SDD', item, 'distance', detector)

            # Detector offset (as a vector)
            _store_float('offset/x', item, 'offset.x', detector)
            _store_float('offset/y', item, 'offset.y', detector)
            _store_float('offset/z', item, 'offset.z', detector)

            # Detector orientation (as a vector)
            _store_float('orientation/roll', item, 'orientation.x', detector)
            _store_float('orientation/pitch', item, 'orientation.y', detector)
            _store_float('orientation/yaw', item, 'orientation.z', detector)

            # Beam center (as a vector)
            _store_float('beam_center/x', item, 'beam_center.x', detector)
            _store_float('beam_center/y', item, 'beam_center.y', detector)
            _store_float('beam_center/z', item, 'beam_center.z', detector)

            # Pixel size (as a vector)
            _store_float('pixel_size/x', item, 'pixel_size.x', detector)
            _store_float('pixel_size/y', item, 'pixel_size.y', detector)
            _store_float('pixel_size/z', item, 'pixel_size.z', detector)

            _store_float('slit_length', item, 'slit_length', detector)

            data_info.detector.append(detector)

        # Processes info ######################
        nodes = xpath.Evaluate('SASprocess', dom)
        for item in nodes:
            process = Process()
            _store_content('name', item, 'name', process)
            _store_content('date', item, 'date', process)
            _store_content('description', item, 'description', process)

            term_list = xpath.Evaluate('term', item)
            for term in term_list:
                try:
                    term_value, term_attr = get_node_text(term)
                    term_attr['value'] = term_value
                    if term_value is not None:
                        process.term.append(term_attr)
                except:
                    logging.error("cansas_reader.read: error processing process term\n %s" % sys.exc_value)

            note_list = xpath.Evaluate('SASprocessnote', item)
            for note in note_list:
                try:
                    note_value, note_attr = get_node_text(note)
                    if note_value is not None:
                        process.notes.append(note_value)
                except:
                    logging.error("cansas_reader.read: error processing process notes\n %s" % sys.exc_value)

            data_info.process.append(process)

        # Data info ######################
        nodes = xpath.Evaluate('SASdata', dom)
        if len(nodes)>1:
            raise RuntimeError, "CanSAS reader is not compatible with multiple SASdata entries"

        nodes = xpath.Evaluate('SASdata/Idata', dom)
        x = numpy.zeros(0)
        y = numpy.zeros(0)
        dx = numpy.zeros(0)
        dy = numpy.zeros(0)

        for item in nodes:
            _x, attr = get_float('Q', item)
            _dx, attr_d = get_float('Qdev', item)
            if _dx == None:
                _dx = 0.0

            if attr.has_key('unit') and attr['unit'].lower() != data_info.x_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_q = Converter(attr['unit'])
                        _x = data_conv_q(_x, units=data_info.x_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert Q unit [%s]; expecting [%s]\n %s" \
                            % (attr['unit'], data_info.x_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized Q unit [%s]; expecting [%s]" \
                        % (attr['unit'], data_info.x_unit)
            if attr_d.has_key('unit') and attr_d['unit'].lower() != data_info.x_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_q = Converter(attr_d['unit'])
                        _dx = data_conv_q(_dx, units=data_info.x_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert dQ unit [%s]; expecting [%s]\n %s" \
                            % (attr_d['unit'], data_info.x_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized dQ unit [%s]; expecting [%s]" \
                        % (attr_d['unit'], data_info.x_unit)

            _y, attr = get_float('I', item)
            _dy, attr_d = get_float('Idev', item)
            if _dy == None:
                _dy = 0.0
            if attr.has_key('unit') and attr['unit'].lower() != data_info.y_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_i = Converter(attr['unit'])
                        _y = data_conv_i(_y, units=data_info.y_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert I(q) unit [%s]; expecting [%s]\n %s" \
                            % (attr['unit'], data_info.y_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized I(q) unit [%s]; expecting [%s]" \
                        % (attr['unit'], data_info.y_unit)
            if attr_d.has_key('unit') and attr_d['unit'].lower() != data_info.y_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_i = Converter(attr_d['unit'])
                        _dy = data_conv_i(_dy, units=data_info.y_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert dI(q) unit [%s]; expecting [%s]\n %s" \
                            % (attr_d['unit'], data_info.y_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized dI(q) unit [%s]; expecting [%s]" \
                        % (attr_d['unit'], data_info.y_unit)

            if _x is not None and _y is not None:
                x = numpy.append(x, _x)
                y = numpy.append(y, _y)
                dx = numpy.append(dx, _dx)
                dy = numpy.append(dy, _dy)

        data_info.x = x
        data_info.y = y
        data_info.dx = dx
        data_info.dy = dy

        data_conv_q = None
        data_conv_i = None

        if has_converter == True and data_info.x_unit != '1/A':
            data_conv_q = Converter('1/A')
            # Test it
            data_conv_q(1.0, units=data_info.x_unit)

        if has_converter == True and data_info.y_unit != '1/cm':
            data_conv_i = Converter('1/cm')
            # Test it
            data_conv_i(1.0, units=data_info.y_unit)

        if data_conv_q is not None:
            data_info.xaxis("\\rm{Q}", data_info.x_unit)
        else:
            data_info.xaxis("\\rm{Q}", 'A^{-1}')
        if data_conv_i is not None:
            data_info.yaxis("\\rm{I(Q)}", data_info.y_unit)
        else:
            data_info.yaxis("\\rm{I(Q)}", "cm^{-1}")

        return data_info

    def write(self, filename, datainfo):
        """
        Write the content of a Data1D as a CanSAS XML file

        @param filename: name of the file to write
        @param datainfo: Data1D object
        """

        if not datainfo.__class__ == Data1D:
            raise RuntimeError, "The CanSAS writer expects a Data1D instance"

        doc = xml.dom.minidom.Document()
        main_node = doc.createElement("SASroot")
        main_node.setAttribute("version", "1.0")
        doc.appendChild(main_node)

        entry_node = doc.createElement("SASentry")
        main_node.appendChild(entry_node)

        write_node(doc, entry_node, "Title", datainfo.title)

        for item in datainfo.run:
            runname = {}
            if datainfo.run_name.has_key(item) and len(str(datainfo.run_name[item]))>1:
                runname = {'name': datainfo.run_name[item]}
            write_node(doc, entry_node, "Run", item, runname)

        # Data info
        node = doc.createElement("SASdata")
        entry_node.appendChild(node)

        for i in range(len(datainfo.x)):
            pt = doc.createElement("Idata")
            node.appendChild(pt)
            write_node(doc, pt, "Q", datainfo.x[i], {'unit': datainfo.x_unit})
            if len(datainfo.y) > i:
                write_node(doc, pt, "I", datainfo.y[i], {'unit': datainfo.y_unit})
            if len(datainfo.dx) > i:
                write_node(doc, pt, "Qdev", datainfo.dx[i], {'unit': datainfo.x_unit})
            if len(datainfo.dy) > i:
                write_node(doc, pt, "Idev", datainfo.dy[i], {'unit': datainfo.y_unit})

        # Sample info
        sample = doc.createElement("SASsample")
        if datainfo.sample.name is not None:
            sample.setAttribute("name", str(datainfo.sample.name))
        entry_node.appendChild(sample)
        write_node(doc, sample, "ID", str(datainfo.sample.ID))
        write_node(doc, sample, "thickness", datainfo.sample.thickness, {"unit": datainfo.sample.thickness_unit})
        write_node(doc, sample, "transmission", datainfo.sample.transmission)
        write_node(doc, sample, "temperature", datainfo.sample.temperature, {"unit": datainfo.sample.temperature_unit})

        for item in datainfo.sample.details:
            write_node(doc, sample, "details", item)

        pos = doc.createElement("position")
        written = write_node(doc, pos, "x", datainfo.sample.position.x, {"unit": datainfo.sample.position_unit})
        written = written | write_node(doc, pos, "y", datainfo.sample.position.y, {"unit": datainfo.sample.position_unit})
        written = written | write_node(doc, pos, "z", datainfo.sample.position.z, {"unit": datainfo.sample.position_unit})
        if written == True:
            sample.appendChild(pos)

        ori = doc.createElement("orientation")
        written = write_node(doc, ori, "roll", datainfo.sample.orientation.x, {"unit": datainfo.sample.orientation_unit})
        written = written | write_node(doc, ori, "pitch", datainfo.sample.orientation.y, {"unit": datainfo.sample.orientation_unit})
        written = written | write_node(doc, ori, "yaw", datainfo.sample.orientation.z, {"unit": datainfo.sample.orientation_unit})
        if written == True:
            sample.appendChild(ori)

        # Instrument info
        instr = doc.createElement("SASinstrument")
        entry_node.appendChild(instr)

        write_node(doc, instr, "name", datainfo.instrument)

        # Source
        source = doc.createElement("SASsource")
        if datainfo.source.name is not None:
            source.setAttribute("name", str(datainfo.source.name))
        instr.appendChild(source)

        write_node(doc, source, "radiation", datainfo.source.radiation)
        write_node(doc, source, "beam_shape", datainfo.source.beam_shape)
        size = doc.createElement("beam_size")
        if datainfo.source.beam_size_name is not None:
            size.setAttribute("name", str(datainfo.source.beam_size_name))
        written = write_node(doc, size, "x", datainfo.source.beam_size.x, {"unit": datainfo.source.beam_size_unit})
        written = written | write_node(doc, size, "y", datainfo.source.beam_size.y, {"unit": datainfo.source.beam_size_unit})
        written = written | write_node(doc, size, "z", datainfo.source.beam_size.z, {"unit": datainfo.source.beam_size_unit})
        if written == True:
            source.appendChild(size)

        write_node(doc, source, "wavelength", datainfo.source.wavelength, {"unit": datainfo.source.wavelength_unit})
        write_node(doc, source, "wavelength_min", datainfo.source.wavelength_min, {"unit": datainfo.source.wavelength_min_unit})
        write_node(doc, source, "wavelength_max", datainfo.source.wavelength_max, {"unit": datainfo.source.wavelength_max_unit})
        write_node(doc, source, "wavelength_spread", datainfo.source.wavelength_spread, {"unit": datainfo.source.wavelength_spread_unit})

        # Collimation
        for item in datainfo.collimation:
            coll = doc.createElement("SAScollimation")
            if item.name is not None:
                coll.setAttribute("name", str(item.name))
            instr.appendChild(coll)

            write_node(doc, coll, "length", item.length, {"unit": item.length_unit})

            for apert in item.aperture:
                ap = doc.createElement("aperture")
                if apert.name is not None:
                    ap.setAttribute("name", str(apert.name))
                if apert.type is not None:
                    ap.setAttribute("type", str(apert.type))
                coll.appendChild(ap)

                write_node(doc, ap, "distance", apert.distance, {"unit": apert.distance_unit})

                size = doc.createElement("size")
                if apert.size_name is not None:
                    size.setAttribute("name", str(apert.size_name))
                written = write_node(doc, size, "x", apert.size.x, {"unit": apert.size_unit})
                written = written | write_node(doc, size, "y", apert.size.y, {"unit": apert.size_unit})
                written = written | write_node(doc, size, "z", apert.size.z, {"unit": apert.size_unit})
                if written == True:
                    ap.appendChild(size)

        # Detectors
        for item in datainfo.detector:
            det = doc.createElement("SASdetector")
            written = write_node(doc, det, "name", item.name)
            written = written | write_node(doc, det, "SDD", item.distance, {"unit": item.distance_unit})
            written = written | write_node(doc, det, "slit_length", item.slit_length, {"unit": item.slit_length_unit})
            if written == True:
                instr.appendChild(det)

            off = doc.createElement("offset")
            written = write_node(doc, off, "x", item.offset.x, {"unit": item.offset_unit})
            written = written | write_node(doc, off, "y", item.offset.y, {"unit": item.offset_unit})
            written = written | write_node(doc, off, "z", item.offset.z, {"unit": item.offset_unit})
            if written == True:
                det.appendChild(off)

            center = doc.createElement("beam_center")
            written = write_node(doc, center, "x", item.beam_center.x, {"unit": item.beam_center_unit})
            written = written | write_node(doc, center, "y", item.beam_center.y, {"unit": item.beam_center_unit})
            written = written | write_node(doc, center, "z", item.beam_center.z, {"unit": item.beam_center_unit})
            if written == True:
                det.appendChild(center)

            pix = doc.createElement("pixel_size")
            written = write_node(doc, pix, "x", item.pixel_size.x, {"unit": item.pixel_size_unit})
            written = written | write_node(doc, pix, "y", item.pixel_size.y, {"unit": item.pixel_size_unit})
            written = written | write_node(doc, pix, "z", item.pixel_size.z, {"unit": item.pixel_size_unit})
            if written == True:
                det.appendChild(pix)

            ori = doc.createElement("orientation")
            written = write_node(doc, ori, "roll", item.orientation.x, {"unit": item.orientation_unit})
            written = written | write_node(doc, ori, "pitch", item.orientation.y, {"unit": item.orientation_unit})
            written = written | write_node(doc, ori, "yaw", item.orientation.z, {"unit": item.orientation_unit})
            if written == True:
                det.appendChild(ori)

        # Processes info
        for item in datainfo.process:
            node = doc.createElement("SASprocess")
            entry_node.appendChild(node)

            write_node(doc, node, "name", item.name)
            write_node(doc, node, "date", item.date)
            write_node(doc, node, "description", item.description)
            for term in item.term:
                value = term['value']
                del term['value']
                write_node(doc, node, "term", value, term)
            for note in item.notes:
                write_node(doc, node, "SASprocessnote", note)

        # Write the file
        fd = open(filename, 'w')
        fd.write(doc.toprettyxml())
        fd.close()

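# For reference, write() emits a skeleton CanSAS 1.0 document roughly of the
# form below (illustrative only; element order follows the code above and the
# exact whitespace depends on minidom's toprettyxml):
#
#   <SASroot version="1.0">
#     <SASentry>
#       <Title>...</Title>
#       <Run>...</Run>
#       <SASdata>
#         <Idata><Q unit="...">...</Q><I unit="...">...</I> ...</Idata>
#       </SASdata>
#       <SASsample>...</SASsample>
#       <SASinstrument>
#         <SASsource>...</SASsource>
#         <SAScollimation>...</SAScollimation>
#         <SASdetector>...</SASdetector>
#       </SASinstrument>
#       <SASprocess>...</SASprocess>
#     </SASentry>
#   </SASroot>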


if __name__ == "__main__":
    logging.basicConfig(level=logging.ERROR,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='cansas_reader.log',
                        filemode='w')
    reader = Reader()
    print reader.read("../test/cansas1d.xml")
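    # Round-trip sketch (assumes the test file holds a single SASentry, so
    # read() returns a Data1D rather than a list; uncomment to try it):
    #data = reader.read("../test/cansas1d.xml")
    #reader.write("cansas1d_copy.xml", data)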
---|