[8780e9a] | 1 | """ |
---|
| 2 | This software was developed by the University of Tennessee as part of the |
---|
| 3 | Distributed Data Analysis of Neutron Scattering Experiments (DANSE) |
---|
| 4 | project funded by the US National Science Foundation. |
---|
| 5 | |
---|
| 6 | See the license text in license.txt |
---|
| 7 | |
---|
| 8 | copyright 2008, University of Tennessee |
---|
| 9 | """ |
---|
[579ba85] | 10 | # Known issue: reader not compatible with multiple SASdata entries |
---|
| 11 | # within a single SASentry. Will raise a runtime error. |
---|
[8780e9a] | 12 | |
---|
[4c00964] | 13 | #TODO: check that all vectors are written only if they have at least one non-empty value |
---|
[579ba85] | 14 | #TODO: Writing only allows one SASentry per file. Would be best to allow multiple entries. |
---|
[8780e9a] | 15 | #TODO: Store error list |
---|
| 16 | #TODO: Allow for additional meta data for each section |
---|
| 17 | #TODO: Notes need to be implemented. They can be any XML structure in version 1.0 |
---|
| 18 | # Process notes have the same problem. |
---|
[e390933] | 19 | #TODO: Unit conversion is not complete (temperature units are missing) |
---|
[8780e9a] | 20 | |
---|
| 21 | |
---|
import logging
import numpy
import os, sys
from DataLoader.data_info import Data1D, Collimation, Detector, Process, Aperture
from xml import xpath
import xml.dom.minidom


has_converter = True
try:
    from data_util.nxsunit import Converter
except:
    has_converter = False

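# Note on units: when data_util.nxsunit is available, unit attributes found in
# the file are converted with Converter as used below, e.g. (illustrative
# sketch only, assuming the relevant length units are registered in nxsunit):
#     conv = Converter('mm')
#     value_in_cm = conv(10.0, units='cm')
# Without the converter, a unit mismatch raises a ValueError instead.
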
def write_node(doc, parent, name, value, attr={}):
    """
        @param doc: document DOM
        @param parent: parent node
        @param name: tag of the element
        @param value: value of the child text node
        @param attr: attribute dictionary
        @return: True if something was appended, otherwise False
    """
    if value is not None:
        node = doc.createElement(name)
        node.appendChild(doc.createTextNode(str(value)))
        for item in attr:
            node.setAttribute(item, attr[item])
        parent.appendChild(node)
        return True
    return False

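# Example (illustrative only, not part of the loader API): appending a Q value
# to an Idata element yields <Q unit="1/A">0.001</Q>:
#     doc = xml.dom.minidom.Document()
#     pt = doc.createElement("Idata")
#     write_node(doc, pt, "Q", 0.001, {'unit': '1/A'})
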
def get_node_text(node):
    """
        Get the text content of a node

        @param node: node to read from
        @return: content, attribute dictionary
    """
    content = None
    attr = {}
    for item in node.childNodes:
        if item.nodeName.find('text')>=0 \
            and len(item.nodeValue.strip())>0:
            content = item.nodeValue.strip()
            break

    if node.hasAttributes():
        for i in range(node.attributes.length):
            attr[node.attributes.item(i).nodeName] \
                = node.attributes.item(i).nodeValue

    return content, attr

def get_content(location, node):
    """
        Get the first instance of the content of an xpath location

        @param location: xpath location
        @param node: node to start at
    """
    value = None
    attr = {}
    nodes = xpath.Evaluate(location, node)
    if len(nodes)>0:
        try:
            # Skip comments and empty lines
            for item in nodes[0].childNodes:
                if item.nodeName.find('text')>=0 \
                    and len(item.nodeValue.strip())>0:
                    value = item.nodeValue.strip()
                    break

            if nodes[0].hasAttributes():
                for i in range(nodes[0].attributes.length):
                    attr[nodes[0].attributes.item(i).nodeName] \
                        = nodes[0].attributes.item(i).nodeValue
        except:
            # Problem reading the node: skip it and report that
            # nothing was found
            logging.error("cansas_reader.get_content: %s\n %s" % (location, sys.exc_value))

    return value, attr

def get_float(location, node):
    """
        Get the content of a node as a float

        @param location: xpath location
        @param node: node to start at
    """
    value = None
    attr = {}
    content, attr = get_content(location, node)
    if content is not None:
        try:
            value = float(content)
        except:
            # Could not parse the content: skip it and return None
            logging.error("cansas_reader.get_float: could not convert '%s' to float" % content)

    return value, attr

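# Example (illustrative only): for an Idata node parsed from
#     <Idata><Q unit="1/A">0.02</Q></Idata>
# get_content('Q', idata_node) returns ('0.02', {'unit': '1/A'}) and
# get_float('Q', idata_node) returns (0.02, {'unit': '1/A'}).
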
def _store_float(location, node, variable, storage):
    """
        Get the content of an xpath location and store
        the result. Check that the units are compatible
        with the destination. The value is expected to
        be a float.

        The xpath location might or might not exist.
        If it does not exist, nothing is done.

        @param location: xpath location to fetch
        @param node: node to read the data from
        @param variable: name of the data member to store it in [string]
        @param storage: data object that has the 'variable' data member

        @raise ValueError: raised when the units are not recognized
    """
    value, attr = get_float(location, node)
    if value is not None:
        # If the entry has units, check to see that they are
        # compatible with what we currently have in the data object
        if attr.has_key('unit'):
            toks = variable.split('.')
            exec "local_unit = storage.%s_unit" % toks[0]
            if attr['unit'].lower()!=local_unit.lower():
                if has_converter==True:
                    try:
                        conv = Converter(attr['unit'])
                        exec "storage.%s = %g" % (variable, conv(value, units=local_unit))
                    except:
                        raise ValueError, "CanSAS reader: could not convert %s unit [%s]; expecting [%s]\n %s" \
                            % (variable, attr['unit'], local_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized %s unit [%s]; expecting [%s]" \
                        % (variable, attr['unit'], local_unit)
            else:
                exec "storage.%s = value" % variable
        else:
            exec "storage.%s = value" % variable

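# Example (illustrative only): _store_float('SASsample/thickness', dom,
# 'thickness', data_info.sample) reads <thickness unit="mm">1.0</thickness>,
# compares 'mm' with data_info.sample.thickness_unit and converts the value
# before assigning it to data_info.sample.thickness if the units differ.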

def _store_content(location, node, variable, storage):
    """
        Get the content of an xpath location and store
        the result. The value is treated as a string.

        The xpath location might or might not exist.
        If it does not exist, nothing is done.

        @param location: xpath location to fetch
        @param node: node to read the data from
        @param variable: name of the data member to store it in [string]
        @param storage: data object that has the 'variable' data member
    """
    value, attr = get_content(location, node)
    if value is not None:
        exec "storage.%s = value" % variable


class Reader:
    """
        Class to load CanSAS 1D XML files

        Dependencies:
            The CanSAS reader requires PyXML 0.8.4 or later.
    """
    ## CanSAS version
    version = '1.0'
    ## File type
    type = ["CanSAS 1D files (*.xml)|*.xml"]
    ## List of allowed extensions
    ext = ['.xml', '.XML']

    def read(self, path):
        """
            Load data file

            @param path: file path
            @return: Data1D object if a single SASentry was found,
                or a list of Data1D objects if multiple entries were found,
                or None if nothing was found
            @raise RuntimeError: when the file can't be opened
            @raise ValueError: when the lengths of the data vectors are inconsistent
        """
        from xml.dom.minidom import parse

        output = []

        if os.path.isfile(path):
            basename = os.path.basename(path)
            root, extension = os.path.splitext(basename)
            if extension.lower() in self.ext:

                dom = parse(path)

                # Check the format version number
                nodes = xpath.Evaluate('SASroot', dom)
                if nodes[0].hasAttributes():
                    for i in range(nodes[0].attributes.length):
                        if nodes[0].attributes.item(i).nodeName=='version':
                            if nodes[0].attributes.item(i).nodeValue != self.version:
                                raise ValueError, "cansas_reader: unrecognized version number %s" % \
                                    nodes[0].attributes.item(i).nodeValue

                entry_list = xpath.Evaluate('SASroot/SASentry', dom)
                for entry in entry_list:
                    sas_entry = self._parse_entry(entry)
                    sas_entry.filename = basename
                    output.append(sas_entry)

        else:
            raise RuntimeError, "%s is not a file" % path

        # Return output consistent with the loader's API
        if len(output)==0:
            return None
        elif len(output)==1:
            return output[0]
        else:
            return output

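    # Example usage (illustrative only; the file name is hypothetical):
    #     reader = Reader()
    #     data = reader.read("cansas1d.xml")
    #     # 'data' is a Data1D object, a list of Data1D objects, or None
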
    def _parse_entry(self, dom):
        """
            Parse a SASentry

            @param dom: SASentry node
            @return: Data1D object
        """
        x = numpy.zeros(0)
        y = numpy.zeros(0)

        data_info = Data1D(x, y)

        # Look up title
        _store_content('Title', dom, 'title', data_info)
        # Look up run number
        nodes = xpath.Evaluate('Run', dom)
        for item in nodes:
            value, attr = get_node_text(item)
            if value is not None:
                data_info.run.append(value)
                if attr.has_key('name'):
                    data_info.run_name[value] = attr['name']

        # Look up instrument name
        _store_content('SASinstrument/name', dom, 'instrument', data_info)
        #value, attr = get_content('SASinstrument', dom)
        #if attr.has_key('name'):
        #    data_info.instrument = attr['name']

        note_list = xpath.Evaluate('SASnote', dom)
        for note in note_list:
            try:
                note_value, note_attr = get_node_text(note)
                if note_value is not None:
                    data_info.notes.append(note_value)
            except:
                logging.error("cansas_reader.read: error processing entry notes\n %s" % sys.exc_value)


        # Sample info ###################
        value, attr = get_content('SASsample', dom)
        if attr.has_key('name'):
            data_info.sample.name = attr['name']

        _store_content('SASsample/ID',
                     dom, 'ID', data_info.sample)
        _store_float('SASsample/thickness',
                     dom, 'thickness', data_info.sample)
        _store_float('SASsample/transmission',
                     dom, 'transmission', data_info.sample)
        _store_float('SASsample/temperature',
                     dom, 'temperature', data_info.sample)
        nodes = xpath.Evaluate('SASsample/details', dom)
        for item in nodes:
            try:
                detail_value, detail_attr = get_node_text(item)
                if detail_value is not None:
                    data_info.sample.details.append(detail_value)
            except:
                logging.error("cansas_reader.read: error processing sample details\n %s" % sys.exc_value)

        # Position (as a vector)
        _store_float('SASsample/position/x',
                     dom, 'position.x', data_info.sample)
        _store_float('SASsample/position/y',
                     dom, 'position.y', data_info.sample)
        _store_float('SASsample/position/z',
                     dom, 'position.z', data_info.sample)

        # Orientation (as a vector)
        _store_float('SASsample/orientation/roll',
                     dom, 'orientation.x', data_info.sample)
        _store_float('SASsample/orientation/pitch',
                     dom, 'orientation.y', data_info.sample)
        _store_float('SASsample/orientation/yaw',
                     dom, 'orientation.z', data_info.sample)

        # Source info ###################
        value, attr = get_content('SASinstrument/SASsource', dom)
        if attr.has_key('name'):
            data_info.source.name = attr['name']

        _store_content('SASinstrument/SASsource/radiation',
                     dom, 'radiation', data_info.source)
        _store_content('SASinstrument/SASsource/beam_shape',
                     dom, 'beam_shape', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength',
                     dom, 'wavelength', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_min',
                     dom, 'wavelength_min', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_max',
                     dom, 'wavelength_max', data_info.source)
        _store_float('SASinstrument/SASsource/wavelength_spread',
                     dom, 'wavelength_spread', data_info.source)

        # Beam size (as a vector)
        value, attr = get_content('SASinstrument/SASsource/beam_size', dom)
        if attr.has_key('name'):
            data_info.source.beam_size_name = attr['name']

        _store_float('SASinstrument/SASsource/beam_size/x',
                     dom, 'beam_size.x', data_info.source)
        _store_float('SASinstrument/SASsource/beam_size/y',
                     dom, 'beam_size.y', data_info.source)
        _store_float('SASinstrument/SASsource/beam_size/z',
                     dom, 'beam_size.z', data_info.source)

        # Collimation info ###################
        nodes = xpath.Evaluate('SASinstrument/SAScollimation', dom)
        for item in nodes:
            collim = Collimation()
            value, attr = get_node_text(item)
            if attr.has_key('name'):
                collim.name = attr['name']
            _store_float('length', item, 'length', collim)

            # Look for apertures
            apert_list = xpath.Evaluate('aperture', item)
            for apert in apert_list:
                aperture = Aperture()

                # Get the name and type of the aperture
                ap_value, ap_attr = get_node_text(apert)
                if ap_attr.has_key('name'):
                    aperture.name = ap_attr['name']
                if ap_attr.has_key('type'):
                    aperture.type = ap_attr['type']

                _store_float('distance', apert, 'distance', aperture)

                value, attr = get_content('size', apert)
                if attr.has_key('name'):
                    aperture.size_name = attr['name']

                _store_float('size/x', apert, 'size.x', aperture)
                _store_float('size/y', apert, 'size.y', aperture)
                _store_float('size/z', apert, 'size.z', aperture)

                collim.aperture.append(aperture)

            data_info.collimation.append(collim)

        # Detector info ######################
        nodes = xpath.Evaluate('SASinstrument/SASdetector', dom)
        for item in nodes:

            detector = Detector()

            _store_content('name', item, 'name', detector)
            _store_float('SDD', item, 'distance', detector)

            # Detector offset (as a vector)
            _store_float('offset/x', item, 'offset.x', detector)
            _store_float('offset/y', item, 'offset.y', detector)
            _store_float('offset/z', item, 'offset.z', detector)

            # Detector orientation (as a vector)
            _store_float('orientation/roll', item, 'orientation.x', detector)
            _store_float('orientation/pitch', item, 'orientation.y', detector)
            _store_float('orientation/yaw', item, 'orientation.z', detector)

            # Beam center (as a vector)
            _store_float('beam_center/x', item, 'beam_center.x', detector)
            _store_float('beam_center/y', item, 'beam_center.y', detector)
            _store_float('beam_center/z', item, 'beam_center.z', detector)

            # Pixel size (as a vector)
            _store_float('pixel_size/x', item, 'pixel_size.x', detector)
            _store_float('pixel_size/y', item, 'pixel_size.y', detector)
            _store_float('pixel_size/z', item, 'pixel_size.z', detector)

            _store_float('slit_length', item, 'slit_length', detector)

            data_info.detector.append(detector)

        # Processes info ######################
        nodes = xpath.Evaluate('SASprocess', dom)
        for item in nodes:
            process = Process()
            _store_content('name', item, 'name', process)
            _store_content('date', item, 'date', process)
            _store_content('description', item, 'description', process)

            term_list = xpath.Evaluate('term', item)
            for term in term_list:
                try:
                    term_value, term_attr = get_node_text(term)
                    term_attr['value'] = term_value
                    if term_value is not None:
                        process.term.append(term_attr)
                except:
                    logging.error("cansas_reader.read: error processing process term\n %s" % sys.exc_value)

            note_list = xpath.Evaluate('SASprocessnote', item)
            for note in note_list:
                try:
                    note_value, note_attr = get_node_text(note)
                    if note_value is not None:
                        process.notes.append(note_value)
                except:
                    logging.error("cansas_reader.read: error processing process notes\n %s" % sys.exc_value)

            data_info.process.append(process)


        # Data info ######################
        nodes = xpath.Evaluate('SASdata', dom)
        if len(nodes)>1:
            raise RuntimeError, "CanSAS reader is not compatible with multiple SASdata entries"

        nodes = xpath.Evaluate('SASdata/Idata', dom)
        x = numpy.zeros(0)
        y = numpy.zeros(0)
        dx = numpy.zeros(0)
        dy = numpy.zeros(0)
        dxw = numpy.zeros(0)
        dxl = numpy.zeros(0)

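        # A typical Idata point in a CanSAS 1.0 file looks like the fragment
        # below (illustrative values); each child element may carry its own
        # 'unit' attribute, which is converted to the Data1D default if needed:
        #     <Idata>
        #         <Q unit="1/A">0.02</Q> <I unit="1/cm">100.0</I>
        #         <Qdev unit="1/A">0.001</Qdev> <Idev unit="1/cm">3.0</Idev>
        #     </Idata>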
        for item in nodes:
            _x, attr = get_float('Q', item)
            _dx, attr_d = get_float('Qdev', item)
            _dxl, attr_l = get_float('dQl', item)
            _dxw, attr_w = get_float('dQw', item)
            if _dx == None:
                _dx = 0.0
            if _dxl == None:
                _dxl = 0.0
            if _dxw == None:
                _dxw = 0.0

            if attr.has_key('unit') and attr['unit'].lower() != data_info.x_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_q = Converter(attr['unit'])
                        _x = data_conv_q(_x, units=data_info.x_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert Q unit [%s]; expecting [%s]\n %s" \
                            % (attr['unit'], data_info.x_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized Q unit [%s]; expecting [%s]" \
                        % (attr['unit'], data_info.x_unit)
            # Error in Q
            if attr_d.has_key('unit') and attr_d['unit'].lower() != data_info.x_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_q = Converter(attr_d['unit'])
                        _dx = data_conv_q(_dx, units=data_info.x_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert dQ unit [%s]; expecting [%s]\n %s" \
                            % (attr_d['unit'], data_info.x_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized dQ unit [%s]; expecting [%s]" \
                        % (attr_d['unit'], data_info.x_unit)
            # Slit length
            if attr_l.has_key('unit') and attr_l['unit'].lower() != data_info.x_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_q = Converter(attr_l['unit'])
                        _dxl = data_conv_q(_dxl, units=data_info.x_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert dQl unit [%s]; expecting [%s]\n %s" \
                            % (attr_l['unit'], data_info.x_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized dQl unit [%s]; expecting [%s]" \
                        % (attr_l['unit'], data_info.x_unit)
            # Slit width
            if attr_w.has_key('unit') and attr_w['unit'].lower() != data_info.x_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_q = Converter(attr_w['unit'])
                        _dxw = data_conv_q(_dxw, units=data_info.x_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert dQw unit [%s]; expecting [%s]\n %s" \
                            % (attr_w['unit'], data_info.x_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized dQw unit [%s]; expecting [%s]" \
                        % (attr_w['unit'], data_info.x_unit)

            _y, attr = get_float('I', item)
            _dy, attr_d = get_float('Idev', item)
            if _dy == None:
                _dy = 0.0
            if attr.has_key('unit') and attr['unit'].lower() != data_info.y_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_i = Converter(attr['unit'])
                        _y = data_conv_i(_y, units=data_info.y_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert I(q) unit [%s]; expecting [%s]\n %s" \
                            % (attr['unit'], data_info.y_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized I(q) unit [%s]; expecting [%s]" \
                        % (attr['unit'], data_info.y_unit)
            if attr_d.has_key('unit') and attr_d['unit'].lower() != data_info.y_unit.lower():
                if has_converter==True:
                    try:
                        data_conv_i = Converter(attr_d['unit'])
                        _dy = data_conv_i(_dy, units=data_info.y_unit)
                    except:
                        raise ValueError, "CanSAS reader: could not convert dI(q) unit [%s]; expecting [%s]\n %s" \
                            % (attr_d['unit'], data_info.y_unit, sys.exc_value)
                else:
                    raise ValueError, "CanSAS reader: unrecognized dI(q) unit [%s]; expecting [%s]" \
                        % (attr_d['unit'], data_info.y_unit)

            if _x is not None and _y is not None:
                x = numpy.append(x, _x)
                y = numpy.append(y, _y)
                dx = numpy.append(dx, _dx)
                dy = numpy.append(dy, _dy)
                dxl = numpy.append(dxl, _dxl)
                dxw = numpy.append(dxw, _dxw)


        data_info.x = x
        data_info.y = y
        data_info.dx = dx
        data_info.dy = dy
        data_info.dxl = dxl
        data_info.dxw = dxw

        data_conv_q = None
        data_conv_i = None

        if has_converter == True and data_info.x_unit != '1/A':
            data_conv_q = Converter('1/A')
            # Test it
            data_conv_q(1.0, units=data_info.x_unit)

        if has_converter == True and data_info.y_unit != '1/cm':
            data_conv_i = Converter('1/cm')
            # Test it
            data_conv_i(1.0, units=data_info.y_unit)

        if data_conv_q is not None:
            data_info.xaxis("\\rm{Q}", data_info.x_unit)
        else:
            data_info.xaxis("\\rm{Q}", 'A^{-1}')
        if data_conv_i is not None:
            data_info.yaxis("\\rm{I(Q)}", data_info.y_unit)
        else:
            data_info.yaxis("\\rm{I(Q)}", "cm^{-1}")

        return data_info

    def write(self, filename, datainfo):
        """
            Write the content of a Data1D as a CanSAS XML file

            @param filename: name of the file to write
            @param datainfo: Data1D object
        """

        if not datainfo.__class__ == Data1D:
            raise RuntimeError, "The CanSAS writer expects a Data1D instance"

        doc = xml.dom.minidom.Document()
        main_node = doc.createElement("SASroot")
        main_node.setAttribute("version", self.version)
        main_node.setAttribute("xmlns", "cansas1d/%s" % self.version)
        main_node.setAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance")
        main_node.setAttribute("xsi:schemaLocation", "cansas1d/%s http://svn.smallangles.net/svn/canSAS/1dwg/trunk/cansas1d.xsd" % self.version)

        doc.appendChild(main_node)

        entry_node = doc.createElement("SASentry")
        main_node.appendChild(entry_node)

        write_node(doc, entry_node, "Title", datainfo.title)

        for item in datainfo.run:
            runname = {}
            if datainfo.run_name.has_key(item) and len(str(datainfo.run_name[item]))>1:
                runname = {'name': datainfo.run_name[item]}
            write_node(doc, entry_node, "Run", item, runname)

        # Data info
        node = doc.createElement("SASdata")
        entry_node.appendChild(node)

        for i in range(len(datainfo.x)):
            pt = doc.createElement("Idata")
            node.appendChild(pt)
            write_node(doc, pt, "Q", datainfo.x[i], {'unit': datainfo.x_unit})
            if len(datainfo.y) > i:
                write_node(doc, pt, "I", datainfo.y[i], {'unit': datainfo.y_unit})
            if datainfo.dx != None and len(datainfo.dx) > i:
                write_node(doc, pt, "Qdev", datainfo.dx[i], {'unit': datainfo.x_unit})
            if datainfo.dy != None and len(datainfo.dy) > i:
                write_node(doc, pt, "Idev", datainfo.dy[i], {'unit': datainfo.y_unit})


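        # The loop above emits one <Idata> block per point, e.g.
        # (illustrative output):
        #     <Idata>
        #         <Q unit="1/A">0.02</Q>
        #         <I unit="1/cm">100.0</I>
        #     </Idata>
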
        # Sample info
        sample = doc.createElement("SASsample")
        if datainfo.sample.name is not None:
            sample.setAttribute("name", str(datainfo.sample.name))
        entry_node.appendChild(sample)
        write_node(doc, sample, "ID", str(datainfo.sample.ID))
        write_node(doc, sample, "thickness", datainfo.sample.thickness, {"unit": datainfo.sample.thickness_unit})
        write_node(doc, sample, "transmission", datainfo.sample.transmission)
        write_node(doc, sample, "temperature", datainfo.sample.temperature, {"unit": datainfo.sample.temperature_unit})

        for item in datainfo.sample.details:
            write_node(doc, sample, "details", item)

        pos = doc.createElement("position")
        written = write_node(doc, pos, "x", datainfo.sample.position.x, {"unit": datainfo.sample.position_unit})
        written = written | write_node(doc, pos, "y", datainfo.sample.position.y, {"unit": datainfo.sample.position_unit})
        written = written | write_node(doc, pos, "z", datainfo.sample.position.z, {"unit": datainfo.sample.position_unit})
        if written == True:
            sample.appendChild(pos)

        ori = doc.createElement("orientation")
        written = write_node(doc, ori, "roll", datainfo.sample.orientation.x, {"unit": datainfo.sample.orientation_unit})
        written = written | write_node(doc, ori, "pitch", datainfo.sample.orientation.y, {"unit": datainfo.sample.orientation_unit})
        written = written | write_node(doc, ori, "yaw", datainfo.sample.orientation.z, {"unit": datainfo.sample.orientation_unit})
        if written == True:
            sample.appendChild(ori)

        # Instrument info
        instr = doc.createElement("SASinstrument")
        entry_node.appendChild(instr)

        write_node(doc, instr, "name", datainfo.instrument)

        # Source
        source = doc.createElement("SASsource")
        if datainfo.source.name is not None:
            source.setAttribute("name", str(datainfo.source.name))
        instr.appendChild(source)

        write_node(doc, source, "radiation", datainfo.source.radiation)
        write_node(doc, source, "beam_shape", datainfo.source.beam_shape)
        size = doc.createElement("beam_size")
        if datainfo.source.beam_size_name is not None:
            size.setAttribute("name", str(datainfo.source.beam_size_name))
        written = write_node(doc, size, "x", datainfo.source.beam_size.x, {"unit": datainfo.source.beam_size_unit})
        written = written | write_node(doc, size, "y", datainfo.source.beam_size.y, {"unit": datainfo.source.beam_size_unit})
        written = written | write_node(doc, size, "z", datainfo.source.beam_size.z, {"unit": datainfo.source.beam_size_unit})
        if written == True:
            source.appendChild(size)

        write_node(doc, source, "wavelength", datainfo.source.wavelength, {"unit": datainfo.source.wavelength_unit})
        write_node(doc, source, "wavelength_min", datainfo.source.wavelength_min, {"unit": datainfo.source.wavelength_min_unit})
        write_node(doc, source, "wavelength_max", datainfo.source.wavelength_max, {"unit": datainfo.source.wavelength_max_unit})
        write_node(doc, source, "wavelength_spread", datainfo.source.wavelength_spread, {"unit": datainfo.source.wavelength_spread_unit})

        # Collimation
        for item in datainfo.collimation:
            coll = doc.createElement("SAScollimation")
            if item.name is not None:
                coll.setAttribute("name", str(item.name))
            instr.appendChild(coll)

            write_node(doc, coll, "length", item.length, {"unit": item.length_unit})

            for apert in item.aperture:
                ap = doc.createElement("aperture")
                if apert.name is not None:
                    ap.setAttribute("name", str(apert.name))
                if apert.type is not None:
                    ap.setAttribute("type", str(apert.type))
                coll.appendChild(ap)

                write_node(doc, ap, "distance", apert.distance, {"unit": apert.distance_unit})

                size = doc.createElement("size")
                if apert.size_name is not None:
                    size.setAttribute("name", str(apert.size_name))
                written = write_node(doc, size, "x", apert.size.x, {"unit": apert.size_unit})
                written = written | write_node(doc, size, "y", apert.size.y, {"unit": apert.size_unit})
                written = written | write_node(doc, size, "z", apert.size.z, {"unit": apert.size_unit})
                if written == True:
                    ap.appendChild(size)

        # Detectors
        for item in datainfo.detector:
            det = doc.createElement("SASdetector")
            written = write_node(doc, det, "name", item.name)
            written = written | write_node(doc, det, "SDD", item.distance, {"unit": item.distance_unit})
            written = written | write_node(doc, det, "slit_length", item.slit_length, {"unit": item.slit_length_unit})
            if written == True:
                instr.appendChild(det)

            off = doc.createElement("offset")
            written = write_node(doc, off, "x", item.offset.x, {"unit": item.offset_unit})
            written = written | write_node(doc, off, "y", item.offset.y, {"unit": item.offset_unit})
            written = written | write_node(doc, off, "z", item.offset.z, {"unit": item.offset_unit})
            if written == True:
                det.appendChild(off)

            center = doc.createElement("beam_center")
            written = write_node(doc, center, "x", item.beam_center.x, {"unit": item.beam_center_unit})
            written = written | write_node(doc, center, "y", item.beam_center.y, {"unit": item.beam_center_unit})
            written = written | write_node(doc, center, "z", item.beam_center.z, {"unit": item.beam_center_unit})
            if written == True:
                det.appendChild(center)

            pix = doc.createElement("pixel_size")
            written = write_node(doc, pix, "x", item.pixel_size.x, {"unit": item.pixel_size_unit})
            written = written | write_node(doc, pix, "y", item.pixel_size.y, {"unit": item.pixel_size_unit})
            written = written | write_node(doc, pix, "z", item.pixel_size.z, {"unit": item.pixel_size_unit})
            if written == True:
                det.appendChild(pix)

            ori = doc.createElement("orientation")
            written = write_node(doc, ori, "roll", item.orientation.x, {"unit": item.orientation_unit})
            written = written | write_node(doc, ori, "pitch", item.orientation.y, {"unit": item.orientation_unit})
            written = written | write_node(doc, ori, "yaw", item.orientation.z, {"unit": item.orientation_unit})
            if written == True:
                det.appendChild(ori)


        # Processes info
        for item in datainfo.process:
            node = doc.createElement("SASprocess")
            entry_node.appendChild(node)

            write_node(doc, node, "name", item.name)
            write_node(doc, node, "date", item.date)
            write_node(doc, node, "description", item.description)
            for term in item.term:
                value = term['value']
                del term['value']
                write_node(doc, node, "term", value, term)
            for note in item.notes:
                write_node(doc, node, "SASprocessnote", note)


        # Write the file
        fd = open(filename, 'w')
        fd.write(doc.toprettyxml())
        fd.close()


if __name__ == "__main__":
    logging.basicConfig(level=logging.ERROR,
                        format='%(asctime)s %(levelname)s %(message)s',
                        filename='cansas_reader.log',
                        filemode='w')
    reader = Reader()
    print reader.read("../test/cansas1d.xml")
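    # Illustrative round trip (the output file name is hypothetical):
    #     data = reader.read("../test/cansas1d.xml")
    #     reader.write("cansas1d_copy.xml", data)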