Changeset 76cd1ae in sasview for src/sans/dataloader/readers/cansas_reader.py
- Timestamp: Jan 14, 2014 3:53:36 PM (11 years ago)
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 16bd5ca
- Parents: 083e993
- File: 1 edited
Legend:
- Unmodified: no prefix
- Added: prefixed with "+"
- Removed: prefixed with "-"
src/sans/dataloader/readers/cansas_reader.py
--- src/sans/dataloader/readers/cansas_reader.py (r1ce36f37)
+++ src/sans/dataloader/readers/cansas_reader.py (r76cd1ae)

 """
-CanSAS data reader - new recursive cansasVersion.
+CanSAS data reader - new recursive cansas_version.
 """
 ############################################################################
…
 from sans.dataloader.data_info import Data1D
 from sans.dataloader.data_info import Collimation
+from sans.dataloader.data_info import TransmissionSpectrum
 from sans.dataloader.data_info import Detector
 from sans.dataloader.data_info import Process
 from sans.dataloader.data_info import Aperture
-import xml_reader
+import sans.dataloader.readers.xml_reader as xml_reader
 import xml.dom.minidom
-from cansas_constants import cansasConstants
+from sans.dataloader.readers.cansas_constants import cansasConstants

 _ZERO = 1e-16
…
 except:
     HAS_CONVERTER = False

-CANSAS_FORMAT = cansasConstants.CANSAS_FORMAT
-CANSAS_NS = cansasConstants.CANSAS_NS
+constants = cansasConstants()
+CANSAS_FORMAT = constants.format
+CANSAS_NS = constants.ns
 ALLOW_ALL = True
…
     """
     ##CanSAS version - defaults to version 1.0
-    cansasVersion = "1.0"
+    cansas_version = "1.0"
     ##Data reader
     reader = xml_reader.XMLreader()
…
         self.errors = []

-    def isCansas(self):
+    def is_cansas(self):
         """
         Checks to see if the xml file is a CanSAS file
…
         if self.reader.validateXML():
             xmlns = self.reader.xmlroot.keys()
-            if (CANSAS_NS.get(self.cansasVersion).get("ns") == self.reader.xmlroot.get(xmlns[1]).rsplit(" ")[0]):
+            if (CANSAS_NS.get(self.cansas_version).get("ns") == \
+                    self.reader.xmlroot.get(xmlns[1]).rsplit(" ")[0]):
                 return True
             return False
…
         base = base_name.split("/sans/")[0]

-        # Load in the xml file and get the cansas version from the header
+        # Load in xml file and get the cansas version from the header
         self.reader.setXMLFile(xml)
         root = self.reader.xmlroot
         if root is None:
             root = {}
-        self.cansasVersion = root.get("version", "1.0")
+        self.cansas_version = root.get("version", "1.0")

         # Generic values for the cansas file based on the version
-        cansas_defaults = CANSAS_NS.get(self.cansasVersion, "1.0")
-        schema_path = "{0}/sans/dataloader/readers/schema/{1}".format(base, cansas_defaults.get("schema")).replace("\\", "/")
+        cansas_defaults = CANSAS_NS.get(self.cansas_version, "1.0")
+        schema_path = "{0}/sans/dataloader/readers/schema/{1}".format\
+            (base, cansas_defaults.get("schema")).replace("\\", "/")

         # Link a schema to the XML file.
…
         # Check the file matches the XML schema
         try:
-            if self.isCansas():
-                # Get each SASentry from the XML file and add it to a list.
+            if self.is_cansas():
+                # Get each SASentry from XML file and add it to a list.
                 entry_list = root.xpath('/ns:SASroot/ns:SASentry',
                                         namespaces={'ns': cansas_defaults.get("ns")})
                 ns.append("SASentry")

-                # If there are multiple files, modify the name for each is unique
-                multipleFiles = len(entry_list) - 1
-                n = 0
+                # If multiple files, modify the name for each is unique
+                multiple_files = len(entry_list) - 1
+                increment = 0
                 name = basename
                 # Parse each SASentry item
                 for entry in entry_list:
+                    # Define a new Data1D object with zeroes for x and y
+                    data1d = Data1D(x,y,dx,dy)
+                    data1d.dxl = dxl
+                    data1d.dxw = dxw

-                    # Define a new Data1D object with zeroes for x and y
-                    data1D = Data1D(x,y,dx,dy)
-                    data1D.dxl = dxl
-                    data1D.dxw = dxw
+                    # If more than one SASentry, increment each in order
+                    if multiple_files:
+                        name += "_{0}".format(increment)
+                        increment += 1

-                    # If more than one SASentry, number each in order
-                    if multipleFiles:
-                        name += "_{0}".format(n)
-                        n += 1
-
-                    # Set the Data1D name and then parse the entry. The entry is appended to a list of entry values
-                    data1D.filename = name
-                    data1D.meta_data["loader"] = "CanSAS 1D"
-                    return_value, extras = self._parse_entry(entry, ns, data1D)
+                    # Set the Data1D name and then parse the entry.
+                    # The entry is appended to a list of entry values
+                    data1d.filename = name
+                    data1d.meta_data["loader"] = "CanSAS 1D"
+                    return_value, extras = \
+                        self._parse_entry(entry, ns, data1d)
                     del extras[:]

-                    #Final cleanup - Remove empty nodes, verify array sizes are correct
+                    # Final cleanup
+                    # Remove empty nodes, verify array sizes are correct
                     for error in self.errors:
                         return_value.errors.append(error)
…
                     numpy.trim_zeros(return_value.dxl)
                     numpy.trim_zeros(return_value.dxw)
-
                     output.append(return_value)
                 else:
…
         except:
             # If the file does not match the schema, raise this error
-            raise RuntimeError, "%s cannot be read \n" % xml
+            raise RuntimeError, "%s cannot be read \increment" % xml
         return output
         # Return a list of parsed entries that dataloader can manage
…
     def _create_unique_key(self, dictionary, name, i):
+        """
+        Create a unique key value for any dictionary to prevent overwriting
+
+
+        :param dictionary: A dictionary with any number of entries
+        :param name: The index of the item to be added to dictionary
+        :param i: The number to be appended to the name
+        """
         if dictionary.get(name) is not None:
             i += 1
…
         return name

-    def _iterate_namespace(self, ns):
+    def _iterate_namespace(self, namespace):
+        """
+        Method to iterate through a cansas constants tree based on a list of
+        names
+
+        :param namespace: A list of names that match the tree structure of
+            cansas_constants
+        """
         # The current level to look through in cansas_constants.
         current_level = CANSAS_FORMAT.get("SASentry")
…
         ns_datatype = "content"
         ns_optional = True
-        for name in ns:
+        for name in namespace:
             if name != "SASentry":
                 current_level = current_level.get("children").get(name, "")
…
                 cl_units_optional = current_level.get("units_required", "")
                 # Where are how to store the variable for the given namespace
-                # The CANSAS_CONSTANTS tree is hierarchical, so is no value, inherit
+                # CANSAS_CONSTANTS tree is hierarchical, so is no value, inherit
                 ns_variable = cl_variable if cl_variable != "" else ns_variable
                 ns_datatype = cl_datatype if cl_datatype != "" else ns_datatype
-                ns_optional = cl_units_optional if cl_units_optional != ns_optional else ns_optional
+                ns_optional = cl_units_optional if cl_units_optional != \
+                    ns_optional else ns_optional
         return current_level, ns_variable, ns_datatype, ns_optional

-    def _unit_conversion(self, new_current_level, attr, data1D, node_value, optional = True):
+
+    def _unit_conversion(self, new_current_level, attr, data1d, \
+                         node_value, optional = True):
+        """
+        A unit converter method used to convert the data included in the file
+        to the default units listed in data_info
+
+        :param new_current_level: cansas_constants level as returned by
+            _iterate_namespace
+        :param attr: The attributes of the node
+        :param data1d: Where the values will be saved
+        :param node_value: The value of the current dom node
+        :param optional: Boolean that says if the units are required
+        """
         value_unit = ''
         if 'unit' in attr and new_current_level.get('unit') is not None:
…
             default_unit = None
             unitname = new_current_level.get("unit")
-            exec "default_unit = data1D.{0}".format(unitname)
+            exec "default_unit = data1d.{0}".format(unitname)
             local_unit = attr['unit']
-            if local_unit.lower() != default_unit.lower() and local_unit is not None\
-                and local_unit.lower() != "none" and default_unit is not None:
+            if local_unit.lower() != default_unit.lower() and \
+                local_unit is not None and local_unit.lower() != "none" and\
+                default_unit is not None:
                 if HAS_CONVERTER == True:
                     try:
                         data_conv_q = Converter(attr['unit'])
                         value_unit = default_unit
-                        exec "node_value = data_conv_q(node_value, units=data1D.{0})".format(unitname)
+                        exec "node_value = data_conv_q(node_value, units=data1d.{0})".format(unitname)
                     except:
                         err_msg = "CanSAS reader: could not convert "
                         err_msg += "Q unit {0}; ".format(local_unit)
-                        intermediate = "err_msg += \"expecting [{1}] {2}\".format(data1D.{0}, sys.exc_info()[1])".format(unitname, "{0}", "{1}")
+                        intermediate = "err_msg += \"expecting [{1}] {2}\".format(data1d.{0}, sys.exc_info()[1])".format(unitname, "{0}", "{1}")
                         exec intermediate
                         self.errors.append(err_msg)
…
                     err_msg = "CanSAS reader: could not convert "
                     err_msg += "Q unit [%s]; " % attr['unit'],
-                    exec "err_msg += \"expecting [%s]\n %s\" % (data1D.{0}, sys.exc_info()[1])".format(unitname)
+                    exec "err_msg += \"expecting [%s]\n %s\" % (data1d.{0}, sys.exc_info()[1])".format(unitname)
                     self.errors.append(err_msg)
                 if optional:
…
         return node_value, value_unit

-    def _parse_entry(self, dom, ns, data1D, extras = []):
+    def _parse_entry(self, dom, ns, data1d, extras = []):
         """
         Parse a SASEntry - new recursive method for parsing the dom of
…
         :param dom: dom object with a namespace base of ns
         :param ns: A list of element names that lead up to the dom object
-        :param data1D: The data1D object that will be modified
+        :param data1d: The data1d object that will be modified
+        :param extras: Any values that should go into meta_data when data1d
+            is not a Data1D object
         """

         # A portion of every namespace entry
-        base_ns = "{0}{1}{2}".format("{", CANSAS_NS.get(self.cansasVersion).get("ns"), "}")
+        base_ns = "{0}{1}{2}".format("{", \
+            CANSAS_NS.get(self.cansas_version).get("ns"), "}")
         unit = ''
…
             # Look for special cases
-            save_data1D = data1D
+            save_data1d = data1d
             if tagname == "SASdetector":
-                data1D = Detector()
+                data1d = Detector()
             elif tagname == "SAScollimation":
-                data1D = Collimation()
+                data1d = Collimation()
+            elif tagname == "SAStransmission_spectrum":
+                data1d = TransmissionSpectrum()
             elif tagname == "SASprocess":
-                data1D = Process()
+                data1d = Process()
                 for child in node:
                     if child.tag.replace(base_ns, "") == "term":
                         term_attr = {}
                         for attr in child.keys():
-                            term_attr[attr] = ' '.join(child.get(attr).split())
+                            term_attr[attr] = \
+                                ' '.join(child.get(attr).split())
                         if child.text is not None:
-                            term_attr['value'] = ' '.join(child.text.split())
-                        data1D.term.append(term_attr)
+                            term_attr['value'] = \
+                                ' '.join(child.text.split())
+                        data1d.term.append(term_attr)
             elif tagname == "aperture":
-                data1D = Aperture()
+                data1d = Aperture()

             # Get where to store content
-            new_current_level, ns_variable, ns_datatype, optional = self._iterate_namespace(ns)
+            new_current_level, ns_var, ns_datatype, \
+                optional = self._iterate_namespace(ns)
             # If the element is a child element, recurse
             if node.getchildren() is not None:
                 # Returned value is new Data1D object with all previous and new values in it.
-                data1D, extras = self._parse_entry(node, ns, data1D, extras)
+                data1d, extras = self._parse_entry(node, ns, data1d, extras)

             #Get the information from the node
…
             if ns_datatype == "float":
                 # If an empty value is given, store as zero.
-                if node_value is None or node_value.isspace() or node_value.lower() == "nan":
+                if node_value is None or node_value.isspace() \
+                    or node_value.lower() == "nan":
                     node_value = "0.0"
-                node_value, unit = self._unit_conversion(new_current_level, attr, data1D, node_value, optional)
+                node_value, unit = self._unit_conversion(new_current_level,\
+                    attr, data1d, node_value, optional)

             # If appending to a dictionary (meta_data | run_name), name sure the key is unique
-            if ns_variable == "{0}.meta_data[\"{2}\"] = \"{1}\"":
-                # If we are within a Process, Detector, Collimation or Aperture instance, pull out old data1D
-                tagname = self._create_unique_key(data1D.meta_data, tagname, 0)
-                if isinstance(data1D, Data1D) == False:
-                    store_me = ns_variable.format("data1D", node_value, tagname)
+            if ns_var == "{0}.meta_data[\"{2}\"] = \"{1}\"":
+                # If we are within a Process, Detector, Collimation or Aperture instance, pull out old data1d
+                tagname = self._create_unique_key(data1d.meta_data, tagname, 0)
+                if isinstance(data1d, Data1D) == False:
+                    store_me = ns_var.format("data1d", node_value, tagname)
                     extras.append(store_me)
-                    ns_variable = None
-            if ns_variable == "{0}.run_name[\"{2}\"] = \"{1}\"":
-                tagname = self._create_unique_key(data1D.run_name, tagname, 0)
+                    ns_var = None
+            if ns_var == "{0}.run_name[\"{2}\"] = \"{1}\"":
+                tagname = self._create_unique_key(data1d.run_name, tagname, 0)

             # Check for Data1D object and any extra commands to save
-            if isinstance(data1D, Data1D):
+            if isinstance(data1d, Data1D):
                 for item in extras:
                     exec item
             # Don't bother saving empty information unless it is a float
-            if ns_variable is not None and node_value is not None and node_value.isspace() == False:
+            if ns_var is not None and node_value is not None and \
+                node_value.isspace() == False:
                 # Format a string and then execute it.
-                store_me = ns_variable.format("data1D", node_value, tagname)
+                store_me = ns_var.format("data1d", node_value, tagname)
                 exec store_me
             # Get attributes and process them
…
                     else:
                         attrib_value = node.attrib[key]
-                    store_attr = attrib_variable.format("data1D", attrib_value, key)
+                    store_attr = attrib_variable.format("data1d", \
+                        attrib_value, key)
                     exec store_attr
             except AttributeError as e:
…
                 print(e, exc_type, fname, exc_tb.tb_lineno, tagname, exc_obj)
             finally:
-                # Save special cases in original data1D object and then restore the data1D
+                # Save special cases in original data1d object
+                # then restore the data1d
                 if tagname_original == "SASdetector":
-                    save_data1D.detector.append(data1D)
+                    save_data1d.detector.append(data1d)
                 elif tagname_original == "SAScollimation":
-                    save_data1D.collimation.append(data1D)
+                    save_data1d.collimation.append(data1d)
+                elif tagname == "SAStransmission_spectrum":
+                    save_data1d.trans_spectrum.append(data1d)
                 elif tagname_original == "SASprocess":
-                    save_data1D.process.append(data1D)
+                    save_data1d.process.append(data1d)
                 elif tagname_original == "aperture":
-                    save_data1D.aperture.append(data1D)
+                    save_data1d.aperture.append(data1d)
                 else:
-                    save_data1D = data1D
-                data1D = save_data1D
+                    save_data1d = data1d
+                data1d = save_data1d
                 # Remove tagname from ns to restore original base
                 ns.remove(tagname_original)

-        return data1D, extras
+        return data1d, extras

     def _to_xml_doc(self, datainfo):
…
             raise RuntimeError, "The cansas writer expects a Data1D instance"

-        ns = CANSAS_NS.get(self.cansasVersion).get("ns")
+        ns = CANSAS_NS.get(self.cansas_version).get("ns")
         doc = xml.dom.minidom.Document()
         main_node = doc.createElement("SASroot")
-        main_node.setAttribute("version", self.cansasVersion)
+        main_node.setAttribute("version", self.cansas_version)
         main_node.setAttribute("xmlns", ns)
         main_node.setAttribute("xmlns:xsi",
…
             write_node(doc, pt, "I", datainfo.y[i],
                        {'unit': datainfo.y_unit})
-            if datainfo.dy != None and len(datainfo.dy) >= i:
+            if datainfo.dy != None and len(datainfo.dy) > i:
                 write_node(doc, pt, "Idev", datainfo.dy[i],
                            {'unit': datainfo.y_unit})
-            if datainfo.dx != None and len(datainfo.dx) >= i:
+            if datainfo.dx != None and len(datainfo.dx) > i:
                 write_node(doc, pt, "Qdev", datainfo.dx[i],
                            {'unit': datainfo.x_unit})
-            if datainfo.dxw != None and len(datainfo.dxw) >= i:
+            if datainfo.dxw != None and len(datainfo.dxw) > i:
                 write_node(doc, pt, "dQw", datainfo.dxw[i],
                            {'unit': datainfo.x_unit})
-            if datainfo.dxl != None and len(datainfo.dxl) >= i:
+            if datainfo.dxl != None and len(datainfo.dxl) > i:
                 write_node(doc, pt, "dQl", datainfo.dxl[i],
                            {'unit': datainfo.x_unit})

         # Transmission Spectrum Info
-        if len(datainfo.trans_spectrum.wavelength) > 0:
+        for i in range(len(datainfo.trans_spectrum)):
+            spectrum = datainfo.trans_spectrum[i]
             node = doc.createElement("SAStransmission_spectrum")
             entry_node.appendChild(node)
-            for i in range(len(datainfo.trans_spectrum.wavelength)):
+            for i in range(len(spectrum.wavelength)):
                 pt = doc.createElement("Tdata")
                 node.appendChild(pt)
-                write_node(doc, pt, "Lambda", datainfo.trans_spectrum.wavelength[i],
-                           {'unit': datainfo.trans_spectrum.wavelength_unit})
-                write_node(doc, pt, "T", datainfo.trans_spectrum.transmission[i],
-                           {'unit': datainfo.trans_spectrum.transmission_unit})
-                if datainfo.trans_spectrum.transmission_deviation != None \
-                    and len(datainfo.trans_spectrum.transmission_deviation) >= i:
-                    write_node(doc, pt, "Tdev", datainfo.trans_spectrum.transmission_deviation[i],
-                               {'unit': datainfo.trans_spectrum.transmission_deviation_unit})
+                write_node(doc, pt, "Lambda", spectrum.wavelength[i],
+                           {'unit': spectrum.wavelength_unit})
+                write_node(doc, pt, "T", spectrum.transmission[i],
+                           {'unit': spectrum.transmission_unit})
+                if spectrum.transmission_deviation != None \
+                    and len(spectrum.transmission_deviation) >= i:
+                    write_node(doc, pt, "Tdev", \
+                        spectrum.transmission_deviation[i], \
+                        {'unit': spectrum.transmission_deviation_unit})

         # Sample info
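Beyond the style renames (data1D to data1d, cansasVersion to cansas_version), the main functional change in this revision is that SAStransmission_spectrum elements are now parsed into TransmissionSpectrum objects collected in the list Data1D.trans_spectrum, and the writer emits one SAStransmission_spectrum block per entry in that list. A minimal usage sketch follows; it assumes the Reader class in this module keeps the standard sans.dataloader read() interface returning a list of Data1D objects, and "my_cansas_file.xml" is a hypothetical input path.

    # Minimal sketch, not part of the changeset: load a CanSAS XML file and
    # inspect the per-entry transmission spectra handled by this revision.
    from sans.dataloader.readers.cansas_reader import Reader

    reader = Reader()
    # read() is assumed to return one Data1D object per SASentry in the file
    data_list = reader.read("my_cansas_file.xml")
    for data in data_list:
        print "%s: %d data points" % (data.filename, len(data.x))
        # trans_spectrum is now a list of TransmissionSpectrum objects
        for spectrum in data.trans_spectrum:
            print "  spectrum with %d wavelengths [%s]" % \
                (len(spectrum.wavelength), spectrum.wavelength_unit)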