- Timestamp: Mar 3, 2015 12:26:26 PM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 226ce0b
- Parents: 7425bcf
- Files: 1 edited
src/sas/dataloader/readers/cansas_reader.py
Changes (rb3efb7d → raa720c4) are confined to code cleanup; no functional behaviour is altered:

- Trailing whitespace is stripped throughout the module: in the DANSE header comments, in docstrings, and on the blank lines separating methods.
- The unused minidom import and its comment are removed:

      - # minidom used in functions called by outside classes
      - import xml.dom.minidom

- In _unit_conversion, the unused local assignment data_conv_q = Converter(local_unit) is deleted.
- In _handle_attributes, the unused node value returned by _get_node_value is discarded:

      - node_value, unit = self._get_node_value(node, cs_values, \
      + _, unit = self._get_node_value(node, cs_values, \

- Spacing around keyword and default arguments is normalized, e.g. numb = 0 becomes numb=0, attrib = attrib, nsmap = nsmap becomes attrib=attrib, nsmap=nsmap, and namespaces ={'ns': ...} becomes namespaces={'ns': ...}.
- Redundant backslash continuations inside parenthesized calls are dropped (e.g. the Data1D(numpy.empty(0), ...) constructor in read), and long write_node / write_attribute / create_element calls in the writer methods (_write_data, _write_trans_spectrum, _write_sample_info, _write_source, _write_collimation, _write_detectors) are re-wrapped so that continuation lines are consistently indented.
- In _store_float, the long exec "storage.%s = %g" % (...) statement is re-wrapped onto two lines.
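Because the diff only tidies formatting, the reader and writer in this file should behave exactly as before. Below is a minimal usage sketch of the class touched by this changeset, assuming the module is importable as sas.dataloader.readers.cansas_reader and that a canSAS 1.0 file named data.xml exists; both names are assumptions for illustration, not part of the changeset.

    # Minimal usage sketch (not part of the changeset); the module path and
    # the file names are assumptions for illustration only.
    from sas.dataloader.readers.cansas_reader import Reader

    reader = Reader()
    output = reader.read("data.xml")      # list of Data1D objects, or None
    if output:
        data1d = output[0]                # first SASentry parsed from the file
        # Round-trip the same Data1D back out as canSAS 1.0 XML
        reader.write("data_roundtrip.xml", data1d)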