Changeset b5e9ce26 in sasview for src/sas/sascalc/dataloader/readers
- Timestamp:
- Feb 14, 2017 3:27:46 PM (8 years ago)
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- 2ffe241
- Parents:
- 68adf86 (diff), 2510b9b (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- git-author:
- Jeff Krzywon <krzywon@…> (02/14/17 15:27:46)
- git-committer:
- GitHub <noreply@…> (02/14/17 15:27:46)
- Location:
- src/sas/sascalc/dataloader/readers
- Files:
- 5 edited
src/sas/sascalc/dataloader/readers/cansas_reader.py
(rad4632c → rb5e9ce26)

@@ -20 +20 @@
 import inspect
 # For saving individual sections of data
-from sas.sascalc.dataloader.data_info import Data1D, DataInfo, plottable_1D
-from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector, Process, Aperture
-from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable as combine_data
+from sas.sascalc.dataloader.data_info import Data1D, Data2D, DataInfo, \
+    plottable_1D, plottable_2D
+from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, \
+    Detector, Process, Aperture
+from sas.sascalc.dataloader.data_info import \
+    combine_data_info_with_plottable as combine_data
 import sas.sascalc.dataloader.readers.xml_reader as xml_reader
 from sas.sascalc.dataloader.readers.xml_reader import XMLreader

@@ -56 +59 @@
     The CanSAS reader requires PyXML 0.8.4 or later.
     """
-    ## CanSAS version - defaults to version 1.0
+    # CanSAS version - defaults to version 1.0
     cansas_version = "1.0"
     base_ns = "{cansas1d/1.0}"

@@ -63 +66 @@
     invalid = True
     frm = ""
-    ## Log messages and errors
+    # Log messages and errors
     logging = None
     errors = set()
-    ## Namespace hierarchy for current xml_file object
+    # Namespace hierarchy for current xml_file object
     names = None
     ns_list = None
-    ## Temporary storage location for loading multiple data sets in a single file
+    # Temporary storage location for loading multiple data sets in a single file
     current_datainfo = None
     current_dataset = None
     current_data1d = None
     data = None
-    ## List of data1D objects to be sent back to SasView
+    # List of data1D objects to be sent back to SasView
     output = None
-    ## Wildcards
+    # Wildcards
     type = ["XML files (*.xml)|*.xml", "SasView Save Files (*.svs)|*.svs"]
-    ## List of allowed extensions
+    # List of allowed extensions
     ext = ['.xml', '.XML', '.svs', '.SVS']
-    ## Flag to bypass extension check
+    # Flag to bypass extension check
     allow_all = True

@@ -220 +223 @@
                 self.parent_class = tagname_original
                 if tagname == 'SASdata':
-                    self._initialize_new_data_set()
-                    ## Recursion step to access data within the group
+                    self._initialize_new_data_set(node)
+                    if isinstance(self.current_dataset, plottable_2D):
+                        x_bins = attr.get("x_bins", "")
+                        y_bins = attr.get("y_bins", "")
+                        if x_bins is not "" and y_bins is not "":
+                            self.current_dataset.shape = (x_bins, y_bins)
+                        else:
+                            self.current_dataset.shape = ()
+                # Recursion step to access data within the group
                 self._parse_entry(node, True)
                 if tagname == "SASsample":

@@ -234 +244 @@
                     self.add_intermediate()
                 else:
-                    data_point, unit = self._get_node_value(node, tagname)
-
-                    ## If this is a dataset, store the data appropriately
+                    if isinstance(self.current_dataset, plottable_2D):
+                        data_point = node.text
+                        unit = attr.get('unit', '')
+                    else:
+                        data_point, unit = self._get_node_value(node, tagname)
+
+                    # If this is a dataset, store the data appropriately
                     if tagname == 'Run':
                        self.current_datainfo.run_name[data_point] = name
…
@@ -245 +259 @@
                         self.current_datainfo.notes.append(data_point)

-                    ## I and Q Data
-                    elif tagname == 'I':
+                    # I and Q - 1D data
+                    elif tagname == 'I' and isinstance(self.current_dataset, plottable_1D):
                         unit_list = unit.split(" | ")
                         if len(unit_list) > 1:
…
                         self.current_dataset.yaxis("Intensity", unit)
                         self.current_dataset.y = np.append(self.current_dataset.y, data_point)
-                    elif tagname == 'Idev':
+                    elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_1D):
                         self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)
                     elif tagname == 'Q':
…
@@ -277 +291 @@
                         self.current_datainfo.sample.zacceptance = (data_point, unit)

-                    ## Sample Information
+                    # I and Qx, Qy - 2D data
+                    elif tagname == 'I' and isinstance(self.current_dataset, plottable_2D):
+                        self.current_dataset.yaxis("Intensity", unit)
+                        self.current_dataset.data = np.fromstring(data_point, dtype=float, sep=",")
+                    elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_2D):
+                        self.current_dataset.err_data = np.fromstring(data_point, dtype=float, sep=",")
+                    elif tagname == 'Qx':
+                        self.current_dataset.xaxis("Qx", unit)
+                        self.current_dataset.qx_data = np.fromstring(data_point, dtype=float, sep=",")
+                    elif tagname == 'Qy':
+                        self.current_dataset.yaxis("Qy", unit)
+                        self.current_dataset.qy_data = np.fromstring(data_point, dtype=float, sep=",")
+                    elif tagname == 'Qxdev':
+                        self.current_dataset.xaxis("Qxdev", unit)
+                        self.current_dataset.dqx_data = np.fromstring(data_point, dtype=float, sep=",")
+                    elif tagname == 'Qydev':
+                        self.current_dataset.yaxis("Qydev", unit)
+                        self.current_dataset.dqy_data = np.fromstring(data_point, dtype=float, sep=",")
+                    elif tagname == 'Mask':
+                        inter = [item == "1" for item in data_point.split(",")]
+                        self.current_dataset.mask = np.asarray(inter, dtype=bool)
+
+                    # Sample Information
                     elif tagname == 'ID' and self.parent_class == 'SASsample':
                         self.current_datainfo.sample.ID = data_point
…
@@ -311 +347 @@
                         self.current_datainfo.sample.orientation_unit = unit

-                    ## Instrumental Information
+                    # Instrumental Information
                     elif tagname == 'name' and self.parent_class == 'SASinstrument':
                         self.current_datainfo.instrument = data_point
-                    ## Detector Information
+                    # Detector Information
                     elif tagname == 'name' and self.parent_class == 'SASdetector':
                         self.detector.name = data_point
…
@@ -359 +395 @@
                         self.detector.orientation.z = data_point
                         self.detector.orientation_unit = unit
-                    ## Collimation and Aperture
+                    # Collimation and Aperture
                     elif tagname == 'length' and self.parent_class == 'SAScollimation':
                         self.collimation.length = data_point
…
@@ -378 +414 @@
                         self.collimation.size_unit = unit

-                    ## Process Information
+                    # Process Information
                     elif tagname == 'name' and self.parent_class == 'SASprocess':
                         self.process.name = data_point
…
@@ -398 +434 @@
                         self.process.term.append(dic)

-                    ## Transmission Spectrum
+                    # Transmission Spectrum
                     elif tagname == 'T' and self.parent_class == 'Tdata':
                         self.transspectrum.transmission = np.append(self.transspectrum.transmission, data_point)
…
@@ -409 +445 @@
                         self.transspectrum.wavelength_unit = unit

-                    ## Source Information
+                    # Source Information
                     elif tagname == 'wavelength' and (self.parent_class == 'SASsource' or self.parent_class == 'SASData'):
                         self.current_datainfo.source.wavelength = data_point
…
@@ -436 +472 @@
                         self.current_datainfo.source.beam_shape = data_point

-                    ## Everything else goes in meta_data
+                    # Everything else goes in meta_data
                     else:
                         new_key = self._create_unique_key(self.current_datainfo.meta_data, tagname)
…
@@ -450 +486 @@
             self.add_data_set()
             empty = None
-            if self.output[0].dx is not None:
-                self.output[0].dxl = np.empty(0)
-                self.output[0].dxw = np.empty(0)
-            else:
-                self.output[0].dx = np.empty(0)
             return self.output[0], empty
…
@@ -526 +557 @@
         self.current_datainfo = DataInfo()

-    def _initialize_new_data_set(self, parent_list=None):
+    def _initialize_new_data_set(self, node=None):
         """
         A private class method to generate a new 1D data object.
         Outside methods should call add_data_set() to be sure any existing data is stored properly.

-        :param parent_list: List of names of parent elements
-        """
-
-        if parent_list is None:
-            parent_list = []
+        :param node: XML node to determine if 1D or 2D data
+        """
         x = np.array(0)
         y = np.array(0)
+        for child in node:
+            if child.tag.replace(self.base_ns, "") == "Idata":
+                for i_child in child:
+                    if i_child.tag.replace(self.base_ns, "") == "Qx":
+                        self.current_dataset = plottable_2D()
+                        return
         self.current_dataset = plottable_1D(x, y)
…
@@ -572 +606 @@
         """

-        ## Append errors to dataset and reset class errors
+        # Append errors to dataset and reset class errors
         self.current_datainfo.errors = set()
         for error in self.errors:
…
         self.errors.clear()

-        ## Combine all plottables with datainfo and append each to output
-        ## Type cast data arrays to float64 and find min/max as appropriate
+        # Combine all plottables with datainfo and append each to output
+        # Type cast data arrays to float64 and find min/max as appropriate
         for dataset in self.data:
-            if dataset.x is not None:
-                dataset.x = np.delete(dataset.x, [0])
-                dataset.x = dataset.x.astype(np.float64)
-                dataset.xmin = np.min(dataset.x)
-                dataset.xmax = np.max(dataset.x)
-            if dataset.y is not None:
-                dataset.y = np.delete(dataset.y, [0])
-                dataset.y = dataset.y.astype(np.float64)
-                dataset.ymin = np.min(dataset.y)
-                dataset.ymax = np.max(dataset.y)
-            if dataset.dx is not None:
-                dataset.dx = np.delete(dataset.dx, [0])
-                dataset.dx = dataset.dx.astype(np.float64)
-            if dataset.dxl is not None:
-                dataset.dxl = np.delete(dataset.dxl, [0])
-                dataset.dxl = dataset.dxl.astype(np.float64)
-            if dataset.dxw is not None:
-                dataset.dxw = np.delete(dataset.dxw, [0])
-                dataset.dxw = dataset.dxw.astype(np.float64)
-            if dataset.dy is not None:
-                dataset.dy = np.delete(dataset.dy, [0])
-                dataset.dy = dataset.dy.astype(np.float64)
-            np.trim_zeros(dataset.x)
-            np.trim_zeros(dataset.y)
-            np.trim_zeros(dataset.dy)
+            if isinstance(dataset, plottable_1D):
+                if dataset.x is not None:
+                    dataset.x = np.delete(dataset.x, [0])
+                    dataset.x = dataset.x.astype(np.float64)
+                    dataset.xmin = np.min(dataset.x)
+                    dataset.xmax = np.max(dataset.x)
+                if dataset.y is not None:
+                    dataset.y = np.delete(dataset.y, [0])
+                    dataset.y = dataset.y.astype(np.float64)
+                    dataset.ymin = np.min(dataset.y)
+                    dataset.ymax = np.max(dataset.y)
+                if dataset.dx is not None:
+                    dataset.dx = np.delete(dataset.dx, [0])
+                    dataset.dx = dataset.dx.astype(np.float64)
+                if dataset.dxl is not None:
+                    dataset.dxl = np.delete(dataset.dxl, [0])
+                    dataset.dxl = dataset.dxl.astype(np.float64)
+                if dataset.dxw is not None:
+                    dataset.dxw = np.delete(dataset.dxw, [0])
+                    dataset.dxw = dataset.dxw.astype(np.float64)
+                if dataset.dy is not None:
+                    dataset.dy = np.delete(dataset.dy, [0])
+                    dataset.dy = dataset.dy.astype(np.float64)
+                np.trim_zeros(dataset.x)
+                np.trim_zeros(dataset.y)
+                np.trim_zeros(dataset.dy)
+            elif isinstance(dataset, plottable_2D):
+                dataset.data = dataset.data.astype(np.float64)
+                dataset.qx_data = dataset.qx_data.astype(np.float64)
+                dataset.xmin = np.min(dataset.qx_data)
+                dataset.xmax = np.max(dataset.qx_data)
+                dataset.qy_data = dataset.qy_data.astype(np.float64)
+                dataset.ymin = np.min(dataset.qy_data)
+                dataset.ymax = np.max(dataset.qy_data)
+                dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data
+                                         + dataset.qy_data * dataset.qy_data)
+                if dataset.err_data is not None:
+                    dataset.err_data = dataset.err_data.astype(np.float64)
+                if dataset.dqx_data is not None:
+                    dataset.dqx_data = dataset.dqx_data.astype(np.float64)
+                if dataset.dqy_data is not None:
+                    dataset.dqy_data = dataset.dqy_data.astype(np.float64)
+                if dataset.mask is not None:
+                    dataset.mask = dataset.mask.astype(dtype=bool)
+
+                if len(dataset.shape) == 2:
+                    n_rows, n_cols = dataset.shape
+                    dataset.y_bins = dataset.qy_data[0::int(n_cols)]
+                    dataset.x_bins = dataset.qx_data[:int(n_cols)]
+                    dataset.data = dataset.data.flatten()
+                else:
+                    dataset.y_bins = []
+                    dataset.x_bins = []
+                    dataset.data = dataset.data.flatten()
+
             final_dataset = combine_data(dataset, self.current_datainfo)
             self.output.append(final_dataset)
…
@@ -705 +769 @@
                 and local_unit.lower() != "none":
             if HAS_CONVERTER == True:
-                ## Check local units - bad units raise KeyError
+                # Check local units - bad units raise KeyError
                 data_conv_q = Converter(local_unit)
                 value_unit = default_unit
…
@@ -752 +816 @@
        A method to check all resolution data sets are the same size as I and Q
        """
-        dql_exists = False
-        dqw_exists = False
-        dq_exists = False
-        di_exists = False
-        if self.current_dataset.dxl is not None:
-            dql_exists = True
-        if self.current_dataset.dxw is not None:
-            dqw_exists = True
-        if self.current_dataset.dx is not None:
-            dq_exists = True
-        if self.current_dataset.dy is not None:
-            di_exists = True
-        if dqw_exists and not dql_exists:
-            array_size = self.current_dataset.dxw.size - 1
-            self.current_dataset.dxl = np.append(self.current_dataset.dxl, np.zeros([array_size]))
-        elif dql_exists and not dqw_exists:
-            array_size = self.current_dataset.dxl.size - 1
-            self.current_dataset.dxw = np.append(self.current_dataset.dxw, np.zeros([array_size]))
-        elif not dql_exists and not dqw_exists and not dq_exists:
-            array_size = self.current_dataset.x.size - 1
-            self.current_dataset.dx = np.append(self.current_dataset.dx, np.zeros([array_size]))
-        if not di_exists:
-            array_size = self.current_dataset.y.size - 1
-            self.current_dataset.dy = np.append(self.current_dataset.dy, np.zeros([array_size]))
-
+        if isinstance(self.current_dataset, plottable_1D):
+            dql_exists = False
+            dqw_exists = False
+            dq_exists = False
+            di_exists = False
+            if self.current_dataset.dxl is not None:
+                dql_exists = True
+            if self.current_dataset.dxw is not None:
+                dqw_exists = True
+            if self.current_dataset.dx is not None:
+                dq_exists = True
+            if self.current_dataset.dy is not None:
+                di_exists = True
+            if dqw_exists and not dql_exists:
+                array_size = self.current_dataset.dxw.size - 1
+                self.current_dataset.dxl = np.append(self.current_dataset.dxl,
+                                                     np.zeros([array_size]))
+            elif dql_exists and not dqw_exists:
+                array_size = self.current_dataset.dxl.size - 1
+                self.current_dataset.dxw = np.append(self.current_dataset.dxw,
+                                                     np.zeros([array_size]))
+            elif not dql_exists and not dqw_exists and not dq_exists:
+                array_size = self.current_dataset.x.size - 1
+                self.current_dataset.dx = np.append(self.current_dataset.dx,
+                                                    np.zeros([array_size]))
+            if not di_exists:
+                array_size = self.current_dataset.y.size - 1
+                self.current_dataset.dy = np.append(self.current_dataset.dy,
+                                                    np.zeros([array_size]))
+        elif isinstance(self.current_dataset, plottable_2D):
+            dqx_exists = False
+            dqy_exists = False
+            di_exists = False
+            mask_exists = False
+            if self.current_dataset.dqx_data is not None:
+                dqx_exists = True
+            if self.current_dataset.dqy_data is not None:
+                dqy_exists = True
+            if self.current_dataset.err_data is not None:
+                di_exists = True
+            if self.current_dataset.mask is not None:
+                mask_exists = True
+            if not dqy_exists:
+                array_size = self.current_dataset.qy_data.size - 1
+                self.current_dataset.dqy_data = np.append(
+                    self.current_dataset.dqy_data, np.zeros([array_size]))
+            if not dqx_exists:
+                array_size = self.current_dataset.qx_data.size - 1
+                self.current_dataset.dqx_data = np.append(
+                    self.current_dataset.dqx_data, np.zeros([array_size]))
+            if not di_exists:
+                array_size = self.current_dataset.data.size - 1
+                self.current_dataset.err_data = np.append(
+                    self.current_dataset.err_data, np.zeros([array_size]))
+            if not mask_exists:
+                array_size = self.current_dataset.data.size - 1
+                self.current_dataset.mask = np.append(
+                    self.current_dataset.mask,
+                    np.ones([array_size], dtype=bool))

@@ -780 +878 @@
     ####### All methods below are for writing CanSAS XML files #######
-

     def write(self, filename, datainfo):
…
@@ -804 +901 @@
         :param datainfo: Data1D object
         """
-        if not issubclass(datainfo.__class__, Data1D):
-            raise RuntimeError, "The cansas writer expects a Data1D instance"
+        is_2d = False
+        if issubclass(datainfo.__class__, Data2D):
+            is_2d = True

         # Get PIs and create root element
…
@@ -825 +923 @@
         self._write_run_names(datainfo, entry_node)
         # Add Data info to SASEntry
-        self._write_data(datainfo, entry_node)
+        if is_2d:
+            self._write_data_2d(datainfo, entry_node)
+        else:
+            self._write_data(datainfo, entry_node)
         # Transmission Spectrum Info
         self._write_trans_spectrum(datainfo, entry_node)
…
@@ -919 +1020 @@
     def _write_data(self, datainfo, entry_node):
         """
-        Writes the I and Q data to the XML file
+        Writes 1D I and Q data to the XML file

         :param datainfo: The Data1D object the information is coming from
…
@@ -954 +1055 @@
                             {'unit': datainfo.sample.zacceptance[1]})

+    def _write_data_2d(self, datainfo, entry_node):
+        """
+        Writes 2D data to the XML file
+
+        :param datainfo: The Data2D object the information is coming from
+        :param entry_node: lxml node ElementTree object to be appended to
+        """
+        attr = {}
+        if datainfo.data.shape:
+            attr["x_bins"] = str(len(datainfo.x_bins))
+            attr["y_bins"] = str(len(datainfo.y_bins))
+        node = self.create_element("SASdata", attr)
+        self.append(node, entry_node)
+
+        point = self.create_element("Idata")
+        node.append(point)
+        qx = ','.join([str(datainfo.qx_data[i]) for i in xrange(len(datainfo.qx_data))])
+        qy = ','.join([str(datainfo.qy_data[i]) for i in xrange(len(datainfo.qy_data))])
+        intensity = ','.join([str(datainfo.data[i]) for i in xrange(len(datainfo.data))])
+
+        self.write_node(point, "Qx", qx,
+                        {'unit': datainfo._xunit})
+        self.write_node(point, "Qy", qy,
+                        {'unit': datainfo._yunit})
+        self.write_node(point, "I", intensity,
+                        {'unit': datainfo._zunit})
+        if datainfo.err_data is not None:
+            err = ','.join([str(datainfo.err_data[i]) for i in
+                            xrange(len(datainfo.err_data))])
+            self.write_node(point, "Idev", err,
+                            {'unit': datainfo._zunit})
+        if datainfo.dqy_data is not None:
+            dqy = ','.join([str(datainfo.dqy_data[i]) for i in
+                            xrange(len(datainfo.dqy_data))])
+            self.write_node(point, "Qydev", dqy,
+                            {'unit': datainfo._yunit})
+        if datainfo.dqx_data is not None:
+            dqx = ','.join([str(datainfo.dqx_data[i]) for i in
+                            xrange(len(datainfo.dqx_data))])
+            self.write_node(point, "Qxdev", dqx,
+                            {'unit': datainfo._xunit})
+        if datainfo.mask is not None:
+            mask = ','.join(
+                ["1" if datainfo.mask[i] else "0"
+                 for i in xrange(len(datainfo.mask))])
+            self.write_node(point, "Mask", mask)

     def _write_trans_spectrum(self, datainfo, entry_node):
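Taken together, the reader and writer changes above define a simple text encoding for the 2D arrays: each of Qx, Qy, I, the resolutions, and Mask is flattened into a single comma-separated string inside <Idata>. The following standalone sketch (not SasView code; the array values are made up) mirrors the writer's ','.join(...) encoding and the reader's parsing branches to show the round trip is lossless:

import numpy as np

# Writer side (cf. _write_data_2d): flatten each array into one
# comma-joined string per XML element.
qx = np.array([0.01, 0.02, 0.01, 0.02])
mask = np.array([True, True, False, True])
qx_text = ",".join(str(v) for v in qx)
mask_text = ",".join("1" if m else "0" for m in mask)

# Reader side (cf. the Qx/Mask branches in _parse_entry). The diff uses
# np.fromstring(..., sep=","); newer numpy deprecates text-mode
# fromstring, but the call still works.
qx_back = np.fromstring(qx_text, dtype=float, sep=",")
mask_back = np.asarray([item == "1" for item in mask_text.split(",")],
                       dtype=bool)

assert np.allclose(qx, qx_back)
assert (mask == mask_back).all()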
src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd
(r250fec92 → raf08e55)

@@ -24 +24 @@
 
 <complexType name="IdataType">
+  <xsd:choice>
   <sequence>
     <element name="Q" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
…
     <xsd:any minOccurs="0" maxOccurs="unbounded" processContents="lax" namespace="##other" />
   </sequence>
+  <sequence>
+    <element name="Qx" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+    <element name="Qy" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+    <element name="I" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+    <element name="Idev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+    <element name="Qydev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+    <element name="Qxdev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+    <element name="Mask" minOccurs="0" maxOccurs="1" type="string" default="0" />
+  </sequence>
+  </xsd:choice>
 </complexType>
…
@@ -51 +62 @@
   <attribute name="name" type="string" use="optional" default="" />
   <attribute name="timestamp" type="dateTime" use="optional" />
+  <attribute name="x_bins" type="string" use="optional" />
+  <attribute name="y_bins" type="string" use="optional" />
 </complexType>
src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd
(r250fec92 → raf08e55)

Same change as in cansas1d_invalid_v1_0.xsd above: the IdataType sequence is wrapped in an <xsd:choice> with a second <sequence> for the 2D elements (Qx, Qy, I, Idev, Qydev, Qxdev, Mask), and the optional x_bins and y_bins attributes are added to the SASdata complexType.
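For illustration, here is a minimal <SASdata> block that would match the new second <sequence> of IdataType. This is a sketch only: the cansas1d namespace and the optional Idev/Qxdev/Qydev elements are omitted, and it is parsed with lxml just to show the intended shape (xml.etree.ElementTree would work identically here):

from lxml import etree

# Hypothetical 2D instance document. Element names and the
# x_bins/y_bins attributes come from the schema diff above.
xml = b"""
<SASdata x_bins="2" y_bins="2">
  <Idata>
    <Qx unit="1/A">0.01,0.02,0.01,0.02</Qx>
    <Qy unit="1/A">0.01,0.01,0.02,0.02</Qy>
    <I unit="1/cm">10.0,11.0,12.0,13.0</I>
    <Mask>1,1,0,1</Mask>
  </Idata>
</SASdata>
"""
sasdata = etree.fromstring(xml)
print(sasdata.get("x_bins"))        # -> "2"
print(sasdata.findtext("Idata/I"))  # -> "10.0,11.0,12.0,13.0"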
src/sas/sascalc/dataloader/readers/cansas_constants.py
(r250fec92 → rad4632c)

@@ -133 +133 @@
     "variable" : None,
     "children" : {"Idata" : SASDATA_IDATA,
+                  "Sesans": {"storeas": "content"},
+                  "zacceptance": {"storeas": "float"},
                   "<any>" : ANY
                  }
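The constants file drives CanSAS parsing declaratively: each child tag of <SASdata> maps to a small dict telling the generic XML parser how to store that node. A hypothetical sketch of the resulting shape (empty dicts stand in for the real SASDATA_IDATA and ANY values defined elsewhere in cansas_constants.py):

# Sketch of the SASDATA constants entry after this change; "storeas"
# names the conversion applied when the tag's content is stored.
SASDATA = {
    "variable": None,
    "children": {
        "Idata": {},                          # stands in for SASDATA_IDATA
        "Sesans": {"storeas": "content"},     # stored as raw node content
        "zacceptance": {"storeas": "float"},  # stored as a float
        "<any>": {},                          # stands in for ANY
    },
}
print(SASDATA["children"]["zacceptance"]["storeas"])  # -> float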
src/sas/sascalc/dataloader/readers/sesans_reader.py
(r345e7e4 → rb5db35d)

@@ -8 +8 @@
 import numpy
 import os
-from sas.sascalc.dataloader.data_info import SESANSData1D
+from sas.sascalc.dataloader.data_info import Data1D

 # Check whether we have a converter available
…
@@ -59 +59 @@
             raise RuntimeError, "sesans_reader: cannot open %s" % path
         buff = input_f.read()
-        # print buff
         lines = buff.splitlines()
-        # print lines
-        #Jae could not find python universal line spliter:
-        #keep the below for now
-        # some ascii data has \r line separator,
-        # try it when the data is on only one long line
-        # if len(lines) < 2 :
-        # lines = buff.split('\r')
-
         x = numpy.zeros(0)
         y = numpy.zeros(0)
…
         tdlam = numpy.zeros(0)
         tdx = numpy.zeros(0)
-        # print "all good"
-        output = SESANSData1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam)
-        # print output
+        output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True)
         self.filename = output.filename = basename

-        ## Initialize counters for data lines and header lines.
-        # is_data = False # Has more than 5 lines
-        # # More than "5" lines of data is considered as actual
-        # # data unless that is the only data
-        # mum_data_lines = 5
-        # # To count # of current data candidate lines
-        # i = -1
-        # # To count total # of previous data candidate lines
-        # i1 = -1
-        # # To count # of header lines
-        # j = -1
-        # # Helps to count # of header lines
-        # j1 = -1
-        # #minimum required number of columns of data; ( <= 4).
-        # lentoks = 2
         paramnames=[]
         paramvals=[]
…
         Pvals=[]
         dPvals=[]
-        # print x
-        # print zvals
+
         for line in lines:
             # Initial try for CSV (split on ,)
…
                 if len(toks)>5:
                     zvals.append(toks[0])
-                    dzvals.append(toks[1])
-                    lamvals.append(toks[2])
-                    dlamvals.append(toks[3])
-                    Pvals.append(toks[4])
-                    dPvals.append(toks[5])
+                    dzvals.append(toks[3])
+                    lamvals.append(toks[4])
+                    dlamvals.append(toks[5])
+                    Pvals.append(toks[1])
+                    dPvals.append(toks[2])
                 else:
                     continue
…
@@ -140 +113 @@
         default_z_unit = "A"
         data_conv_P = None
-        default_p_unit = " "
+        default_p_unit = " "  # Adjust unit for axis (L^-3)
         lam_unit = lam_header[1].replace("[","").replace("]","")
+        if lam_unit == 'AA':
+            lam_unit = 'A'
         varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
         valrange=range(1, len(zvals))
…
@@ -161 +136 @@
         output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit)
         output.y = y
+        output.y_unit = '\AA^{-2} cm^{-1}'  # output y_unit added
         output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit)
         output.dy = dy
         output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit)

-        output.xaxis("\rm{z}", output.x_unit)
-        output.yaxis("\\rm{P/P0}", output.y_unit)
+        output.xaxis("\\rm{z}", output.x_unit)
+        output.yaxis("\\rm{ln(P)/(t \lambda^2)}", output.y_unit)  # Adjust label to ln P/(lam^2 t), remove lam column refs
         # Store loading process information
         output.meta_data['loader'] = self.type_name
-        output.sample.thickness = float(paramvals[6])
+        #output.sample.thickness = float(paramvals[6])
         output.sample.name = paramvals[1]
         output.sample.ID = paramvals[0]
         zaccept_unit_split = paramnames[7].split("[")
         zaccept_unit = zaccept_unit_split[1].replace("]","")
-        if zaccept_unit.strip() == '\AA^-1':
+        if zaccept_unit.strip() == '\AA^-1' or zaccept_unit.strip() == '\A^-1':
             zaccept_unit = "1/A"
         output.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
-        output.vars =varheader
+        output.vars = varheader

         if len(output.x) < 1: