- Timestamp:
- Feb 14, 2017 3:27:46 PM (8 years ago)
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- 2ffe241
- Parents:
- 68adf86 (diff), 2510b9b (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
- git-author:
- Jeff Krzywon <krzywon@…> (02/14/17 15:27:46)
- git-committer:
- GitHub <noreply@…> (02/14/17 15:27:46)
- Location:
- src/sas
- Files:
- 19 edited
src/sas/sascalc/dataloader/readers/cansas_reader.py
Changes between rad4632c and rb5e9ce26:
- The data_info imports now also pull in Data2D and plottable_2D, with the long import lines split across continuations.
- Doxygen-style "##" attribute comments on the reader class were changed to plain "#" comments.
- _initialize_new_data_set() now receives the SASdata XML node instead of a parent list; if an Idata child contains a Qx element it creates a plottable_2D, otherwise a plottable_1D as before.
- When _parse_entry() opens a SASdata group for a 2D set, the optional x_bins/y_bins attributes are read and stored in current_dataset.shape (empty tuple when absent).
- For plottable_2D datasets the node text is taken verbatim together with its unit attribute instead of going through _get_node_value(); new branches parse I, Idev, Qx, Qy, Qxdev and Qydev with np.fromstring(data_point, dtype=float, sep=","), and Mask into a boolean array from "0"/"1" tokens.
- The existing I and Idev branches are now restricted to plottable_1D datasets via isinstance checks.
- The block in read() that back-filled dx/dxl/dxw with empty arrays was removed.
- The step that combines plottables with DataInfo now casts and trims 1D arrays only for plottable_1D; for plottable_2D it casts data, qx_data, qy_data, err_data, dqx_data, dqy_data and mask to the proper types, computes q_data, sets the axis min/max, derives x_bins/y_bins from the stored shape, and flattens data.
- _check_for_empty_resolution() handles 1D and 2D data separately: missing dqx/dqy/err_data arrays are padded with zeros and a missing mask with ones, sized to match the data.
- The XML writing path no longer rejects non-Data1D input; it checks for Data2D and calls a new _write_data_2d(), which records x_bins/y_bins as SASdata attributes and writes Qx, Qy, I, Idev, Qydev, Qxdev and Mask as comma-separated strings inside a single Idata element.
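The new 2D branches store each quantity as flat, comma-separated text inside a single Idata element and parse it with np.fromstring. A minimal sketch of that approach; the values and bin counts below are made up for illustration, and the axis slicing assumes x varies fastest, which may differ from the reader's exact indexing:

    import numpy as np

    # Hypothetical text content of 2D <I>, <Qx>, <Qy> and <Mask> nodes, written
    # as comma-separated values the way _write_data_2d() emits them.
    i_text = "1.0,2.0,3.0,4.0,5.0,6.0"
    qx_text = "0.01,0.02,0.03,0.01,0.02,0.03"
    qy_text = "0.10,0.10,0.10,0.20,0.20,0.20"
    mask_text = "1,1,0,1,1,1"
    x_bins, y_bins = 3, 2          # would come from the SASdata attributes

    # np.fromstring(..., sep=",") turns the flat comma-separated text into arrays
    data = np.fromstring(i_text, dtype=float, sep=",")
    qx_data = np.fromstring(qx_text, dtype=float, sep=",")
    qy_data = np.fromstring(qy_text, dtype=float, sep=",")
    mask = np.asarray([item == "1" for item in mask_text.split(",")], dtype=bool)

    # |q| and the bin axes can then be derived from the flat arrays
    q_data = np.sqrt(qx_data ** 2 + qy_data ** 2)
    x_axis = qx_data[:x_bins]      # first row of Qx values (assumed ordering)
    y_axis = qy_data[0::x_bins]    # first column of Qy values (assumed ordering)
    print(q_data.reshape(y_bins, x_bins))
    print(mask)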
src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd
Changes between r250fec92 and raf08e55:
- The IdataType content model is wrapped in an xsd:choice: the original Q/I sequence is kept, and a second sequence is added for 2D data with required Qx, Qy and I elements plus optional Idev, Qydev, Qxdev and Mask elements.
- The complex type that already carried the optional name and timestamp attributes gains optional x_bins and y_bins string attributes.
src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd
Changes between r250fec92 and raf08e55:
- The same schema extension as in cansas1d_invalid_v1_0.xsd: IdataType becomes an xsd:choice between the original 1D sequence and a new 2D sequence (Qx, Qy, I, optional Idev, Qydev, Qxdev, Mask), and the name/timestamp complex type gains optional x_bins and y_bins attributes.
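The updated schemas allow either the original 1D sequence or the new Qx/Qy/I sequence inside IdataType. A minimal sketch of checking a file against one of them with lxml, which the reader already depends on; the file names here are hypothetical:

    from lxml import etree

    # Hypothetical paths; the schema name matches the files changed above.
    schema_doc = etree.parse("cansas1d_invalid_v1_1.xsd")
    schema = etree.XMLSchema(schema_doc)

    doc = etree.parse("my_2d_data.xml")   # a file using the new Qx/Qy/I sequence
    if schema.validate(doc):
        print("document satisfies the extended IdataType choice")
    else:
        for err in schema.error_log:
            print(err.message)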
src/sas/sascalc/file_converter/c_ext/bsl_loader.c
Changes between rdc8a553 and r2ab9c432:
- A commented-out #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION line was added before the numpy/arrayobject.h include.
- In CLoader_init(), the n_frames, n_pixels, n_rasters and swap_bytes locals are no longer declared const int, only int.
src/sas/sasgui/guiframe/local_perspectives/plotting/plotting.py
Changes between r6ffa0dd and rca224b1:
- The loop that clears all plot panels now calls self.clear_panel_by_id(group_id) for each group instead of resetting the panel graph and hiding the panel inline; self.plot_panels is still reset to an empty dict afterwards.
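A minimal sketch of the delegation pattern used above; the Plotter class and its methods below are stand-ins, not the SasView classes:

    # iterate over a snapshot of the keys so that removing entries inside
    # clear_panel_by_id cannot invalidate the iteration
    class Plotter(object):
        def __init__(self):
            self.plot_panels = {}

        def clear_panel_by_id(self, group_id):
            # reset and hide a single panel (details omitted in this sketch)
            self.plot_panels.pop(group_id, None)

        def clear_panel(self):
            for group_id in list(self.plot_panels.keys()):
                self.clear_panel_by_id(group_id)
            self.plot_panels = {}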
src/sas/sasgui/perspectives/fitting/basepage.py
Changes between r20522e1 and ra6fccd7:
- The category lookup now splits state.formfactorcombobox on ":" and matches only the first element against the master category dictionary.
- The old try/except code that accepted integer combobox positions from older save states was replaced: the state is first run through state._convert_to_sasmodels(), the category is matched by name (falling back to get_cat_combo_box_pos), and model names in the "Customized Models" category are prefixed with PLUGIN_NAME_BASE from models when the prefix is missing before the form factor position is looked up; the structure factor position is likewise matched by name.
- The current combobox selections are now stored with GetValue() instead of GetLabel() for the structure, form factor and category boxes.
- The parameter-restore helper no longer requires the saved list and the widget list to be the same length: it matches entries by parameter name, skips theta and phi when the data is not 2D, converts stored uncertainties and bounds to strings before writing them into the text controls, and shows the error/min/max controls it fills.
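A minimal sketch of the saved-state model lookup described above; the category table below is made up for illustration, not SasView's real category list:

    # older save states may store "model:structure", so keep only the model part
    categories = [
        ("Sphere", ["sphere", "core_shell_sphere"]),
        ("Cylinder", ["cylinder", "barbell"]),
    ]

    def get_category_position(formfactorcombobox):
        formfactor = formfactorcombobox.split(":")[0]
        for index, (name, models) in enumerate(categories):
            if formfactor in models:
                return index
        return 0

    print(get_category_position("barbell:hardsphere"))  # -> 1
    print(get_category_position("unknown_model"))       # -> 0 (fall back to first)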
src/sas/sasgui/perspectives/fitting/fitpage.py
Changes between r24fd27a and ra6fccd7:
- The saved structure and form factor combobox entries are now taken with GetValue() instead of GetLabel(), and the selected category (self.categorybox.GetValue()) is stored in the state as well.
src/sas/sasgui/perspectives/fitting/models.py
Changes between r313c5c9 and r0de74af:
- A module-level constant PLUGIN_NAME_BASE = '[plug-in] ' replaces the hard-coded "[plug-in] " string used when naming loaded custom models.
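A minimal sketch of how such a shared prefix keeps plug-in model names consistent; the registry and loader below are placeholders, not the SasView machinery:

    PLUGIN_NAME_BASE = '[plug-in] '

    def register_plugin(plugins, model_name):
        # prefix every custom model once so the GUI can recognise plug-ins later
        if not model_name.startswith(PLUGIN_NAME_BASE):
            model_name = PLUGIN_NAME_BASE + model_name
        plugins[model_name] = object()   # placeholder for the loaded model object
        return model_name

    plugins = {}
    print(register_plugin(plugins, "my_custom_sphere"))  # -> [plug-in] my_custom_sphere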
src/sas/sasgui/perspectives/fitting/pagestate.py
Changes between rc8e1996 and r6d2b50b:
- sasmodels conversion support is imported (from sasmodels import convert).
- PageState gains a version attribute, defaulting to (1, 0, 0) and copied by clone().
- A new _old_first_model() method handles save states from 4.0.1 and earlier, where the first entry of the category, form factor and structure factor boxes was not saved: it fills formfactorcombobox from a FIRST_FORM lookup keyed on the category, and when the category is also empty it guesses Shape-Independent/PowerLawAbsModel or Cylinder/barbell from the parameter count, logging a warning otherwise.
- New static helpers param_remap_to_sasmodels_convert() and param_remap_from_sasmodels_convert() translate between the GUI parameter lists and the flat {name, name.fittable, name.std, name.upper, name.lower, name.units} maps used by the sasmodels converter.
- A new _convert_to_sasmodels() method runs the remapped parameters through convert.convert_model() for the structure factor, the form factor and any string parameters, increments multi_factor for SphericalSLDModel, and rebuilds self.parameters from the converted map.
- The state XML now records the running SasView version (sasview.__version__) instead of the hard-coded '1.0'; on load, any version attribute is parsed into an integer tuple and padded to three components.
- The long _data2d_to_xml_doc() method and the dedicated Data2D parsing branch of this module's CanSAS reader were removed; reading now simply delegates to _parse_entry(), and saving always goes through _to_xml_doc() whether the data is Data1D or Data2D.
- The restored fit state also receives the file's version (state.version = fitstate.version).
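The version handling above parses the saved version string into a tuple padded to three components so it can be handed to the sasmodels converter. A minimal standalone sketch of that numeric case:

    def parse_version(version_string):
        # split "major.minor[.patch]" and pad with zeros to three components
        version = tuple(int(part) for part in version_string.split("."))
        while len(version) < 3:
            version = version + (0,)
        return version

    print(parse_version("4.1"))    # -> (4, 1, 0)
    print(parse_version("4.0.1"))  # -> (4, 0, 1)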
src/sas/sasgui/perspectives/invariant/invariant_state.py
Changes between rcb93b40 and rdb5294e:
- When restoring a saved invariant state, input_field.text is checked for None before calling strip(); an empty string is used when the node has no text. The guard is applied in both places the default state is read.
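A minimal sketch of that None-guard, using bare lxml elements as stand-ins for the saved-state nodes:

    from lxml import etree

    empty_node = etree.Element("background")      # <background/> has no text
    filled_node = etree.fromstring("<background> 0.0 </background>")

    def node_text(node):
        # element.text is None for empty elements, so guard before stripping
        return str(node.text.strip()) if node.text is not None else ''

    print(repr(node_text(empty_node)))   # -> ''
    print(repr(node_text(filled_node)))  # -> '0.0'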
src/sas/sasgui/perspectives/pr/media/pr_help.rst
Changes between r0391dae and r1221196:
- The citation year for the Moore reference in the P(r) inversion description is corrected from 1908 to 1980; the remaining differences are trailing-whitespace changes.
src/sas/sascalc/data_util/qsmearing.py
rd3911e3 r157b716 13 13 import logging 14 14 import sys 15 15 import numpy as np # type: ignore 16 from numpy import pi, exp # type:ignore 16 17 from sasmodels.resolution import Slit1D, Pinhole1D 18 from sasmodels.sesans import SesansTransform 17 19 from sasmodels.resolution2d import Pinhole2D 20 from src.sas.sascalc.data_util.nxsunit import Converter 18 21 19 22 def smear_selection(data, model = None): … … 36 39 # Sanity check. If we are not dealing with a SAS Data1D 37 40 # object, just return None 41 # This checks for 2D data (does not throw exception because fail is common) 38 42 if data.__class__.__name__ not in ['Data1D', 'Theory1D']: 39 43 if data == None: … … 41 45 elif data.dqx_data == None or data.dqy_data == None: 42 46 return None 43 return P ySmear2D(data, model)44 47 return Pinhole2D(data) 48 # This checks for 1D data with smearing info in the data itself (again, fail is likely; no exceptions) 45 49 if not hasattr(data, "dx") and not hasattr(data, "dxl")\ 46 50 and not hasattr(data, "dxw"): … … 48 52 49 53 # Look for resolution smearing data 54 # This is the code that checks for SESANS data; it looks for the file loader 55 # TODO: change other sanity checks to check for file loader instead of data structure? 56 _found_sesans = False 57 #if data.dx is not None and data.meta_data['loader']=='SESANS': 58 if data.dx is not None and data.isSesans: 59 #if data.dx[0] > 0.0: 60 if numpy.size(data.dx[data.dx <= 0]) == 0: 61 _found_sesans = True 62 # if data.dx[0] <= 0.0: 63 if numpy.size(data.dx[data.dx <= 0]) > 0: 64 raise ValueError('one or more of your dx values are negative, please check the data file!') 65 66 if _found_sesans == True: 67 #Pre-compute the Hankel matrix (H) 68 qmax, qunits = data.sample.zacceptance 69 SElength = Converter(data._xunit)(data.x, "A") 70 zaccept = Converter(qunits)(qmax, "1/A"), 71 Rmax = 10000000 72 hankel = SesansTransform(data.x, SElength, zaccept, Rmax) 73 # Then return the actual transform, as if it were a smearing function 74 return PySmear(hankel, model, offset=0) 75 50 76 _found_resolution = False 51 77 if data.dx is not None and len(data.dx) == len(data.x): … … 89 115 Wrapper for pure python sasmodels resolution functions. 90 116 """ 91 def __init__(self, resolution, model ):117 def __init__(self, resolution, model, offset=None): 92 118 self.model = model 93 119 self.resolution = resolution 94 self.offset = numpy.searchsorted(self.resolution.q_calc, self.resolution.q[0]) 120 if offset is None: 121 offset = numpy.searchsorted(self.resolution.q_calc, self.resolution.q[0]) 122 self.offset = offset 95 123 96 124 def apply(self, iq_in, first_bin=0, last_bin=None): -
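The new branch above recognises SESANS data and returns a precomputed Hankel transform wrapped as if it were a smearer. A rough numpy/scipy sketch of the idea only; sasmodels' SesansTransform handles the q grid, acceptance cut-off and normalisation itself, so the constant and bin widths below are illustrative assumptions:

    import numpy as np
    from scipy.special import j0

    def hankel_matrix(SElength, q):
        # H[i, j] ~ J0(q_j * delta_i) * q_j * dq_j / (2 pi), so that G(delta) ~ H.dot(I_q):
        # a zeroth-order Hankel transform of I(q) evaluated at the spin-echo lengths.
        dq = np.gradient(q)
        return j0(np.outer(SElength, q)) * q * dq / (2.0 * np.pi)

    # Applying it then looks like an ordinary smearing step:
    # G = hankel_matrix(SElength, q).dot(I_q)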
src/sas/sascalc/dataloader/data_info.py
r345e7e4 rad4632c 25 25 import numpy 26 26 import math 27 28 class plottable_sesans1D(object):29 """30 SESANS is a place holder for 1D SESANS plottables.31 32 #TODO: This was directly copied from the plottables_1D. Modified Somewhat.33 #Class has been updated.34 """35 # The presence of these should be mutually36 # exclusive with the presence of Qdev (dx)37 x = None38 y = None39 lam = None40 dx = None41 dy = None42 dlam = None43 ## Slit smearing length44 dxl = None45 ## Slit smearing width46 dxw = None47 48 # Units49 _xaxis = ''50 _xunit = ''51 _yaxis = ''52 _yunit = ''53 54 def __init__(self, x, y, lam, dx=None, dy=None, dlam=None):55 # print "SESANS plottable working"56 self.x = numpy.asarray(x)57 self.y = numpy.asarray(y)58 self.lam = numpy.asarray(lam)59 if dx is not None:60 self.dx = numpy.asarray(dx)61 if dy is not None:62 self.dy = numpy.asarray(dy)63 if dlam is not None:64 self.dlam = numpy.asarray(dlam)65 66 def xaxis(self, label, unit):67 """68 set the x axis label and unit69 """70 self._xaxis = label71 self._xunit = unit72 73 def yaxis(self, label, unit):74 """75 set the y axis label and unit76 """77 self._yaxis = label78 self._yunit = unit79 80 27 81 28 class plottable_1D(object): … … 93 40 ## Slit smearing width 94 41 dxw = None 42 ## SESANS specific params (wavelengths for spin echo length calculation) 43 lam = None 44 dlam = None 95 45 96 46 # Units … … 100 50 _yunit = '' 101 51 102 def __init__(self, x, y, dx=None, dy=None, dxl=None, dxw=None ):52 def __init__(self, x, y, dx=None, dy=None, dxl=None, dxw=None, lam=None, dlam=None): 103 53 self.x = numpy.asarray(x) 104 54 self.y = numpy.asarray(y) … … 111 61 if dxw is not None: 112 62 self.dxw = numpy.asarray(dxw) 63 if lam is not None: 64 self.lam = numpy.asarray(lam) 65 if dlam is not None: 66 self.dlam = numpy.asarray(dlam) 113 67 114 68 def xaxis(self, label, unit): … … 398 352 ## Details 399 353 details = None 354 ## SESANS zacceptance 355 zacceptance = None 400 356 401 357 def __init__(self): … … 535 491 ## Loading errors 536 492 errors = None 493 ## SESANS data check 494 isSesans = None 495 537 496 538 497 def __init__(self): … … 567 526 ## Loading errors 568 527 self.errors = [] 528 ## SESANS data check 529 self.isSesans = False 569 530 570 531 def append_empty_process(self): … … 586 547 _str += "Title: %s\n" % self.title 587 548 _str += "Run: %s\n" % str(self.run) 549 _str += "SESANS: %s\n" % str(self.isSesans) 588 550 _str += "Instrument: %s\n" % str(self.instrument) 589 551 _str += "%s\n" % str(self.sample) … … 736 698 return self._perform_union(other) 737 699 738 class SESANSData1D(plottable_sesans1D, DataInfo): 739 """ 740 SESANS 1D data class 741 """ 742 x_unit = 'nm' 743 y_unit = 'pol' 744 745 def __init__(self, x=None, y=None, lam=None, dx=None, dy=None, dlam=None): 700 class Data1D(plottable_1D, DataInfo): 701 """ 702 1D data class 703 """ 704 def __init__(self, x=None, y=None, dx=None, dy=None, lam=None, dlam=None, isSesans=None): 746 705 DataInfo.__init__(self) 747 plottable_sesans1D.__init__(self, x, y, lam, dx, dy, dlam) 706 plottable_1D.__init__(self, x, y, dx, dy,None, None, lam, dlam) 707 self.isSesans = isSesans 708 try: 709 if self.isSesans: # the data is SESANS 710 self.x_unit = 'A' 711 self.y_unit = 'pol' 712 elif not self.isSesans: # the data is SANS 713 self.x_unit = '1/A' 714 self.y_unit = '1/cm' 715 except: # the data is not recognized/supported, and the user is notified 716 raise(TypeError, 'data not recognized, check documentation for supported 1D data formats') 748 717 749 718 def 
__str__(self): … … 759 728 return _str 760 729 761 def clone_without_data(self, length=0, clone=None):762 """763 Clone the current object, without copying the data (which764 will be filled out by a subsequent operation).765 The data arrays will be initialized to zero.766 767 :param length: length of the data array to be initialized768 :param clone: if provided, the data will be copied to clone769 """770 from copy import deepcopy771 if clone is None or not issubclass(clone.__class__, Data1D):772 x = numpy.zeros(length)773 dx = numpy.zeros(length)774 y = numpy.zeros(length)775 dy = numpy.zeros(length)776 clone = Data1D(x, y, dx=dx, dy=dy)777 778 clone.title = self.title779 clone.run = self.run780 clone.filename = self.filename781 clone.instrument = self.instrument782 clone.notes = deepcopy(self.notes)783 clone.process = deepcopy(self.process)784 clone.detector = deepcopy(self.detector)785 clone.sample = deepcopy(self.sample)786 clone.source = deepcopy(self.source)787 clone.collimation = deepcopy(self.collimation)788 clone.trans_spectrum = deepcopy(self.trans_spectrum)789 clone.meta_data = deepcopy(self.meta_data)790 clone.errors = deepcopy(self.errors)791 792 return clone793 794 class Data1D(plottable_1D, DataInfo):795 """796 1D data class797 """798 x_unit = '1/A'799 y_unit = '1/cm'800 801 def __init__(self, x, y, dx=None, dy=None):802 DataInfo.__init__(self)803 plottable_1D.__init__(self, x, y, dx, dy)804 805 def __str__(self):806 """807 Nice printout808 """809 _str = "%s\n" % DataInfo.__str__(self)810 _str += "Data:\n"811 _str += " Type: %s\n" % self.__class__.__name__812 _str += " X-axis: %s\t[%s]\n" % (self._xaxis, self._xunit)813 _str += " Y-axis: %s\t[%s]\n" % (self._yaxis, self._yunit)814 _str += " Length: %g\n" % len(self.x)815 return _str816 817 730 def is_slit_smeared(self): 818 731 """ … … 843 756 y = numpy.zeros(length) 844 757 dy = numpy.zeros(length) 845 clone = Data1D(x, y, dx=dx, dy=dy) 758 lam = numpy.zeros(length) 759 dlam = numpy.zeros(length) 760 clone = Data1D(x, y, lam=lam, dx=dx, dy=dy, dlam=dlam) 846 761 847 762 clone.title = self.title … … 1021 936 def __init__(self, data=None, err_data=None, qx_data=None, 1022 937 qy_data=None, q_data=None, mask=None, 1023 dqx_data=None, dqy_data=None): 1024 self.y_bins = [] 1025 self.x_bins = [] 938 dqx_data=None, dqy_data=None, isSesans=None): 1026 939 DataInfo.__init__(self) 1027 940 plottable_2D.__init__(self, data, err_data, qx_data, 1028 941 qy_data, q_data, mask, dqx_data, dqy_data) 942 self.y_bins = [] 943 self.x_bins = [] 944 self.isSesans=isSesans 945 1029 946 if len(self.detector) > 0: 1030 947 raise RuntimeError, "Data2D: Detector bank already filled at init" … … 1265 1182 final_dataset.xmin = data.xmin 1266 1183 final_dataset.ymin = data.ymin 1184 final_dataset.isSesans = datainfo.isSesans 1267 1185 final_dataset.title = datainfo.title 1268 1186 final_dataset.run = datainfo.run -
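After the merge above a single Data1D class carries lam/dlam and an isSesans flag, and the flag selects the default axis units. A short usage sketch with placeholder values, following the constructor signature shown in the diff:

    import numpy as np
    from sas.sascalc.dataloader.data_info import Data1D

    z = np.linspace(50.0, 2000.0, 40)          # spin-echo lengths
    pol = np.exp(-z / 1000.0)                  # placeholder polarisation values
    sesans = Data1D(x=z, y=pol, lam=np.full_like(z, 2.0),
                    dlam=np.full_like(z, 0.2), isSesans=True)
    # SESANS data gets 'A' / 'pol'; plain SANS data would get '1/A' / '1/cm'.
    assert sesans.x_unit == 'A' and sesans.y_unit == 'pol'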
src/sas/sascalc/dataloader/readers/cansas_constants.py
r250fec92 rad4632c 133 133 "variable" : None, 134 134 "children" : {"Idata" : SASDATA_IDATA, 135 "Sesans": {"storeas": "content"}, 136 "zacceptance": {"storeas": "float"}, 135 137 "<any>" : ANY 136 138 } -
src/sas/sascalc/dataloader/readers/sesans_reader.py
r345e7e4 rb5db35d 8 8 import numpy 9 9 import os 10 from sas.sascalc.dataloader.data_info import SESANSData1D10 from sas.sascalc.dataloader.data_info import Data1D 11 11 12 12 # Check whether we have a converter available … … 59 59 raise RuntimeError, "sesans_reader: cannot open %s" % path 60 60 buff = input_f.read() 61 # print buff62 61 lines = buff.splitlines() 63 # print lines64 #Jae could not find python universal line spliter:65 #keep the below for now66 # some ascii data has \r line separator,67 # try it when the data is on only one long line68 # if len(lines) < 2 :69 # lines = buff.split('\r')70 71 62 x = numpy.zeros(0) 72 63 y = numpy.zeros(0) … … 83 74 tdlam = numpy.zeros(0) 84 75 tdx = numpy.zeros(0) 85 # print "all good" 86 output = SESANSData1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam) 87 # print output 76 output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True) 88 77 self.filename = output.filename = basename 89 78 90 # #Initialize counters for data lines and header lines.91 # is_data = False # Has more than 5 lines92 # # More than "5" lines of data is considered as actual93 # # data unless that is the only data94 # mum_data_lines = 595 # # To count # of current data candidate lines96 # i = -197 # # To count total # of previous data candidate lines98 # i1 = -199 # # To count # of header lines100 # j = -1101 # # Helps to count # of header lines102 # j1 = -1103 # #minimum required number of columns of data; ( <= 4).104 # lentoks = 2105 79 paramnames=[] 106 80 paramvals=[] … … 111 85 Pvals=[] 112 86 dPvals=[] 113 # print x 114 # print zvals 87 115 88 for line in lines: 116 89 # Initial try for CSV (split on ,) … … 122 95 if len(toks)>5: 123 96 zvals.append(toks[0]) 124 dzvals.append(toks[ 1])125 lamvals.append(toks[ 2])126 dlamvals.append(toks[ 3])127 Pvals.append(toks[ 4])128 dPvals.append(toks[ 5])97 dzvals.append(toks[3]) 98 lamvals.append(toks[4]) 99 dlamvals.append(toks[5]) 100 Pvals.append(toks[1]) 101 dPvals.append(toks[2]) 129 102 else: 130 103 continue … … 140 113 default_z_unit = "A" 141 114 data_conv_P = None 142 default_p_unit = " " 115 default_p_unit = " " # Adjust unit for axis (L^-3) 143 116 lam_unit = lam_header[1].replace("[","").replace("]","") 117 if lam_unit == 'AA': 118 lam_unit = 'A' 144 119 varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]] 145 120 valrange=range(1, len(zvals)) … … 161 136 output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit) 162 137 output.y = y 138 output.y_unit = '\AA^{-2} cm^{-1}' # output y_unit added 163 139 output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit) 164 140 output.dy = dy … … 166 142 output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit) 167 143 168 output.xaxis("\ rm{z}", output.x_unit)169 output.yaxis("\\rm{ P/P0}", output.y_unit)144 output.xaxis("\\rm{z}", output.x_unit) 145 output.yaxis("\\rm{ln(P)/(t \lambda^2)}", output.y_unit) # Adjust label to ln P/(lam^2 t), remove lam column refs 170 146 # Store loading process information 171 147 output.meta_data['loader'] = self.type_name 172 output.sample.thickness = float(paramvals[6])148 #output.sample.thickness = float(paramvals[6]) 173 149 output.sample.name = paramvals[1] 174 150 output.sample.ID = paramvals[0] 175 151 zaccept_unit_split = paramnames[7].split("[") 176 152 zaccept_unit = zaccept_unit_split[1].replace("]","") 177 if zaccept_unit.strip() == '\AA^-1' :153 if zaccept_unit.strip() == '\AA^-1' or zaccept_unit.strip() == '\A^-1': 178 154 
zaccept_unit = "1/A" 179 155 output.sample.zacceptance=(float(paramvals[7]),zaccept_unit) 180 output.vars =varheader156 output.vars = varheader 181 157 182 158 if len(output.x) < 1: -
src/sas/sascalc/fit/AbstractFitEngine.py
rd3911e3 ra9f579c 131 131 a way to get residuals from data. 132 132 """ 133 def __init__(self, x, y, dx=None, dy=None, smearer=None, data=None ):133 def __init__(self, x, y, dx=None, dy=None, smearer=None, data=None, lam=None, dlam=None): 134 134 """ 135 135 :param smearer: is an object of class QSmearer or SlitSmearer … … 152 152 153 153 """ 154 Data1D.__init__(self, x=x, y=y, dx=dx, dy=dy )154 Data1D.__init__(self, x=x, y=y, dx=dx, dy=dy, lam=lam, dlam=dlam) 155 155 self.num_points = len(x) 156 156 self.sas_data = data -
src/sas/sasgui/guiframe/dataFitting.py
r345e7e4 r68adf86 17 17 """ 18 18 """ 19 def __init__(self, x=None, y=None, dx=None, dy=None): 19 20 def __init__(self, x=None, y=None, dx=None, dy=None, lam=None, dlam=None, isSesans=False): 20 21 """ 21 22 """ … … 24 25 if y is None: 25 26 y = [] 26 PlotData1D.__init__(self, x, y, dx, dy) 27 LoadData1D.__init__(self, x, y, dx, dy) 27 self.isSesans = isSesans 28 PlotData1D.__init__(self, x, y, dx, dy, lam, dlam) 29 LoadData1D.__init__(self, x, y, dx, dy, lam, dlam, isSesans) 30 28 31 self.id = None 29 32 self.list_group_id = [] … … 32 35 self.path = None 33 36 self.xtransform = None 37 if self.isSesans: 38 self.xtransform = "x" 34 39 self.ytransform = None 40 if self.isSesans: 41 self.ytransform = "y" 35 42 self.title = "" 36 43 self.scale = None … … 68 75 # First, check the data compatibility 69 76 dy, dy_other = self._validity_check(other) 70 result = Data1D(x=[], y=[], dx=None, dy=None)77 result = Data1D(x=[], y=[], lam=[], dx=None, dy=None, dlam=None) 71 78 result.clone_without_data(length=len(self.x), clone=self) 72 79 result.copy_from_datainfo(data1d=self) … … 115 122 # First, check the data compatibility 116 123 self._validity_check_union(other) 117 result = Data1D(x=[], y=[], dx=None, dy=None)124 result = Data1D(x=[], y=[], lam=[], dx=None, dy=None, dlam=None) 118 125 tot_length = len(self.x) + len(other.x) 119 126 result = self.clone_without_data(length=tot_length, clone=result) 127 if self.dlam == None or other.dlam is None: 128 result.dlam = None 129 else: 130 result.dlam = numpy.zeros(tot_length) 120 131 if self.dy == None or other.dy is None: 121 132 result.dy = None … … 141 152 result.y = numpy.append(self.y, other.y) 142 153 result.y = result.y[ind] 154 result.lam = numpy.append(self.lam, other.lam) 155 result.lam = result.lam[ind] 156 if result.dlam != None: 157 result.dlam = numpy.append(self.dlam, other.dlam) 158 result.dlam = result.dlam[ind] 143 159 if result.dy != None: 144 160 result.dy = numpy.append(self.dy, other.dy) … … 260 276 # First, check the data compatibility 261 277 self._validity_check_union(other) 262 result = Data1D(x=[], y=[], dx=None, dy=None)278 result = Data1D(x=[], y=[], lam=[], dx=None, dy=None, dlam=[]) 263 279 tot_length = len(self.x)+len(other.x) 264 280 result.clone_without_data(length=tot_length, clone=self) 281 if self.dlam == None or other.dlam is None: 282 result.dlam = None 283 else: 284 result.dlam = numpy.zeros(tot_length) 265 285 if self.dy == None or other.dy is None: 266 286 result.dy = None … … 285 305 result.y = numpy.append(self.y, other.y) 286 306 result.y = result.y[ind] 307 result.lam = numpy.append(self.lam, other.lam) 308 result.lam = result.lam[ind] 287 309 if result.dy != None: 288 310 result.dy = numpy.append(self.dy, other.dy) -
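The union operations above merge two data sets by appending every column (now including lam/dlam) and re-ordering them all with one argsort of the combined x. The pattern in isolation:

    import numpy as np

    def merge_sorted(x1, y1, x2, y2):
        # Append, then sort every column by the combined x so the merged set stays monotonic.
        x = np.append(x1, x2)
        ind = np.argsort(x)
        y = np.append(y1, y2)[ind]
        return x[ind], y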
src/sas/sasgui/guiframe/data_manager.py
r345e7e4 r20522e1 61 61 62 62 if issubclass(Data2D, data.__class__): 63 new_plot = Data2D(image=None, err_image=None) 64 else: 65 new_plot = Data1D(x=[], y=[], dx=None, dy=None) 66 63 new_plot = Data2D(image=None, err_image=None, isSesans=data.isSesans) # For now, isSesans for 2D data is always false 64 else: 65 new_plot = Data1D(x=[], y=[], dx=None, dy=None, lam=None, dlam=None, isSesans=data.isSesans) 66 67 68 #elif data.meta_data['loader'] == 'SESANS': 69 # new_plot = Data1D(x=[], y=[], dx=None, dy=None, lam=None, dlam=None, isSesans=True) 70 #else: 71 # new_plot = Data1D(x=[], y=[], dx=None, dy=None, lam=None, dlam=None) #SESANS check??? 72 67 73 new_plot.copy_from_datainfo(data) 68 74 data.clone_without_data(clone=new_plot) -
src/sas/sasgui/plottools/plottables.py
r345e7e4 ra9f579c 1023 1023 """ 1024 1024 1025 def __init__(self, x, y, dx=None, dy=None ):1025 def __init__(self, x, y, dx=None, dy=None, lam=None, dlam=None): 1026 1026 """ 1027 1027 Draw points specified by x[i],y[i] in the current color/symbol. … … 1037 1037 self.x = x 1038 1038 self.y = y 1039 self.lam = lam 1039 1040 self.dx = dx 1040 1041 self.dy = dy 1042 self.dlam = dlam 1041 1043 self.source = None 1042 1044 self.detector = None