Changeset af08e55 in sasview for src/sas/sascalc
- Timestamp: Jan 12, 2017 11:01:09 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 1905128
- Parents: 83c09af
- Location: src/sas/sascalc/dataloader/readers
- Files: 3 edited
src/sas/sascalc/dataloader/readers/cansas_reader.py
--- src/sas/sascalc/dataloader/readers/cansas_reader.py (r83c09af)
+++ src/sas/sascalc/dataloader/readers/cansas_reader.py (raf08e55)
@@ -21,5 +21,5 @@
 # For saving individual sections of data
 from sas.sascalc.dataloader.data_info import Data1D, Data2D, DataInfo, \
-    plottable_1D
+    plottable_1D, plottable_2D
 from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, \
     Detector, Process, Aperture
@@ -59,5 +59,5 @@
     The CanSAS reader requires PyXML 0.8.4 or later.
     """
-    ## CanSAS version - defaults to version 1.0
+    # CanSAS version - defaults to version 1.0
     cansas_version = "1.0"
     base_ns = "{cansas1d/1.0}"
@@ -66,21 +66,21 @@
     invalid = True
     frm = ""
-    ## Log messages and errors
+    # Log messages and errors
     logging = None
     errors = set()
-    ## Namespace hierarchy for current xml_file object
+    # Namespace hierarchy for current xml_file object
     names = None
     ns_list = None
-    ## Temporary storage location for loading multiple data sets in a single file
+    # Temporary storage location for loading multiple data sets in a single file
     current_datainfo = None
    current_dataset = None
     current_data1d = None
     data = None
-    ## List of data1D objects to be sent back to SasView
+    # List of data1D objects to be sent back to SasView
     output = None
-    ## Wildcards
+    # Wildcards
     type = ["XML files (*.xml)|*.xml", "SasView Save Files (*.svs)|*.svs"]
-    ## List of allowed extensions
+    # List of allowed extensions
     ext = ['.xml', '.XML', '.svs', '.SVS']
-    ## Flag to bypass extension check
+    # Flag to bypass extension check
     allow_all = True
@@ -223,6 +223,13 @@
             self.parent_class = tagname_original
             if tagname == 'SASdata':
-                self._initialize_new_data_set()
-                ## Recursion step to access data within the group
+                self._initialize_new_data_set(node)
+                if isinstance(self.current_dataset, plottable_2D):
+                    x_bins = attr.get("x_bins", "")
+                    y_bins = attr.get("y_bins", "")
+                    if x_bins is not "" and y_bins is not "":
+                        self.current_dataset.shape = (x_bins, y_bins)
+                    else:
+                        self.current_dataset.shape = ()
+                # Recursion step to access data within the group
                 self._parse_entry(node, True)
             if tagname == "SASsample":
@@ -239,5 +246,5 @@
             data_point, unit = self._get_node_value(node, tagname)
 
-            ## If this is a dataset, store the data appropriately
+            # If this is a dataset, store the data appropriately
             if tagname == 'Run':
                 self.current_datainfo.run_name[data_point] = name
@@ -248,9 +255,9 @@
                 self.current_datainfo.notes.append(data_point)
 
-            ## I and Q Data
-            elif tagname == 'I':
+            # I and Q - 1D data
+            elif tagname == 'I' and isinstance(self.current_dataset, plottable_1D):
                 self.current_dataset.yaxis("Intensity", unit)
                 self.current_dataset.y = np.append(self.current_dataset.y, data_point)
-            elif tagname == 'Idev':
+            elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_1D):
                 self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)
             elif tagname == 'Q':
@@ -267,6 +274,26 @@
             elif tagname == 'Shadowfactor':
                 pass
-
-            ## Sample Information
+            # I and Qx, Qy - 2D data
+            elif tagname == 'I' and isinstance(self.current_dataset, plottable_2D):
+                self.current_dataset.yaxis("Intensity", unit)
+                self.current_dataset.data = np.append(self.current_dataset.data, data_point)
+            elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_2D):
+                self.current_dataset.err_data = np.append(self.current_dataset.err_data, data_point)
+            elif tagname == 'Qx':
+                self.current_dataset.xaxis("Qx", unit)
+                self.current_dataset.qx_data = np.append(self.current_dataset.qx_data, data_point)
+            elif tagname == 'Qy':
+                self.current_dataset.yaxis("Qy", unit)
+                self.current_dataset.qy_data = np.append(self.current_dataset.qy_data, data_point)
+            elif tagname == 'Qxdev':
+                self.current_dataset.xaxis("Qxdev", unit)
+                self.current_dataset.dqx_data = np.append(self.current_dataset.dqx_data, data_point)
+            elif tagname == 'Qydev':
+                self.current_dataset.yaxis("Qydev", unit)
+                self.current_dataset.dqy_data = np.append(self.current_dataset.dqy_data, data_point)
+            elif tagname == 'Mask':
+                self.current_dataset.mask = np.append(self.current_dataset.mask, bool(data_point))
+
+            # Sample Information
             elif tagname == 'ID' and self.parent_class == 'SASsample':
                 self.current_datainfo.sample.ID = data_point
@@ -302,8 +329,8 @@
                 self.current_datainfo.sample.orientation_unit = unit
 
-            ## Instrumental Information
+            # Instrumental Information
             elif tagname == 'name' and self.parent_class == 'SASinstrument':
                 self.current_datainfo.instrument = data_point
-            ## Detector Information
+            # Detector Information
             elif tagname == 'name' and self.parent_class == 'SASdetector':
                 self.detector.name = data_point
@@ -350,5 +377,5 @@
                 self.detector.orientation.z = data_point
                 self.detector.orientation_unit = unit
-            ## Collimation and Aperture
+            # Collimation and Aperture
             elif tagname == 'length' and self.parent_class == 'SAScollimation':
                 self.collimation.length = data_point
@@ -369,5 +396,5 @@
                 self.collimation.size_unit = unit
 
-            ## Process Information
+            # Process Information
             elif tagname == 'name' and self.parent_class == 'SASprocess':
                 self.process.name = data_point
@@ -389,5 +416,5 @@
                 self.process.term.append(dic)
 
-            ## Transmission Spectrum
+            # Transmission Spectrum
             elif tagname == 'T' and self.parent_class == 'Tdata':
                 self.transspectrum.transmission = np.append(self.transspectrum.transmission, data_point)
@@ -400,5 +427,5 @@
                 self.transspectrum.wavelength_unit = unit
 
-            ## Source Information
+            # Source Information
             elif tagname == 'wavelength' and (self.parent_class == 'SASsource' or self.parent_class == 'SASData'):
                 self.current_datainfo.source.wavelength = data_point
@@ -427,5 +454,5 @@
                 self.current_datainfo.source.beam_shape = data_point
 
-            ## Everything else goes in meta_data
+            # Everything else goes in meta_data
             else:
                 new_key = self._create_unique_key(self.current_datainfo.meta_data, tagname)
@@ -441,8 +468,3 @@
             self.add_data_set()
             empty = None
-            if self.output[0].dx is not None:
-                self.output[0].dxl = np.empty(0)
-                self.output[0].dxw = np.empty(0)
-            else:
-                self.output[0].dx = np.empty(0)
             return self.output[0], empty
@@ -517,15 +539,18 @@
         self.current_datainfo = DataInfo()
 
-    def _initialize_new_data_set(self, parent_list=None):
+    def _initialize_new_data_set(self, node=None):
         """
         A private class method to generate a new 1D data object.
         Outside methods should call add_data_set() to be sure any existing data is stored properly.
 
-        :param parent_list: List of names of parent elements
-        """
-
-        if parent_list is None:
-            parent_list = []
+        :param node: XML node to determine if 1D or 2D data
+        """
         x = np.array(0)
         y = np.array(0)
+        for child in node:
+            if child.tag.replace(self.base_ns, "") == "Idata":
+                for i_child in child:
+                    if i_child.tag.replace(self.base_ns, "") == "Qx":
+                        self.current_dataset = plottable_2D()
+                        return
         self.current_dataset = plottable_1D(x, y)
@@ -563,38 +588,75 @@
         """
 
-        ## Append errors to dataset and reset class errors
+        # Append errors to dataset and reset class errors
         self.current_datainfo.errors = set()
         for error in self.errors:
             self.current_datainfo.errors.add(error)
         self.errors.clear()
 
-        ## Combine all plottables with datainfo and append each to output
-        ## Type cast data arrays to float64 and find min/max as appropriate
+        # Combine all plottables with datainfo and append each to output
+        # Type cast data arrays to float64 and find min/max as appropriate
         for dataset in self.data:
-            if dataset.x is not None:
-                dataset.x = np.delete(dataset.x, [0])
-                dataset.x = dataset.x.astype(np.float64)
-                dataset.xmin = np.min(dataset.x)
-                dataset.xmax = np.max(dataset.x)
-            if dataset.y is not None:
-                dataset.y = np.delete(dataset.y, [0])
-                dataset.y = dataset.y.astype(np.float64)
-                dataset.ymin = np.min(dataset.y)
-                dataset.ymax = np.max(dataset.y)
-            if dataset.dx is not None:
-                dataset.dx = np.delete(dataset.dx, [0])
-                dataset.dx = dataset.dx.astype(np.float64)
-            if dataset.dxl is not None:
-                dataset.dxl = np.delete(dataset.dxl, [0])
-                dataset.dxl = dataset.dxl.astype(np.float64)
-            if dataset.dxw is not None:
-                dataset.dxw = np.delete(dataset.dxw, [0])
-                dataset.dxw = dataset.dxw.astype(np.float64)
-            if dataset.dy is not None:
-                dataset.dy = np.delete(dataset.dy, [0])
-                dataset.dy = dataset.dy.astype(np.float64)
-            np.trim_zeros(dataset.x)
-            np.trim_zeros(dataset.y)
-            np.trim_zeros(dataset.dy)
+            if isinstance(dataset, plottable_1D):
+                if dataset.x is not None:
+                    dataset.x = np.delete(dataset.x, [0])
+                    dataset.x = dataset.x.astype(np.float64)
+                    dataset.xmin = np.min(dataset.x)
+                    dataset.xmax = np.max(dataset.x)
+                if dataset.y is not None:
+                    dataset.y = np.delete(dataset.y, [0])
+                    dataset.y = dataset.y.astype(np.float64)
+                    dataset.ymin = np.min(dataset.y)
+                    dataset.ymax = np.max(dataset.y)
+                if dataset.dx is not None:
+                    dataset.dx = np.delete(dataset.dx, [0])
+                    dataset.dx = dataset.dx.astype(np.float64)
+                if dataset.dxl is not None:
+                    dataset.dxl = np.delete(dataset.dxl, [0])
+                    dataset.dxl = dataset.dxl.astype(np.float64)
+                if dataset.dxw is not None:
+                    dataset.dxw = np.delete(dataset.dxw, [0])
+                    dataset.dxw = dataset.dxw.astype(np.float64)
+                if dataset.dy is not None:
+                    dataset.dy = np.delete(dataset.dy, [0])
+                    dataset.dy = dataset.dy.astype(np.float64)
+                np.trim_zeros(dataset.x)
+                np.trim_zeros(dataset.y)
+                np.trim_zeros(dataset.dy)
+            elif isinstance(dataset, plottable_2D):
+                dataset.data = np.delete(dataset.data, [0])
+                dataset.data = dataset.data.astype(np.float64)
+                dataset.qx_data = np.delete(dataset.qx_data, [0])
+                dataset.qx_data = dataset.qx_data.astype(np.float64)
+                dataset.xmin = np.min(dataset.qx_data)
+                dataset.xmax = np.max(dataset.qx_data)
+                dataset.qy_data = np.delete(dataset.qy_data, [0])
+                dataset.qy_data = dataset.qy_data.astype(np.float64)
+                dataset.ymin = np.min(dataset.qy_data)
+                dataset.ymax = np.max(dataset.qy_data)
+                dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data
+                                         + dataset.qy_data * dataset.qy_data)
+                if dataset.err_data is not None:
+                    dataset.err_data = np.delete(dataset.err_data, [0])
+                    dataset.err_data = dataset.err_data.astype(np.float64)
+                if dataset.dqx_data is not None:
+                    dataset.dqx_data = np.delete(dataset.dqx_data, [0])
+                    dataset.dqx_data = dataset.dqx_data.astype(np.float64)
+                if dataset.dqy_data is not None:
+                    dataset.dqy_data = np.delete(dataset.dqy_data, [0])
+                    dataset.dqy_data = dataset.dqy_data.astype(np.float64)
+                if dataset.mask is not None:
+                    dataset.mask = np.delete(dataset.mask, [0])
+                    dataset.mask = dataset.mask.astype(dtype=bool)
+
+                if len(dataset.shape) == 2:
+                    n_rows, n_cols = dataset.shape
+                    dataset.y_bins = dataset.qy_data[0::int(n_cols)]
+                    dataset.x_bins = dataset.qx_data[:int(n_cols)]
+                    dataset.data = dataset.data.flatten()
+                else:
+                    dataset.y_bins = []
+                    dataset.x_bins = []
+                    dataset.data = dataset.data.flatten()
+
             final_dataset = combine_data(dataset, self.current_datainfo)
             self.output.append(final_dataset)
@@ -696,5 +758,5 @@
                 and local_unit.lower() != "none":
             if HAS_CONVERTER == True:
-                ## Check local units - bad units raise KeyError
+                # Check local units - bad units raise KeyError
                 data_conv_q = Converter(local_unit)
                 value_unit = default_unit
@@ -743,32 +805,65 @@
         A method to check all resolution data sets are the same size as I and Q
         """
-        dql_exists = False
-        dqw_exists = False
-        dq_exists = False
-        di_exists = False
-        if self.current_dataset.dxl is not None:
-            dql_exists = True
-        if self.current_dataset.dxw is not None:
-            dqw_exists = True
-        if self.current_dataset.dx is not None:
-            dq_exists = True
-        if self.current_dataset.dy is not None:
-            di_exists = True
-        if dqw_exists and not dql_exists:
-            array_size = self.current_dataset.dxw.size - 1
-            self.current_dataset.dxl = np.append(self.current_dataset.dxl, np.zeros([array_size]))
-        elif dql_exists and not dqw_exists:
-            array_size = self.current_dataset.dxl.size - 1
-            self.current_dataset.dxw = np.append(self.current_dataset.dxw, np.zeros([array_size]))
-        elif not dql_exists and not dqw_exists and not dq_exists:
-            array_size = self.current_dataset.x.size - 1
-            self.current_dataset.dx = np.append(self.current_dataset.dx, np.zeros([array_size]))
-        if not di_exists:
-            array_size = self.current_dataset.y.size - 1
-            self.current_dataset.dy = np.append(self.current_dataset.dy, np.zeros([array_size]))
-
+        if isinstance(self.current_dataset, plottable_1D):
+            dql_exists = False
+            dqw_exists = False
+            dq_exists = False
+            di_exists = False
+            if self.current_dataset.dxl is not None:
+                dql_exists = True
+            if self.current_dataset.dxw is not None:
+                dqw_exists = True
+            if self.current_dataset.dx is not None:
+                dq_exists = True
+            if self.current_dataset.dy is not None:
+                di_exists = True
+            if dqw_exists and not dql_exists:
+                array_size = self.current_dataset.dxw.size - 1
+                self.current_dataset.dxl = np.append(self.current_dataset.dxl,
+                                                     np.zeros([array_size]))
+            elif dql_exists and not dqw_exists:
+                array_size = self.current_dataset.dxl.size - 1
+                self.current_dataset.dxw = np.append(self.current_dataset.dxw,
+                                                     np.zeros([array_size]))
+            elif not dql_exists and not dqw_exists and not dq_exists:
+                array_size = self.current_dataset.x.size - 1
+                self.current_dataset.dx = np.append(self.current_dataset.dx,
+                                                    np.zeros([array_size]))
+            if not di_exists:
+                array_size = self.current_dataset.y.size - 1
+                self.current_dataset.dy = np.append(self.current_dataset.dy,
+                                                    np.zeros([array_size]))
+        elif isinstance(self.current_dataset, plottable_2D):
+            dqx_exists = False
+            dqy_exists = False
+            di_exists = False
+            mask_exists = False
+            if self.current_dataset.dqx_data is not None:
+                dqx_exists = True
+            if self.current_dataset.dqy_data is not None:
+                dqy_exists = True
+            if self.current_dataset.err_data is not None:
+                di_exists = True
+            if self.current_dataset.mask is not None:
+                mask_exists = True
+            if not dqy_exists:
+                array_size = self.current_dataset.qy_data.size - 1
+                self.current_dataset.dqy_data = np.append(
+                    self.current_dataset.dqy_data, np.zeros([array_size]))
+            if not dqx_exists:
+                array_size = self.current_dataset.qx_data.size - 1
+                self.current_dataset.dqx_data = np.append(
+                    self.current_dataset.dqx_data, np.zeros([array_size]))
+            if not di_exists:
+                array_size = self.current_dataset.data.size - 1
+                self.current_dataset.err_data = np.append(
+                    self.current_dataset.err_data, np.zeros([array_size]))
+            if not mask_exists:
+                array_size = self.current_dataset.data.size - 1
+                self.current_dataset.mask = np.append(
+                    self.current_dataset.mask,
+                    np.ones([array_size], dtype=bool))
 
     ####### All methods below are for writing CanSAS XML files #######
-
 
     def write(self, filename, datainfo):
@@ -795,4 +890,5 @@
         :param datainfo: Data1D object
         """
+        is_2d = False
         if issubclass(datainfo.__class__, Data2D):
             is_2d = True
@@ -950,3 +1046,7 @@
         """
         node = self.create_element("SASdata")
+        attr = {}
+        if datainfo.data.shape:
+            attr["x_bins"] = len(datainfo.x_bins)
+            attr["y_bins"] = len(datainfo.y_bins)
         self.append(node, entry_node)
@@ -971,5 +1071,5 @@
                                 {'unit': datainfo._xunit})
             if datainfo.mask is not None and len(datainfo.mask) > i:
-                self.write_node(point, "Mask", datainfo.err_data[i])
+                self.write_node(point, "Mask", datainfo.mask[i])
 
     def _write_trans_spectrum(self, datainfo, entry_node):
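For orientation (this example is not part of the changeset), the new 1D-vs-2D dispatch in _initialize_new_data_set() reduces to peeking at the <Idata> children of a <SASdata> block for a <Qx> tag. A minimal standalone sketch of the same test, assuming lxml and the reader's default namespace string; the function name sasdata_is_2d is hypothetical:

from lxml import etree

CANSAS_NS = "{cansas1d/1.0}"  # same default namespace string the reader keeps in base_ns

def sasdata_is_2d(sasdata_node):
    # Mirrors the changeset's check: any <Idata> child holding a <Qx>
    # element marks the whole <SASdata> block as 2D.
    for child in sasdata_node:
        if child.tag.replace(CANSAS_NS, "") == "Idata":
            for i_child in child:
                if i_child.tag.replace(CANSAS_NS, "") == "Qx":
                    return True
    return False

xml = b'''<SASdata xmlns="cansas1d/1.0">
  <Idata><Qx>0.01</Qx><Qy>0.02</Qy><I>100</I></Idata>
</SASdata>'''
print(sasdata_is_2d(etree.fromstring(xml)))  # True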
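Likewise, the bin-recovery slicing that add_data_set() now applies to gridded 2D data is easiest to see in isolation. A sketch with made-up values (n_rows, n_cols, qx_data, and qy_data mirror the names in the diff; the layout assumption is the reader's, i.e. the grid is flattened with Qx varying fastest):

import numpy as np

n_rows, n_cols = 3, 4  # from shape = (x_bins, y_bins) on <SASdata>

# Flattened detector grid: Qx repeats per row, Qy is constant along a row.
qx_data = np.tile(np.linspace(-0.1, 0.1, n_cols), n_rows)
qy_data = np.repeat(np.linspace(-0.05, 0.05, n_rows), n_cols)

# Same slicing as the reader: every n_cols-th Qy value is one row bin,
# and the first n_cols Qx values are the column bins.
y_bins = qy_data[0::int(n_cols)]
x_bins = qx_data[:int(n_cols)]
q_data = np.sqrt(qx_data ** 2 + qy_data ** 2)  # |Q|, as computed in add_data_set()

assert y_bins.size == n_rows and x_bins.size == n_cols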
src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd
--- src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd (r83c09af)
+++ src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd (raf08e55)
@@ -62,4 +62,6 @@
     <attribute name="name" type="string" use="optional" default="" />
     <attribute name="timestamp" type="dateTime" use="optional" />
+    <attribute name="x_bins" type="string" use="optional" />
+    <attribute name="y_bins" type="string" use="optional" />
 </complexType>
 
src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd
--- src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd (r83c09af)
+++ src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd (raf08e55)
@@ -62,4 +62,6 @@
     <attribute name="name" type="string" use="optional" default="" />
     <attribute name="timestamp" type="dateTime" use="optional" />
+    <attribute name="x_bins" type="string" use="optional" />
+    <attribute name="y_bins" type="string" use="optional" />
 </complexType>
 
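Both schema variants gain the same pair of optional attributes, so a writer can stamp grid dimensions onto <SASdata>; that is what lets the reader preset current_dataset.shape before walking the data points. An illustrative lxml snippet (not from the changeset; the attribute values are arbitrary, and the schema types them as strings) producing such an element:

from lxml import etree

# Build a <SASdata> element carrying the new optional x_bins / y_bins attributes.
sasdata = etree.Element("SASdata", x_bins="100", y_bins="100")
print(etree.tostring(sasdata).decode())  # <SASdata x_bins="100" y_bins="100"/>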