Changeset ad52d31 in sasview for src/sas


Timestamp: Jun 20, 2016 11:56:49 AM
Author: krzywon
Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
Children: 7673ecd
Parents: 53b9fc8
Message:

Modified the CanSAS HDF5 reader to pull in additional instrument, sample, and other metadata that is not strictly required to analyze the data.
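
For context, here is a minimal usage sketch of the reader after this change. It is not part of the changeset: the file name is hypothetical, and it assumes read() returns the list of Data1D/Data2D objects, as the "Return data set(s)" comment in the diff below suggests. The attributes printed are among the metadata this change starts to populate.

    from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader

    reader = Reader()
    # "example_cansas2.h5" is an illustrative file name only
    datasets = reader.read("example_cansas2.h5")
    for data in datasets:
        print(data.instrument)                 # SASinstrument/name
        for detector in data.detector:         # one Detector per SASdetector group
            print(detector.name)
            print(detector.distance)           # SDD; unit in detector.distance_unit
        for process in data.process:           # one Process per SASprocess group
            print(process.notes)
        for spectrum in data.trans_spectrum:   # one TransmissionSpectrum per group
            print(spectrum.wavelength)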

File: 1 edited

  • src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py

    r68aa210 rad52d31  
    99import sys 
    1010 
    11 from sas.sascalc.dataloader.data_info import Data1D, Data2D, Sample, Source 
     11from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, Sample, Source 
    1212from sas.sascalc.dataloader.data_info import Process, Aperture, Collimation, TransmissionSpectrum, Detector 
    1313 
     
    1515class Reader(): 
    1616    """ 
    17     This is a placeholder for the epic class description I plan on writing in the future. But not today. 
     17    A class for reading in CanSAS v2.0 data files. The existing iteration opens Mantid-generated, HDF5-formatted files 
     18    with file extension .h5/.H5. Any number of data sets may be present within the file and any dimensionality of data 
     19    may be used. Currently 1D and 2D SAS data sets are supported, but future implementations will include 1D and 2D 
     20    SESANS data. This class assumes a single data set for each sasentry. 
    1821 
    1922    :Dependencies: 
     
    3437    parent_list = None 
    3538    ## Data type name 
    36     type_name = "CanSAS 2D" 
     39    type_name = "CanSAS 2.0" 
    3740    ## Wildcards 
    38     type = ["CanSAS 2D HDF5 Files (*.h5)|*.h5"] 
     41    type = ["CanSAS 2.0 HDF5 Files (*.h5)|*.h5"] 
    3942    ## List of allowed extensions 
    4043    ext = ['.h5', '.H5'] 
     
    4649    def __init__(self): 
    4750        """ 
    48         Create the reader object and define initial states for certain class variables 
     51        Create the reader object and define initial states for class variables 
    4952        """ 
    5053        self.current_dataset = None 
     54        self.datasets = [] 
    5155        self.raw_data = None 
    5256        self.errors = set() 
     
    6468    def read(self, filename): 
    6569        """ 
    66         General read method called by the top-level SasView data_loader. 
     70        This is the general read method that all SasView data_loaders must have. 
    6771 
    6872        :param filename: A path for an HDF5 formatted CanSAS 2D data file. 
     
    7074        """ 
    7175 
    72         ## Reinitialize the class when loading new data file to reset all class variables 
     76        ## Reinitialize the class when loading a new data file to reset all class variables 
    7377        self.__init__() 
    7478        ## Check that the file exists 
     
    8286                ## Read in all child elements of top level SASroot 
    8387                self.read_children(self.raw_data) 
     88                ## Add the last data set to the list of outputs 
    8489                self.add_data_set() 
    8590        ## Return data set(s) 
     
    8893    def read_children(self, data, parent=u'SASroot'): 
    8994        """ 
    90         Recursive method for stepping through the hierarchy. Stores the data 
     95        A recursive method for stepping through the hierarchical data file. 
    9196 
    9297        :param data: h5py Group object of any kind 
     
    111116 
    112117            if isinstance(value, h5py.Group): 
     118                ##TODO: Rework this for multiple SASdata objects within a single SASentry to allow for both 1D and 2D 
     119                ##TODO:     data within the same SASentry - One 1D and one 2D data object for all SASdata sets? 
    113120                ## If this is a new sasentry, store the current data set and create a fresh Data1D/2D object 
    114121                if class_prog.match(u'SASentry'): 
    115122                    self.add_data_set(key) 
    116                 ## If the value is a group of data, iterate 
    117                 ## TODO: If Process, Aperture, etc, store and renew 
    118                 ##Recursion step to access data within the 
    119                 self.read_children(data.get(key), class_name) 
     123                ## Recursion step to access data within the group 
     124                self.read_children(value, class_name) 
     125                self.add_intermediate(class_name) 
    120126 
    121127            elif isinstance(value, h5py.Dataset): 
    122128                ## If this is a dataset, store the data appropriately 
    123                 ## TODO: Add instrumental information 
    124129                data_set = data[key][:] 
    125130 
     
    171176                        self.current_dataset.mask = np.append(self.current_dataset.mask, data_point) 
    172177 
    173                     ## Other Information 
    174                     elif key == u'wavelength': 
    175                         if data_set.size > 1: 
    176                             self.trans_spectrum.wavelength.append(data_point) 
    177                             self.source.wavelength = sum(self.trans_spectrum.wavelength)\ 
    178                                                      / len(self.trans_spectrum.wavelength) 
    179                         else: 
    180                             self.source.wavelength = data_point 
    181                     elif key == u'probe_type': 
    182                         self.source.radiation = data_point 
    183                     elif key == u'transmission': 
    184                         if data_set.size > 1: 
    185                             self.trans_spectrum.transmission.append(data_point) 
    186                             self.sample.transmission = sum(self.trans_spectrum.transmission) \ 
    187                                                      / len(self.trans_spectrum.transmission) 
    188                         else: 
    189                             self.sample.transmission = data_point 
    190  
    191178                    ## Sample Information 
    192179                    elif key == u'Title' and parent == u'SASsample': 
     
    196183                    elif key == u'temperature' and parent == u'SASsample': 
    197184                        self.sample.temperature = data_point 
     185 
     186                    ## Instrumental Information 
     187                    elif key == u'name' and parent == u'SASinstrument': 
     188                        self.current_dataset.instrument = data_point 
     189                    elif key == u'name' and parent == u'SASdetector': 
     190                        self.detector.name = data_point 
     191                    elif key == u'SDD' and parent == u'SASdetector': 
     192                        self.detector.distance = data_point 
     193                        self.detector.distance_unit = value.attrs.get(u'unit') 
     194                    elif key == u'SSD' and parent == u'SAScollimation': 
     195                        self.collimation.length = data_point 
     196                        self.collimation.length_unit = value.attrs.get(u'unit') 
     197                    elif key == u'name' and parent == u'SAScollimation': 
     198                        self.collimation.name = data_point 
    198199 
    199200                    ## Process Information 
     
    206207                    elif key == u'date' and parent == u'SASprocess': 
    207208                        self.process.date = data_point 
     209                    elif parent == u'SASprocess': 
     210                        self.process.notes.append(data_point) 
     211 
     212                    ## Transmission Spectrum 
     213                    elif key == u'T' and parent == u'SAStransmission_spectrum': 
     214                        self.trans_spectrum.transmission.append(data_point) 
     215                    elif key == u'Tdev' and parent == u'SAStransmission_spectrum': 
     216                        self.trans_spectrum.transmission_deviation.append(data_point) 
     217                    elif key == u'lambda' and parent == u'SAStransmission_spectrum': 
     218                        self.trans_spectrum.wavelength.append(data_point) 
     219 
     220                    ## Other Information 
     221                    elif key == u'wavelength' and parent == u'SASdata': 
     222                        self.source.wavelength = data_point 
     223                        self.source.wavelength_unit = value.attrs.get(u'unit') 
     224                    elif key == u'radiation' and parent == u'SASsource': 
     225                        self.source.radiation = data_point 
     226                    elif key == u'transmission' and parent == u'SASdata': 
     227                        self.sample.transmission = data_point 
    208228 
    209229                    ## Everything else goes in meta_data 
     
    216236                self.errors.add("ShouldNeverHappenException") 
    217237 
    218         return 
     238    def add_intermediate(self, parent): 
     239        """ 
     240        This method stores any intermediate objects within the final data set after fully reading the set. 
     241 
     242        :param parent: The NXclass name for the h5py Group object that just finished being processed 
     243        :return: 
     244        """ 
     245 
     246        if parent == u'SASprocess': 
     247            self.current_dataset.process.append(self.process) 
     248            self.process = Process() 
     249        elif parent == u'SASdetector': 
     250            self.current_dataset.detector.append(self.detector) 
     251            self.detector = Detector() 
     252        elif parent == u'SAStransmission_spectrum': 
     253            self.current_dataset.trans_spectrum.append(self.trans_spectrum) 
     254            self.trans_spectrum = TransmissionSpectrum() 
     255        elif parent == u'SASsource': 
     256            self.current_dataset.source = self.source 
     257            self.source = Source() 
     258        elif parent == u'SASsample': 
     259            self.current_dataset.sample = self.sample 
     260            self.sample = Sample() 
     261        elif parent == u'SAScollimation': 
     262            self.current_dataset.collimation.append(self.collimation) 
     263            self.collimation = Collimation() 
     264        elif parent == u'SASaperture': 
     265            self.collimation.aperture.append(self.aperture) 
     266            self.aperture = Aperture() 
    219267 
    220268    def final_data_cleanup(self): 
     
    222270        Does some final cleanup and formatting on self.current_dataset 
    223271        """ 
    224         ## TODO: Add all cleanup items - NOT FINISHED 
    225         ## TODO: All strings to float64 
    226         ## TODO: All intermediates (self.sample, etc.) put in self.current_dataset 
    227272 
    228273        ## Type cast data arrays to float64 and find min/max as appropriate 
     
    292337                self.current_dataset.dy =self.current_dataset.dy.astype(np.float64) 
    293338 
     339        if len(self.current_dataset.trans_spectrum) != 0: 
     340            spectrum_list = [] 
     341            for spectrum in self.current_dataset.trans_spectrum: 
     342                spectrum.transmission = np.delete(spectrum.transmission, [0]) 
     343                spectrum.transmission = spectrum.transmission.astype(np.float64) 
     344                spectrum.transmission_deviation = np.delete(spectrum.transmission_deviation, [0]) 
     345                spectrum.transmission_deviation = spectrum.transmission_deviation.astype(np.float64) 
     346                spectrum.wavelength = np.delete(spectrum.wavelength, [0]) 
     347                spectrum.wavelength = spectrum.wavelength.astype(np.float64) 
     348                spectrum_list.append(spectrum) 
     349            self.current_dataset.trans_spectrum = spectrum_list 
     350 
    294351        else: 
    295352            self.errors.add("ShouldNeverHappenException") 
     
    325382        :return: None 
    326383        """ 
    327         entry = [] 
    328         if key is not "": 
    329             entry = self.raw_data.get(key) 
    330         else: 
    331             key_prog = re.compile("sasentry*") 
    332             for key in self.raw_data.keys(): 
    333                 if (key_prog.match(key)): 
    334                     entry = self.raw_data.get(key) 
    335                     break 
     384        entry = self._find_intermediate(key, "sasentry*") 
    336385        data = entry.get("sasdata") 
    337386        if data.get("Qx") is not None: 
     
    343392        self.current_dataset.filename = self.raw_data.filename 
    344393 
     394    def _find_intermediate(self, key="", basename=""): 
     395        """ 
     396        A private method used to find an entry either by its exact key or by an approximate basename. 
     397 
     398        :param key: Exact keyname of an entry 
     399        :param basename: Approximate name of an entry 
     400        :return: 
     401        """ 
     402        entry = [] 
     403        if key != "": 
     404            entry = self.raw_data.get(key) 
     405        else: 
     406            key_prog = re.compile(basename) 
     407            for key in self.raw_data.keys(): 
     408                if (key_prog.match(key)): 
     409                    entry = self.raw_data.get(key) 
     410                    break 
     411        return entry 
     412 
    345413    def _create_unique_key(self, dictionary, name, numb=0): 
    346414        """ 
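
The excerpt ends here. For readers unfamiliar with the pattern that read_children() is built on, a minimal standalone sketch of a recursive h5py walk follows. It is illustrative only, not the changeset code: the NX_class attribute and the file name are assumptions, standing in for whatever class attribute the reader actually uses to route values into Sample, Source, Detector, and the other intermediate objects.

    import h5py

    def walk(node, parent=u'SASroot'):
        """Recursively visit every h5py Group and print each Dataset with its parent class."""
        for key in node.keys():
            value = node.get(key)
            if isinstance(value, h5py.Group):
                # NX_class is an assumption; the reader may key off a different attribute
                class_name = value.attrs.get(u'NX_class', key)
                walk(value, class_name)      # recursion step into the group
            elif isinstance(value, h5py.Dataset):
                data_set = value[:]          # read the full dataset, as read_children() does
                print("%s/%s -> %s" % (parent, key, str(data_set.shape)))

    # Usage (hypothetical file name):
    # with h5py.File("example_cansas2.h5", "r") as h5file:
    #     walk(h5file)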