source: sasview/src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py @ 18d5af02

Last change on this file since 18d5af02 was c416a17, checked in by Piotr Rozyczko <rozyczko@…>, 8 years ago

Merge branch 'master' into ESS_GUI

[68aa210]1"""
2    CanSAS 2D data reader for reading HDF5 formatted CanSAS files.
3"""
4
5import h5py
6import numpy as np
7import re
8import os
9import sys
10
[082239e]11from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\
12    Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
13    TransmissionSpectrum, Detector
[132db16]14from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
15
[68aa210]16
17class Reader():
18    """
[082239e]19    A class for reading in CanSAS v2.0 data files. The existing iteration opens
20    Mantid generated HDF5 formatted files with file extension .h5/.H5. Any
21    number of data sets may be present within the file and any dimensionality
22    of data may be used. Currently 1D and 2D SAS data sets are supported, but
23    future implementations will include 1D and 2D SESANS data.
[d72567e]24
[082239e]25    Any number of SASdata sets may be present in a SASentry and the data within
26    can be either 1D I(Q) or 2D I(Qx, Qy).
[5e906207]27    Also supports reading NXcanSAS formatted HDF5 files
[68aa210]28
29    :Dependencies:
[132db16]30        The CanSAS HDF5 reader requires h5py => v2.5.0 or later.
[68aa210]31    """
32
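    # Typical use (an illustrative sketch; the file name is hypothetical):
    #
    #     reader = Reader()
    #     data_sets = reader.read("example_sans_data.h5")
    #     # data_sets is a list of Data1D/Data2D objects, one per SASdata group
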
    # CanSAS version
    cansas_version = 2.0
    # Logged warnings or messages
    logging = None
    # List of errors for the current data set
    errors = None
    # Raw file contents to be processed
    raw_data = None
    # Data info currently being read in
    current_datainfo = None
    # SASdata set currently being read in
    current_dataset = None
    # List of plottable1D objects that should be linked to the current_datainfo
    data1d = None
    # List of plottable2D objects that should be linked to the current_datainfo
    data2d = None
    # Data type name
    type_name = "CanSAS 2.0"
    # Wildcards
    type = ["CanSAS 2.0 HDF5 Files (*.h5)|*.h5"]
    # List of allowed extensions
    ext = ['.h5', '.H5']
    # Flag to bypass extension check
    allow_all = True
    # List of files to return
    output = None

    def read(self, filename):
        """
        This is the general read method that all SasView data_loaders must have.

        :param filename: A path for an HDF5 formatted CanSAS 2D data file.
        :return: List of Data1D/2D objects and/or a list of errors.
        """
        # Reinitialize when loading a new data file to reset all class variables
        self.reset_class_variables()
        # Check that the file exists
        if os.path.isfile(filename):
            basename = os.path.basename(filename)
            _, extension = os.path.splitext(basename)
            # Only load the file if its extension is allowed
            if extension in self.ext or self.allow_all:
                # Load the data file
                self.raw_data = h5py.File(filename, 'r')
                # Read in all child elements of top level SASroot
                self.read_children(self.raw_data, [])
                # Add the last data set to the list of outputs
                self.add_data_set()
                # Close the data file
                self.raw_data.close()
        # Return data set(s)
        return self.output

    def reset_class_variables(self):
        """
        Reset all class variables to their initial states before a new file is read.
        """
        self.current_datainfo = None
        self.current_dataset = None
        self.data1d = []
        self.data2d = []
        self.raw_data = None
        self.errors = set()
        self.logging = []
        self.output = []
        self.parent_class = u''
        self.detector = Detector()
        self.collimation = Collimation()
        self.aperture = Aperture()
        self.process = Process()
        self.trans_spectrum = TransmissionSpectrum()

    def read_children(self, data, parent_list):
        """
        A recursive method for stepping through the hierarchical data file.

        :param data: h5py Group object of any kind
        :param parent_list: List of names of the parent groups
        """

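        # Expected hierarchy (simplified): SASroot -> SASentry -> SASdata /
        # SASsample / SASinstrument / SASprocess / ... with numeric datasets
        # such as I, Q, Qx, Qy and Idev as the leaves. The group type is read
        # from the canSAS_class attribute, falling back to NX_class.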
        # Loop through each element of the parent and process accordingly
        for key in data.keys():
            # Get all information for the current key
            value = data.get(key)
            if value.attrs.get(u'canSAS_class') is not None:
                class_name = value.attrs.get(u'canSAS_class')
            else:
                class_name = value.attrs.get(u'NX_class')
            if class_name is not None:
                class_prog = re.compile(class_name)
            else:
                class_prog = re.compile(value.name)

            if isinstance(value, h5py.Group):
                self.parent_class = class_name
                parent_list.append(key)
                # If a new sasentry, store the current data sets and create
                # a fresh Data1D/2D object
                if class_prog.match(u'SASentry'):
                    self.add_data_set(key)
                elif class_prog.match(u'SASdata'):
                    self._initialize_new_data_set(parent_list)
                # Recursion step to access data within the group
                self.read_children(value, parent_list)
                self.add_intermediate()
                parent_list.remove(key)

            elif isinstance(value, h5py.Dataset):
                # If this is a dataset, store the data appropriately
                data_set = data[key][:]
                unit = self._get_unit(value)

                # I and Q Data
                if key == u'I':
                    if isinstance(self.current_dataset, plottable_2D):
                        self.current_dataset.data = data_set
                        self.current_dataset.zaxis("Intensity", unit)
                    else:
                        self.current_dataset.y = data_set.flatten()
                        self.current_dataset.yaxis("Intensity", unit)
                    continue
                elif key == u'Idev':
                    if isinstance(self.current_dataset, plottable_2D):
                        self.current_dataset.err_data = data_set.flatten()
                    else:
                        self.current_dataset.dy = data_set.flatten()
                    continue
                elif key == u'Q':
                    self.current_dataset.xaxis("Q", unit)
                    if isinstance(self.current_dataset, plottable_2D):
                        self.current_dataset.q = data_set.flatten()
                    else:
                        self.current_dataset.x = data_set.flatten()
                    continue
                elif key == u'Qy':
                    self.current_dataset.yaxis("Q_y", unit)
                    self.current_dataset.qy_data = data_set.flatten()
                    continue
                elif key == u'Qydev':
                    self.current_dataset.dqy_data = data_set.flatten()
                    continue
                elif key == u'Qx':
                    self.current_dataset.xaxis("Q_x", unit)
                    self.current_dataset.qx_data = data_set.flatten()
                    continue
                elif key == u'Qxdev':
                    self.current_dataset.dqx_data = data_set.flatten()
                    continue
                elif key == u'Mask':
                    self.current_dataset.mask = data_set.flatten()
                    continue
                # Transmission Spectrum
                elif (key == u'T'
                      and self.parent_class == u'SAStransmission_spectrum'):
                    self.trans_spectrum.transmission = data_set.flatten()
                    continue
                elif (key == u'Tdev'
                      and self.parent_class == u'SAStransmission_spectrum'):
                    self.trans_spectrum.transmission_deviation = \
                        data_set.flatten()
                    continue
                elif (key == u'lambda'
                      and self.parent_class == u'SAStransmission_spectrum'):
                    self.trans_spectrum.wavelength = data_set.flatten()
                    continue

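                # The remaining keys hold scalar metadata; walk the values and
                # route each one by key name plus the parent group class.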
                for data_point in data_set:
                    # Top Level Meta Data
                    if key == u'definition':
                        self.current_datainfo.meta_data['reader'] = data_point
                    elif key == u'run':
                        self.current_datainfo.run.append(data_point)
                    elif key == u'title':
                        self.current_datainfo.title = data_point
                    elif key == u'SASnote':
                        self.current_datainfo.notes.append(data_point)

                    # Sample Information
                    # CanSAS 2.0 format
                    elif key == u'Title' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.name = data_point
                    # NXcanSAS format
                    elif key == u'name' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.name = data_point
                    # NXcanSAS format
                    elif key == u'ID' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.name = data_point
                    elif (key == u'thickness'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.thickness = data_point
                    elif (key == u'temperature'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.temperature = data_point
                    elif (key == u'transmission'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.transmission = data_point
                    elif (key == u'x_position'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.position.x = data_point
                    elif (key == u'y_position'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.position.y = data_point
                    elif key == u'pitch' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.orientation.x = data_point
                    elif key == u'yaw' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.orientation.y = data_point
                    elif key == u'roll' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.orientation.z = data_point
                    elif (key == u'details'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.details.append(data_point)

                    # Instrumental Information
                    elif (key == u'name'
                          and self.parent_class == u'SASinstrument'):
                        self.current_datainfo.instrument = data_point
                    elif key == u'name' and self.parent_class == u'SASdetector':
                        self.detector.name = data_point
                    elif key == u'SDD' and self.parent_class == u'SASdetector':
                        self.detector.distance = float(data_point)
                        self.detector.distance_unit = unit
                    elif (key == u'slit_length'
                          and self.parent_class == u'SASdetector'):
                        self.detector.slit_length = float(data_point)
                        self.detector.slit_length_unit = unit
                    elif (key == u'x_position'
                          and self.parent_class == u'SASdetector'):
                        self.detector.offset.x = float(data_point)
                        self.detector.offset_unit = unit
                    elif (key == u'y_position'
                          and self.parent_class == u'SASdetector'):
                        self.detector.offset.y = float(data_point)
                        self.detector.offset_unit = unit
                    elif (key == u'pitch'
                          and self.parent_class == u'SASdetector'):
                        self.detector.orientation.x = float(data_point)
                        self.detector.orientation_unit = unit
                    elif key == u'roll' and self.parent_class == u'SASdetector':
                        self.detector.orientation.z = float(data_point)
                        self.detector.orientation_unit = unit
                    elif key == u'yaw' and self.parent_class == u'SASdetector':
                        self.detector.orientation.y = float(data_point)
                        self.detector.orientation_unit = unit
                    elif (key == u'beam_center_x'
                          and self.parent_class == u'SASdetector'):
                        self.detector.beam_center.x = float(data_point)
                        self.detector.beam_center_unit = unit
                    elif (key == u'beam_center_y'
                          and self.parent_class == u'SASdetector'):
                        self.detector.beam_center.y = float(data_point)
                        self.detector.beam_center_unit = unit
                    elif (key == u'x_pixel_size'
                          and self.parent_class == u'SASdetector'):
                        self.detector.pixel_size.x = float(data_point)
                        self.detector.pixel_size_unit = unit
                    elif (key == u'y_pixel_size'
                          and self.parent_class == u'SASdetector'):
                        self.detector.pixel_size.y = float(data_point)
                        self.detector.pixel_size_unit = unit
                    elif (key == u'distance'
                          and self.parent_class == u'SAScollimation'):
                        self.collimation.length = data_point
                        self.collimation.length_unit = unit
                    elif (key == u'name'
                          and self.parent_class == u'SAScollimation'):
                        self.collimation.name = data_point
                    elif (key == u'shape'
                          and self.parent_class == u'SASaperture'):
                        self.aperture.shape = data_point
                    elif (key == u'x_gap'
                          and self.parent_class == u'SASaperture'):
                        self.aperture.size.x = data_point
                    elif (key == u'y_gap'
                          and self.parent_class == u'SASaperture'):
                        self.aperture.size.y = data_point

                    # Process Information
                    elif (key == u'Title'
                          and self.parent_class == u'SASprocess'): # CanSAS 2.0
                        self.process.name = data_point
                    elif (key == u'name'
                          and self.parent_class == u'SASprocess'): # NXcanSAS
                        self.process.name = data_point
                    elif (key == u'description'
                          and self.parent_class == u'SASprocess'):
                        self.process.description = data_point
                    elif key == u'date' and self.parent_class == u'SASprocess':
                        self.process.date = data_point
                    elif key == u'term' and self.parent_class == u'SASprocess':
                        self.process.term = data_point
                    elif self.parent_class == u'SASprocess':
                        self.process.notes.append(data_point)

                    # Source
                    elif (key == u'wavelength'
                          and self.parent_class == u'SASdata'):
                        self.current_datainfo.source.wavelength = data_point
                        self.current_datainfo.source.wavelength_unit = unit
                    elif (key == u'incident_wavelength'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.wavelength = data_point
                        self.current_datainfo.source.wavelength_unit = unit
                    elif (key == u'wavelength_max'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.wavelength_max = data_point
                        self.current_datainfo.source.wavelength_max_unit = unit
                    elif (key == u'wavelength_min'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.wavelength_min = data_point
                        self.current_datainfo.source.wavelength_min_unit = unit
                    elif (key == u'incident_wavelength_spread'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.wavelength_spread = \
                            data_point
                        self.current_datainfo.source.wavelength_spread_unit = \
                            unit
                    elif (key == u'beam_size_x'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.beam_size.x = data_point
                        self.current_datainfo.source.beam_size_unit = unit
                    elif (key == u'beam_size_y'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.beam_size.y = data_point
                        self.current_datainfo.source.beam_size_unit = unit
                    elif (key == u'beam_shape'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.beam_shape = data_point
                    elif (key == u'radiation'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.radiation = data_point
                    elif (key == u'transmission'
                          and self.parent_class == u'SASdata'):
                        self.current_datainfo.sample.transmission = data_point

                    # Everything else goes in meta_data
                    else:
                        new_key = self._create_unique_key(
                            self.current_datainfo.meta_data, key)
                        self.current_datainfo.meta_data[new_key] = data_point

            else:
                # It is unclear whether this branch is ever reachable
                self.errors.add("ShouldNeverHappenException")

    def add_intermediate(self):
        """
        This method stores any intermediate objects within the final data set
        after fully reading the set.

        The object to store is chosen from self.parent_class, the NXclass name
        of the h5py Group object that just finished being processed.
        """

        if self.parent_class == u'SASprocess':
            self.current_datainfo.process.append(self.process)
            self.process = Process()
        elif self.parent_class == u'SASdetector':
            self.current_datainfo.detector.append(self.detector)
            self.detector = Detector()
        elif self.parent_class == u'SAStransmission_spectrum':
            self.current_datainfo.trans_spectrum.append(self.trans_spectrum)
            self.trans_spectrum = TransmissionSpectrum()
        elif self.parent_class == u'SAScollimation':
            self.current_datainfo.collimation.append(self.collimation)
            self.collimation = Collimation()
        elif self.parent_class == u'SASaperture':
            self.collimation.aperture.append(self.aperture)
            self.aperture = Aperture()
        elif self.parent_class == u'SASdata':
            if isinstance(self.current_dataset, plottable_2D):
                self.data2d.append(self.current_dataset)
            elif isinstance(self.current_dataset, plottable_1D):
                self.data1d.append(self.current_dataset)

    def final_data_cleanup(self):
        """
        Does some final cleanup and formatting on self.current_datainfo and
        all data1D and data2D objects and then combines the data and info into
        Data1D and Data2D objects.
        """

        # Type cast data arrays to float64
        if len(self.current_datainfo.trans_spectrum) > 0:
            spectrum_list = []
            for spectrum in self.current_datainfo.trans_spectrum:
                spectrum.transmission = np.delete(spectrum.transmission, [0])
                spectrum.transmission = spectrum.transmission.astype(np.float64)
                spectrum.transmission_deviation = np.delete(
                    spectrum.transmission_deviation, [0])
                spectrum.transmission_deviation = \
                    spectrum.transmission_deviation.astype(np.float64)
                spectrum.wavelength = np.delete(spectrum.wavelength, [0])
                spectrum.wavelength = spectrum.wavelength.astype(np.float64)
                if len(spectrum.transmission) > 0:
                    spectrum_list.append(spectrum)
            self.current_datainfo.trans_spectrum = spectrum_list

        # Append errors to dataset and reset class errors
        self.current_datainfo.errors = self.errors
        self.errors.clear()

        # Combine all plottables with datainfo and append each to output
        # Type cast data arrays to float64 and find min/max as appropriate
        for dataset in self.data2d:
            dataset.data = dataset.data.astype(np.float64)
            dataset.err_data = dataset.err_data.astype(np.float64)
            if dataset.qx_data is not None:
                dataset.xmin = np.min(dataset.qx_data)
                dataset.xmax = np.max(dataset.qx_data)
                dataset.qx_data = dataset.qx_data.astype(np.float64)
            if dataset.dqx_data is not None:
                dataset.dqx_data = dataset.dqx_data.astype(np.float64)
            if dataset.qy_data is not None:
                dataset.ymin = np.min(dataset.qy_data)
                dataset.ymax = np.max(dataset.qy_data)
                dataset.qy_data = dataset.qy_data.astype(np.float64)
            if dataset.dqy_data is not None:
                dataset.dqy_data = dataset.dqy_data.astype(np.float64)
            if dataset.q_data is not None:
                dataset.q_data = dataset.q_data.astype(np.float64)
            zeros = np.ones(dataset.data.size, dtype=bool)
            try:
                for i in range(0, dataset.mask.size - 1):
                    zeros[i] = dataset.mask[i]
            except:
                self.errors.add(sys.exc_value)
            dataset.mask = zeros
            # Calculate the actual Q matrix
            try:
                if dataset.q_data.size <= 1:
                    dataset.q_data = np.sqrt(dataset.qx_data
                                             * dataset.qx_data
                                             + dataset.qy_data
                                             * dataset.qy_data)
            except:
                dataset.q_data = None

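            # For gridded 2D data the flattened Q arrays are assumed to be in
            # row-major order with Qx varying fastest, so the first n_cols
            # values of qx_data give the x bins and every n_cols-th value of
            # qy_data gives the y bins.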
            if dataset.data.ndim == 2:
                (n_rows, n_cols) = dataset.data.shape
                dataset.y_bins = dataset.qy_data[0::n_cols]
                dataset.x_bins = dataset.qx_data[:n_cols]
                dataset.data = dataset.data.flatten()

            final_dataset = combine_data_info_with_plottable(
                dataset, self.current_datainfo)
            self.output.append(final_dataset)

        for dataset in self.data1d:
            if dataset.x is not None:
                dataset.x = dataset.x.astype(np.float64)
                dataset.xmin = np.min(dataset.x)
                dataset.xmax = np.max(dataset.x)
            if dataset.y is not None:
                dataset.y = dataset.y.astype(np.float64)
                dataset.ymin = np.min(dataset.y)
                dataset.ymax = np.max(dataset.y)
            if dataset.dx is not None:
                dataset.dx = dataset.dx.astype(np.float64)
            if dataset.dxl is not None:
                dataset.dxl = dataset.dxl.astype(np.float64)
            if dataset.dxw is not None:
                dataset.dxw = dataset.dxw.astype(np.float64)
            if dataset.dy is not None:
                dataset.dy = dataset.dy.astype(np.float64)
            final_dataset = combine_data_info_with_plottable(
                dataset, self.current_datainfo)
            self.output.append(final_dataset)

    def add_data_set(self, key=""):
        """
        Adds the current_dataset to the list of outputs after performing final
        processing on the data and then calls a private method to generate a
        new data set.

        :param key: NeXus group name for current tree level
        """

        if self.current_datainfo and self.current_dataset:
            self.final_data_cleanup()
        self.data1d = []
        self.data2d = []
        self.current_datainfo = DataInfo()

    def _initialize_new_data_set(self, parent_list=None):
        """
        A private class method to generate a new 1D or 2D data object based on
        the type of data within the set. Outside methods should call
        add_data_set() to be sure any existing data is stored properly.

        :param parent_list: List of names of parent elements
        """

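        # A plottable_2D is created when a dataset matching 'Qx' exists under
        # the current SASdata group; otherwise the data is assumed to be 1D.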
        if parent_list is None:
            parent_list = []
        if self._find_intermediate(parent_list, "Qx"):
            self.current_dataset = plottable_2D()
        else:
            x = np.array(0)
            y = np.array(0)
            self.current_dataset = plottable_1D(x, y)
        self.current_datainfo.filename = self.raw_data.filename

    def _find_intermediate(self, parent_list, basename=""):
        """
        A private class method used to find an entry by either using a direct
        key or knowing the approximate basename.

        :param parent_list: List of parent nodes in the HDF5 file
        :param basename: Approximate name of an entry to search for
        :return: True if a matching entry is found, False otherwise
        """

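        # For example, with parent_list == ['sasentry01', 'sasdata01'] (group
        # names shown for illustration) and basename 'Qx', this walks down to
        # /sasentry01/sasdata01 and reports whether any key there matches 'Qx'.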
        entry = False
        key_prog = re.compile(basename)
        top = self.raw_data
        for parent in parent_list:
            top = top.get(parent)
        for key in top.keys():
            if key_prog.match(key):
                entry = True
                break
        return entry

    def _create_unique_key(self, dictionary, name, numb=0):
        """
        Create a unique key value for any dictionary to prevent overwriting.
        Recurses until a unique key value is found.

        :param dictionary: A dictionary with any number of entries
        :param name: The key of the item to be added to the dictionary
        :param numb: The number to be appended to the name, starts at 0
        :return: The new name for the dictionary entry
        """
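        # For example, adding the key 'comment' three times yields 'comment',
        # 'comment_1' and 'comment_2' (illustrative key names).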
        if dictionary.get(name) is not None:
            numb += 1
            name = name.split("_")[0]
            name += "_{0}".format(numb)
            name = self._create_unique_key(dictionary, name, numb)
        return name

    def _get_unit(self, value):
        """
        Find the unit for a particular value within the h5py dictionary

        :param value: attribute dictionary for a particular value set
        :return: unit for the value passed to the method
        """
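        # For example, a dataset whose attrs contain {'units': '1/A'} is
        # reported as 'A^{-1}' after the conversion below.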
        unit = value.attrs.get(u'units')
        if unit is None:
            unit = value.attrs.get(u'unit')
        # Convert the unit formats
        if unit == "1/A":
            unit = "A^{-1}"
        elif unit == "1/cm":
            unit = "cm^{-1}"
        return unit