source: sasview/src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py @ 7b50f14

Last change: 7b50f14, checked in by Paul Kienzle <pkienzle@…>: restore python 2 functionality

[68aa210]1"""
2    CanSAS 2D data reader for reading HDF5 formatted CanSAS files.
3"""
4
5import h5py
6import numpy as np
7import re
8import os
9import sys
10
[7b50f14]11from ..data_info import plottable_1D, plottable_2D,\
[082239e]12    Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
13    TransmissionSpectrum, Detector
[7b50f14]14from ..data_info import combine_data_info_with_plottable
15from ..loader_exceptions import FileContentsException, DefaultReaderException
16from ..file_reader_base_class import FileReader, decode
[5c5e7fd]17
18def h5attr(node, key, default=None):
19    return decode(node.attrs.get(key, default))
[68aa210]20
[9d786e5]21class Reader(FileReader):
[68aa210]22    """
[082239e]23    A class for reading in CanSAS v2.0 data files. The existing iteration opens
24    Mantid generated HDF5 formatted files with file extension .h5/.H5. Any
25    number of data sets may be present within the file and any dimensionality
26    of data may be used. Currently 1D and 2D SAS data sets are supported, but
27    future implementations will include 1D and 2D SESANS data.
[d72567e]28
[082239e]29    Any number of SASdata sets may be present in a SASentry and the data within
30    can be either 1D I(Q) or 2D I(Qx, Qy).
[68aa210]31
[5e906207]32    Also supports reading NXcanSAS formatted HDF5 files
33
[68aa210]34    :Dependencies:
[d72567e]35        The CanSAS HDF5 reader requires h5py => v2.5.0 or later.
[68aa210]36    """
37
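    # Illustrative usage sketch only, not part of the reader itself. It
    # assumes the FileReader base class exposes a read(path) entry point and
    # that "example.h5" is a hypothetical local CanSAS 2.0 / NXcanSAS file:
    #
    #     reader = Reader()
    #     output = reader.read("example.h5")
    #     for data in output:
    #         print(data.filename, data.title)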
    # CanSAS version
    cansas_version = 2.0
    # Logged warnings or messages
    logging = None
    # List of errors for the current data set
    errors = None
    # Raw file contents to be processed
    raw_data = None
    # List of plottable1D objects that should be linked to the current_datainfo
    data1d = None
    # List of plottable2D objects that should be linked to the current_datainfo
    data2d = None
    # Data type name
    type_name = "CanSAS 2.0"
    # Wildcards
    type = ["CanSAS 2.0 HDF5 Files (*.h5)|*.h5"]
    # List of allowed extensions
    ext = ['.h5', '.H5']
    # Flag to bypass extension check
    allow_all = True

    def get_file_contents(self):
        """
        This is the general read method that all SasView data_loaders must
        have. The file to read is taken from the already-open handle in
        self.f_open.

        :return: None. Populates self.output with Data1D/Data2D objects.
        """
        # Reinitialize when loading a new data file to reset all class variables
        self.reset_class_variables()

        filename = self.f_open.name
        self.f_open.close() # IO handled by h5py

        # Check that the file exists
        if os.path.isfile(filename):
            basename = os.path.basename(filename)
            _, extension = os.path.splitext(basename)
            # If the file type is not allowed, return empty list
            if extension in self.ext or self.allow_all:
                # Load the data file
                try:
                    self.raw_data = h5py.File(filename, 'r')
                except Exception as e:
                    if extension not in self.ext:
                        # The extension check was bypassed (allow_all), so
                        # report a generic reader failure rather than a
                        # file-contents error.
                        msg = "CanSAS2.0 HDF5 Reader could not load file {}".format(basename)
                        raise DefaultReaderException(msg)
                    raise FileContentsException(str(e))
                try:
                    # Read in all child elements of top level SASroot
                    self.read_children(self.raw_data, [])
                    # Add the last data set to the list of outputs
                    self.add_data_set()
                except Exception as exc:
                    raise FileContentsException(str(exc))
                finally:
                    # Close the data file
                    self.raw_data.close()

                for dataset in self.output:
                    if isinstance(dataset, Data1D):
                        if dataset.x.size < 5:
                            self.output = []
                            raise FileContentsException("Fewer than 5 data points found.")

    def reset_class_variables(self):
        """
        Reset all class variables to their initial states.
        """
        self.current_datainfo = None
        self.current_dataset = None
        self.data1d = []
        self.data2d = []
        self.raw_data = None
        self.errors = set()
        self.logging = []
        self.output = []
        self.parent_class = u''
        self.detector = Detector()
        self.collimation = Collimation()
        self.aperture = Aperture()
        self.process = Process()
        self.trans_spectrum = TransmissionSpectrum()

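    # Minimal sketch of the kind of hierarchy read_children() walks, written
    # with the h5py API (illustrative only; the file name and group names are
    # hypothetical, but the canSAS_class attributes and the Q/I dataset names
    # match the keys handled below):
    #
    #     with h5py.File("example.h5", "w") as f:
    #         entry = f.create_group("sasentry01")
    #         entry.attrs["canSAS_class"] = "SASentry"
    #         sasdata = entry.create_group("sasdata01")
    #         sasdata.attrs["canSAS_class"] = "SASdata"
    #         sasdata.create_dataset("Q", data=np.linspace(0.001, 0.5, 100))
    #         sasdata.create_dataset("I", data=np.ones(100))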
    def read_children(self, data, parent_list):
        """
        A recursive method for stepping through the hierarchical data file.

        :param data: h5py Group object of any kind
        :param parent_list: List of names of the parent groups
        """

        # Loop through each element of the parent and process accordingly
        for key in data.keys():
            # Get all information for the current key
            value = data.get(key)
            class_name = h5attr(value, u'canSAS_class')
            if class_name is None:
                class_name = h5attr(value, u'NX_class')
            if class_name is not None:
                class_prog = re.compile(class_name)
            else:
                class_prog = re.compile(value.name)

            if isinstance(value, h5py.Group):
                # Set parent class before recursion
                self.parent_class = class_name
                parent_list.append(key)
                # If a new sasentry, store the current data sets and create
                # a fresh Data1D/2D object
                if class_prog.match(u'SASentry'):
                    self.add_data_set(key)
                elif class_prog.match(u'SASdata'):
                    self._initialize_new_data_set(parent_list)
                # Recursion step to access data within the group
                self.read_children(value, parent_list)
                # Reset parent class when returning from recursive method
                self.parent_class = class_name
                self.add_intermediate()
                parent_list.remove(key)

            elif isinstance(value, h5py.Dataset):
                # If this is a dataset, store the data appropriately
                data_set = data[key][:]
                unit = self._get_unit(value)

                # I and Q Data
                if key == u'I':
                    if isinstance(self.current_dataset, plottable_2D):
                        self.current_dataset.data = data_set
                        self.current_dataset.zaxis("Intensity", unit)
                    else:
                        self.current_dataset.y = data_set.flatten()
                        self.current_dataset.yaxis("Intensity", unit)
                    continue
                elif key == u'Idev':
                    if isinstance(self.current_dataset, plottable_2D):
                        self.current_dataset.err_data = data_set.flatten()
                    else:
                        self.current_dataset.dy = data_set.flatten()
                    continue
                elif key == u'Q':
                    self.current_dataset.xaxis("Q", unit)
                    if isinstance(self.current_dataset, plottable_2D):
                        self.current_dataset.q = data_set.flatten()
                    else:
                        self.current_dataset.x = data_set.flatten()
                    continue
                elif key == u'Qdev':
                    self.current_dataset.dx = data_set.flatten()
                    continue
                elif key == u'dQw':
                    self.current_dataset.dxw = data_set.flatten()
                    continue
                elif key == u'dQl':
                    self.current_dataset.dxl = data_set.flatten()
                    continue
                elif key == u'Qy':
                    self.current_dataset.yaxis("Q_y", unit)
                    self.current_dataset.qy_data = data_set.flatten()
                    continue
                elif key == u'Qydev':
                    self.current_dataset.dqy_data = data_set.flatten()
                    continue
                elif key == u'Qx':
                    self.current_dataset.xaxis("Q_x", unit)
                    self.current_dataset.qx_data = data_set.flatten()
                    continue
                elif key == u'Qxdev':
                    self.current_dataset.dqx_data = data_set.flatten()
                    continue
                elif key == u'Mask':
                    self.current_dataset.mask = data_set.flatten()
                    continue
                # Transmission Spectrum
                elif (key == u'T'
                      and self.parent_class == u'SAStransmission_spectrum'):
                    self.trans_spectrum.transmission = data_set.flatten()
                    continue
                elif (key == u'Tdev'
                      and self.parent_class == u'SAStransmission_spectrum'):
                    self.trans_spectrum.transmission_deviation = \
                        data_set.flatten()
                    continue
                elif (key == u'lambda'
                      and self.parent_class == u'SAStransmission_spectrum'):
                    self.trans_spectrum.wavelength = data_set.flatten()
                    continue

                for data_point in data_set:
                    if data_point.dtype.char == 'S':
                        data_point = decode(bytes(data_point))
                    # Top Level Meta Data
                    if key == u'definition':
                        self.current_datainfo.meta_data['reader'] = data_point
                    elif key == u'run':
                        self.current_datainfo.run.append(data_point)
                        try:
                            run_name = h5attr(value, 'name')
                            run_dict = {data_point: run_name}
                            self.current_datainfo.run_name = run_dict
                        except Exception:
                            pass
                    elif key == u'title':
                        self.current_datainfo.title = data_point
                    elif key == u'SASnote':
                        self.current_datainfo.notes.append(data_point)

                    # Sample Information
                    # CanSAS 2.0 format
                    elif key == u'Title' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.name = data_point
                    # NXcanSAS format
                    elif key == u'name' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.name = data_point
                    # NXcanSAS format
                    elif key == u'ID' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.name = data_point
                    elif (key == u'thickness'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.thickness = data_point
                    elif (key == u'temperature'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.temperature = data_point
                    elif (key == u'transmission'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.transmission = data_point
                    elif (key == u'x_position'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.position.x = data_point
                    elif (key == u'y_position'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.position.y = data_point
                    elif key == u'pitch' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.orientation.x = data_point
                    elif key == u'yaw' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.orientation.y = data_point
                    elif key == u'roll' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.orientation.z = data_point
                    elif (key == u'details'
                          and self.parent_class == u'SASsample'):
                        self.current_datainfo.sample.details.append(data_point)

                    # Instrumental Information
                    elif (key == u'name'
                          and self.parent_class == u'SASinstrument'):
                        self.current_datainfo.instrument = data_point
                    elif key == u'name' and self.parent_class == u'SASdetector':
                        self.detector.name = data_point
                    elif key == u'SDD' and self.parent_class == u'SASdetector':
                        self.detector.distance = float(data_point)
                        self.detector.distance_unit = unit
                    elif (key == u'slit_length'
                          and self.parent_class == u'SASdetector'):
                        self.detector.slit_length = float(data_point)
                        self.detector.slit_length_unit = unit
                    elif (key == u'x_position'
                          and self.parent_class == u'SASdetector'):
                        self.detector.offset.x = float(data_point)
                        self.detector.offset_unit = unit
                    elif (key == u'y_position'
                          and self.parent_class == u'SASdetector'):
                        self.detector.offset.y = float(data_point)
                        self.detector.offset_unit = unit
                    elif (key == u'pitch'
                          and self.parent_class == u'SASdetector'):
                        self.detector.orientation.x = float(data_point)
                        self.detector.orientation_unit = unit
                    elif key == u'roll' and self.parent_class == u'SASdetector':
                        self.detector.orientation.z = float(data_point)
                        self.detector.orientation_unit = unit
                    elif key == u'yaw' and self.parent_class == u'SASdetector':
                        self.detector.orientation.y = float(data_point)
                        self.detector.orientation_unit = unit
                    elif (key == u'beam_center_x'
                          and self.parent_class == u'SASdetector'):
                        self.detector.beam_center.x = float(data_point)
                        self.detector.beam_center_unit = unit
                    elif (key == u'beam_center_y'
                          and self.parent_class == u'SASdetector'):
                        self.detector.beam_center.y = float(data_point)
                        self.detector.beam_center_unit = unit
                    elif (key == u'x_pixel_size'
                          and self.parent_class == u'SASdetector'):
                        self.detector.pixel_size.x = float(data_point)
                        self.detector.pixel_size_unit = unit
                    elif (key == u'y_pixel_size'
                          and self.parent_class == u'SASdetector'):
                        self.detector.pixel_size.y = float(data_point)
                        self.detector.pixel_size_unit = unit
                    elif (key == u'distance'
                          and self.parent_class == u'SAScollimation'):
                        self.collimation.length = data_point
                        self.collimation.length_unit = unit
                    elif (key == u'name'
                          and self.parent_class == u'SAScollimation'):
                        self.collimation.name = data_point
                    elif (key == u'shape'
                          and self.parent_class == u'SASaperture'):
                        self.aperture.shape = data_point
                    elif (key == u'x_gap'
                          and self.parent_class == u'SASaperture'):
                        self.aperture.size.x = data_point
                    elif (key == u'y_gap'
                          and self.parent_class == u'SASaperture'):
                        self.aperture.size.y = data_point

                    # Process Information
                    elif (key == u'Title'
                          and self.parent_class == u'SASprocess'): # CanSAS 2.0
                        self.process.name = data_point
                    elif (key == u'name'
                          and self.parent_class == u'SASprocess'): # NXcanSAS
                        self.process.name = data_point
                    elif (key == u'description'
                          and self.parent_class == u'SASprocess'):
                        self.process.description = data_point
                    elif key == u'date' and self.parent_class == u'SASprocess':
                        self.process.date = data_point
                    elif key == u'term' and self.parent_class == u'SASprocess':
                        self.process.term = data_point
                    elif self.parent_class == u'SASprocess':
                        self.process.notes.append(data_point)

                    # Source
                    elif (key == u'wavelength'
                          and self.parent_class == u'SASdata'):
                        self.current_datainfo.source.wavelength = data_point
                        self.current_datainfo.source.wavelength_unit = unit
                    elif (key == u'incident_wavelength'
                          and self.parent_class == 'SASsource'):
                        self.current_datainfo.source.wavelength = data_point
                        self.current_datainfo.source.wavelength_unit = unit
                    elif (key == u'wavelength_max'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.wavelength_max = data_point
                        self.current_datainfo.source.wavelength_max_unit = unit
                    elif (key == u'wavelength_min'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.wavelength_min = data_point
                        self.current_datainfo.source.wavelength_min_unit = unit
                    elif (key == u'incident_wavelength_spread'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.wavelength_spread = \
                            data_point
                        self.current_datainfo.source.wavelength_spread_unit = \
                            unit
                    elif (key == u'beam_size_x'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.beam_size.x = data_point
                        self.current_datainfo.source.beam_size_unit = unit
                    elif (key == u'beam_size_y'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.beam_size.y = data_point
                        self.current_datainfo.source.beam_size_unit = unit
                    elif (key == u'beam_shape'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.beam_shape = data_point
                    elif (key == u'radiation'
                          and self.parent_class == u'SASsource'):
                        self.current_datainfo.source.radiation = data_point
                    elif (key == u'transmission'
                          and self.parent_class == u'SASdata'):
                        self.current_datainfo.sample.transmission = data_point

                    # Everything else goes in meta_data
                    else:
                        new_key = self._create_unique_key(
                            self.current_datainfo.meta_data, key)
                        self.current_datainfo.meta_data[new_key] = data_point

            else:
                # This branch should be unreachable; record it if it happens
                self.errors.add("ShouldNeverHappenException")

    def add_intermediate(self):
        """
        This method stores any intermediate objects within the final data set
        after fully reading the set.

        The NXclass name of the h5py Group that just finished being processed
        is taken from self.parent_class.
        """

        if self.parent_class == u'SASprocess':
            self.current_datainfo.process.append(self.process)
            self.process = Process()
        elif self.parent_class == u'SASdetector':
            self.current_datainfo.detector.append(self.detector)
            self.detector = Detector()
        elif self.parent_class == u'SAStransmission_spectrum':
            self.current_datainfo.trans_spectrum.append(self.trans_spectrum)
            self.trans_spectrum = TransmissionSpectrum()
        elif self.parent_class == u'SAScollimation':
            self.current_datainfo.collimation.append(self.collimation)
            self.collimation = Collimation()
        elif self.parent_class == u'SASaperture':
            self.collimation.aperture.append(self.aperture)
            self.aperture = Aperture()
        elif self.parent_class == u'SASdata':
            if isinstance(self.current_dataset, plottable_2D):
                self.data2d.append(self.current_dataset)
            elif isinstance(self.current_dataset, plottable_1D):
                self.data1d.append(self.current_dataset)

    def final_data_cleanup(self):
        """
        Does some final cleanup and formatting on self.current_datainfo and
        all data1D and data2D objects and then combines the data and info into
        Data1D and Data2D objects.
        """
        # Type cast data arrays to float64
        if len(self.current_datainfo.trans_spectrum) > 0:
            spectrum_list = []
            for spectrum in self.current_datainfo.trans_spectrum:
                spectrum.transmission = np.delete(spectrum.transmission, [0])
                spectrum.transmission = spectrum.transmission.astype(np.float64)
                spectrum.transmission_deviation = np.delete(
                    spectrum.transmission_deviation, [0])
                spectrum.transmission_deviation = \
                    spectrum.transmission_deviation.astype(np.float64)
                spectrum.wavelength = np.delete(spectrum.wavelength, [0])
                spectrum.wavelength = spectrum.wavelength.astype(np.float64)
                if len(spectrum.transmission) > 0:
                    spectrum_list.append(spectrum)
            self.current_datainfo.trans_spectrum = spectrum_list

        # Append errors to dataset and reset class errors
        # Copy the set so clearing self.errors does not empty the stored errors
        self.current_datainfo.errors = set(self.errors)
        self.errors.clear()

        # Combine all plottables with datainfo and append each to output
        # Type cast data arrays to float64 and find min/max as appropriate
        for dataset in self.data2d:
            zeros = np.ones(dataset.data.size, dtype=bool)
            try:
                for i in range(0, dataset.mask.size - 1):
                    zeros[i] = dataset.mask[i]
            except:
                self.errors.add(str(sys.exc_info()[1]))
            dataset.mask = zeros
            # Calculate the actual Q matrix
            try:
                if dataset.q_data.size <= 1:
                    dataset.q_data = np.sqrt(dataset.qx_data
                                             * dataset.qx_data
                                             + dataset.qy_data
                                             * dataset.qy_data)
            except:
                dataset.q_data = None

            if dataset.data.ndim == 2:
                (n_rows, n_cols) = dataset.data.shape
                dataset.y_bins = dataset.qy_data[0::n_cols]
                dataset.x_bins = dataset.qx_data[:n_cols]
                dataset.data = dataset.data.flatten()
            self.current_dataset = dataset
            self.send_to_output()

        for dataset in self.data1d:
            self.current_dataset = dataset
            self.send_to_output()

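    # Note on the 2D binning above (descriptive only): assuming the Qx/Qy
    # arrays describe a regular n_rows x n_cols grid stored row by row, as the
    # code expects, qx_data[:n_cols] recovers the x bin centres and
    # qy_data[0::n_cols] the y bin centres of the detector image.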
    def add_data_set(self, key=""):
        """
        Adds the current_dataset to the list of outputs after performing final
        processing on the data and then calls a private method to generate a
        new data set.

        :param key: NeXus group name for current tree level
        """

        if self.current_datainfo and self.current_dataset:
            self.final_data_cleanup()
        self.data1d = []
        self.data2d = []
        self.current_datainfo = DataInfo()

    def _initialize_new_data_set(self, parent_list=None):
        """
        A private class method to generate a new 1D or 2D data object based on
        the type of data within the set. Outside methods should call
        add_data_set() to be sure any existing data is stored properly.

        :param parent_list: List of names of parent elements
        """

        if parent_list is None:
            parent_list = []
        if self._find_intermediate(parent_list, "Qx"):
            self.current_dataset = plottable_2D()
        else:
            x = np.array(0)
            y = np.array(0)
            self.current_dataset = plottable_1D(x, y)
        self.current_datainfo.filename = self.raw_data.filename

    def _find_intermediate(self, parent_list, basename=""):
        """
        A private class method used to find an entry by either using a direct
        key or knowing the approximate basename.

        :param parent_list: List of parent nodes in the HDF5 file
        :param basename: Approximate name of an entry to search for
        :return: True if a matching entry is found, False otherwise
        """

        entry = False
        key_prog = re.compile(basename)
        top = self.raw_data
        for parent in parent_list:
            top = top.get(parent)
        for key in top.keys():
            if key_prog.match(key):
                entry = True
                break
        return entry

    def _create_unique_key(self, dictionary, name, numb=0):
        """
        Create a unique key value for any dictionary to prevent overwriting.
        Recurses until a unique key value is found.

        :param dictionary: A dictionary with any number of entries
        :param name: The name of the item to be added to the dictionary
        :param numb: The number to be appended to the name, starts at 0
        :return: The new name for the dictionary entry
        """
        if dictionary.get(name) is not None:
            numb += 1
            name = name.split("_")[0]
            name += "_{0}".format(numb)
            name = self._create_unique_key(dictionary, name, numb)
        return name

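    # For example, a repeated meta_data key "comment" is stored as "comment",
    # then "comment_1", then "comment_2", and so on (the base name is taken
    # from the part before the first underscore).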
    def _get_unit(self, value):
        """
        Find the unit for a particular value within the h5py dictionary

        :param value: attribute dictionary for a particular value set
        :return: unit for the value passed to the method
        """
        unit = h5attr(value, u'units')
        if unit is None:
            unit = h5attr(value, u'unit')
        # Convert the unit formats
        if unit == "1/A":
            unit = "A^{-1}"
        elif unit == "1/cm":
            unit = "cm^{-1}"
        return unit
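    # For example, a dataset attribute units="1/A" is reported as "A^{-1}" and
    # units="1/cm" as "cm^{-1}"; any other unit string is passed through
    # unchanged, and None is returned when no unit attribute is present.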