source: sasview/src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py @ 282bc3f

Branches/tags containing this file: magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249
Last change on this file since 282bc3f was 282bc3f, checked in by Jeff Krzywon <jkrzywon@…>, 8 months ago

Change errors in NXcanSAS reader to a list to match the rest of the readers and update unit test.

  • Property mode set to 100644
File size: 30.0 KB
Line 
1"""
2    CanSAS 2D data reader for reading HDF5 formatted CanSAS files.
3"""
4
5import h5py
6import numpy as np
7import re
8import os
9import sys
10
11from ..data_info import plottable_1D, plottable_2D,\
12    Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
13    TransmissionSpectrum, Detector
14from ..loader_exceptions import FileContentsException, DefaultReaderException
15from ..file_reader_base_class import FileReader, decode
16
17
def h5attr(node, key, default=None):
    """Return attribute *key* of an h5py node, decoded to a native string.

    Falls back to *default* when the attribute is absent.
    """
    raw = node.attrs.get(key, default)
    return decode(raw)
20
21
class Reader(FileReader):
    """
    A class for reading in CanSAS v2.0 data files. The existing iteration opens
    Mantid generated HDF5 formatted files with file extension .h5/.H5. Any
    number of data sets may be present within the file and any dimensionality
    of data may be used. Currently 1D and 2D SAS data sets are supported, but
    future implementations will include 1D and 2D SESANS data.

    Any number of SASdata sets may be present in a SASentry and the data within
    can be either 1D I(Q) or 2D I(Qx, Qy).

    Also supports reading NXcanSAS formatted HDF5 files

    :Dependencies:
        The CanSAS HDF5 reader requires h5py => v2.5.0 or later.
    """

    # CanSAS version this reader implements
    cansas_version = 2.0
    # Human-readable data type name shown by the loader framework
    type_name = "NXcanSAS"
    # Wildcard filter strings used by file-open dialogs
    type = ["NXcanSAS HDF5 Files (*.h5)|*.h5|"]
    # List of allowed extensions (checked case-sensitively against both)
    ext = ['.h5', '.H5']
    # Flag to bypass the extension check and attempt any file
    allow_all = True
49
50    def get_file_contents(self):
51        """
52        This is the general read method that all SasView data_loaders must have.
53
54        :param filename: A path for an HDF5 formatted CanSAS 2D data file.
55        :return: List of Data1D/2D objects and/or a list of errors.
56        """
57        # Reinitialize when loading a new data file to reset all class variables
58        self.reset_state()
59
60        filename = self.f_open.name
61        self.f_open.close() # IO handled by h5py
62
63        # Check that the file exists
64        if os.path.isfile(filename):
65            basename = os.path.basename(filename)
66            _, extension = os.path.splitext(basename)
67            # If the file type is not allowed, return empty list
68            if extension in self.ext or self.allow_all:
69                # Load the data file
70                try:
71                    self.raw_data = h5py.File(filename, 'r')
72                except Exception as e:
73                    if extension not in self.ext:
74                        msg = "NXcanSAS Reader could not load file {}".format(
75                            basename + extension)
76                        raise DefaultReaderException(msg)
77                    raise FileContentsException(e.message)
78                try:
79                    # Read in all child elements of top level SASroot
80                    self.read_children(self.raw_data, [])
81                    # Add the last data set to the list of outputs
82                    self.add_data_set()
83                except Exception as exc:
84                    raise FileContentsException(exc.message)
85                finally:
86                    # Close the data file
87                    self.raw_data.close()
88
89                for data_set in self.output:
90                    if isinstance(data_set, Data1D):
91                        if data_set.x.size < 5:
92                            exception = FileContentsException(
93                                "Fewer than 5 data points found.")
94                            data_set.errors.append(exception)
95
96    def reset_state(self):
97        """
98        Create the reader object and define initial states for class variables
99        """
100        super(Reader, self).reset_state()
101        self.data1d = []
102        self.data2d = []
103        self.raw_data = None
104        self.errors = []
105        self.logging = []
106        self.q_names = []
107        self.mask_name = u''
108        self.i_name = u''
109        self.i_node = u''
110        self.i_uncertainties_name = u''
111        self.q_uncertainty_names = []
112        self.q_resolution_names = []
113        self.parent_class = u''
114        self.detector = Detector()
115        self.collimation = Collimation()
116        self.aperture = Aperture()
117        self.process = Process()
118        self.trans_spectrum = TransmissionSpectrum()
119
    def read_children(self, data, parent_list):
        """
        A recursive method for stepping through the hierarchical data file.

        Groups are recursed into, with self.parent_class tracking the
        canSAS_class (or NX_class) of the enclosing group so that datasets
        can be dispatched to the matching process_* handler. Datasets are
        read whole and their points routed by the parent class and key name.

        :param data: h5py Group object of any kind
        :param parent: h5py Group parent name
        """

        # Loop through each element of the parent and process accordingly
        for key in data.keys():
            # Get all information for the current key
            value = data.get(key)
            class_name = h5attr(value, u'canSAS_class')
            if isinstance(class_name, (list, tuple, np.ndarray)):
                class_name = class_name[0]
            if class_name is None:
                # Fall back to the NeXus class, then to the node path
                class_name = h5attr(value, u'NX_class')
            if class_name is not None:
                class_prog = re.compile(class_name)
            else:
                class_prog = re.compile(value.name)

            if isinstance(value, h5py.Group):
                # Set parent class before recursion
                last_parent_class = self.parent_class
                self.parent_class = class_name
                parent_list.append(key)
                # If a new sasentry, store the current data sets and create
                # a fresh Data1D/2D object
                if class_prog.match(u'SASentry'):
                    self.add_data_set(key)
                elif class_prog.match(u'SASdata'):
                    self._initialize_new_data_set(value)
                    self._find_data_attributes(value)
                # Recursion step to access data within the group
                self.read_children(value, parent_list)
                self.add_intermediate()
                # Reset parent class when returning from recursive method
                self.parent_class = last_parent_class
                parent_list.remove(key)

            elif isinstance(value, h5py.Dataset):
                # If this is a dataset, store the data appropriately
                # NOTE(review): Dataset.value requires h5py 2.x; it was
                # removed in h5py 3.x (use value[()] there) — confirm the
                # pinned h5py version before upgrading.
                data_set = value.value
                unit = self._get_unit(value)

                # Iterate the points; handlers that consume the whole array
                # (run, notes, SASdata, transmission spectra) break out of
                # the loop after the first pass.
                for data_point in data_set:
                    if isinstance(data_point, np.ndarray):
                        if data_point.dtype.char == 'S':
                            data_point = decode(bytes(data_point))
                    else:
                        data_point = decode(data_point)
                    # Top Level Meta Data
                    if key == u'definition':
                        self.current_datainfo.meta_data['reader'] = data_set
                        break
                    # Run
                    elif key == u'run':
                        self.current_datainfo.run.append(data_set)
                        try:
                            # An unhashable data_set makes the dict fail;
                            # the run name is then best-effort only
                            run_name = h5attr(value, 'name')
                            run_dict = {data_set: run_name}
                            self.current_datainfo.run_name = run_dict
                        except Exception:
                            pass
                        break
                    # Title
                    elif key == u'title':
                        self.current_datainfo.title = data_set
                    # Note
                    elif key == u'SASnote':
                        self.current_datainfo.notes.append(data_set)
                        break
                    # Sample Information
                    elif self.parent_class == u'SASsample':
                        self.process_sample(data_point, key)
                    # Instrumental Information
                    elif (key == u'name'
                          and self.parent_class == u'SASinstrument'):
                        self.current_datainfo.instrument = data_point
                    # Detector
                    elif self.parent_class == u'SASdetector':
                        self.process_detector(data_point, key, unit)
                    # Collimation
                    elif self.parent_class == u'SAScollimation':
                        self.process_collimation(data_point, key, unit)
                    # Aperture
                    elif self.parent_class == u'SASaperture':
                        self.process_aperture(data_point, key)
                    # Process Information
                    elif self.parent_class == u'SASprocess': # CanSAS 2.0
                        self.process_process(data_point, key)
                    # Source
                    elif self.parent_class == u'SASsource':
                        self.process_source(data_point, key, unit)
                    # Plottable data — dispatched by dimensionality
                    elif self.parent_class == u'SASdata':
                        if isinstance(self.current_dataset, plottable_2D):
                            self.process_2d_data_object(data_set, key, unit)
                        else:
                            self.process_1d_data_object(data_set, key, unit)

                        break
                    elif self.parent_class == u'SAStransmission_spectrum':
                        self.process_trans_spectrum(data_set, key)
                        break
                    # Everything else goes in meta_data
                    else:
                        new_key = self._create_unique_key(
                            self.current_datainfo.meta_data, key)
                        self.current_datainfo.meta_data[new_key] = data_point

            else:
                # Should be unreachable: h5py nodes are Groups or Datasets
                self.errors.append("ShouldNeverHappenException")
234
235    def process_1d_data_object(self, data_set, key, unit):
236        """
237        SASdata processor method for 1d data items
238        :param data_set: data from HDF5 file
239        :param key: canSAS_class attribute
240        :param unit: unit attribute
241        """
242        if key == self.i_name:
243            self.current_dataset.y = data_set.flatten()
244            self.current_dataset.yaxis("Intensity", unit)
245        elif key == self.i_uncertainties_name:
246            self.current_dataset.dy = data_set.flatten()
247        elif key in self.q_names:
248            self.current_dataset.xaxis("Q", unit)
249            self.current_dataset.x = data_set.flatten()
250        elif key in self.q_resolution_names:
251            if (len(self.q_resolution_names) > 1
252                    and np.where(self.q_resolution_names == key)[0] == 0):
253                self.current_dataset.dxw = data_set.flatten()
254            elif (len(self.q_resolution_names) > 1
255                  and np.where(self.q_resolution_names == key)[0] == 1):
256                self.current_dataset.dxl = data_set.flatten()
257            else:
258                self.current_dataset.dx = data_set.flatten()
259        elif key in self.q_uncertainty_names:
260            if (len(self.q_uncertainty_names) > 1
261                    and np.where(self.q_uncertainty_names == key)[0] == 0):
262                self.current_dataset.dxw = data_set.flatten()
263            elif (len(self.q_uncertainty_names) > 1
264                  and np.where(self.q_uncertainty_names == key)[0] == 1):
265                self.current_dataset.dxl = data_set.flatten()
266            else:
267                self.current_dataset.dx = data_set.flatten()
268        elif key == self.mask_name:
269            self.current_dataset.mask = data_set.flatten()
270        elif key == u'wavelength':
271            self.current_datainfo.source.wavelength = data_set[0]
272            self.current_datainfo.source.wavelength_unit = unit
273
274    def process_2d_data_object(self, data_set, key, unit):
275        if key == self.i_name:
276            self.current_dataset.data = data_set
277            self.current_dataset.zaxis("Intensity", unit)
278        elif key == self.i_uncertainties_name:
279            self.current_dataset.err_data = data_set.flatten()
280        elif key in self.q_names:
281            self.current_dataset.xaxis("Q_x", unit)
282            self.current_dataset.yaxis("Q_y", unit)
283            if self.q_names[0] == self.q_names[1]:
284                # All q data in a single array
285                self.current_dataset.qx_data = data_set[0]
286                self.current_dataset.qy_data = data_set[1]
287            elif self.q_names.index(key) == 0:
288                self.current_dataset.qx_data = data_set
289            elif self.q_names.index(key) == 1:
290                self.current_dataset.qy_data = data_set
291        elif key in self.q_uncertainty_names or key in self.q_resolution_names:
292            if ((self.q_uncertainty_names[0] == self.q_uncertainty_names[1]) or
293                    (self.q_resolution_names[0] == self.q_resolution_names[1])):
294                # All q data in a single array
295                self.current_dataset.dqx_data = data_set[0].flatten()
296                self.current_dataset.dqy_data = data_set[1].flatten()
297            elif (self.q_uncertainty_names.index(key) == 0 or
298                  self.q_resolution_names.index(key) == 0):
299                self.current_dataset.dqx_data = data_set.flatten()
300            elif (self.q_uncertainty_names.index(key) == 1 or
301                  self.q_resolution_names.index(key) == 1):
302                self.current_dataset.dqy_data = data_set.flatten()
303                self.current_dataset.yaxis("Q_y", unit)
304        elif key == self.mask_name:
305            self.current_dataset.mask = data_set.flatten()
306        elif key == u'Qy':
307            self.current_dataset.yaxis("Q_y", unit)
308            self.current_dataset.qy_data = data_set.flatten()
309        elif key == u'Qydev':
310            self.current_dataset.dqy_data = data_set.flatten()
311        elif key == u'Qx':
312            self.current_dataset.xaxis("Q_x", unit)
313            self.current_dataset.qx_data = data_set.flatten()
314        elif key == u'Qxdev':
315            self.current_dataset.dqx_data = data_set.flatten()
316
317    def process_trans_spectrum(self, data_set, key):
318        """
319        SAStransmission_spectrum processor
320        :param data_set: data from HDF5 file
321        :param key: canSAS_class attribute
322        """
323        if key == u'T':
324            self.trans_spectrum.transmission = data_set.flatten()
325        elif key == u'Tdev':
326            self.trans_spectrum.transmission_deviation = data_set.flatten()
327        elif key == u'lambda':
328            self.trans_spectrum.wavelength = data_set.flatten()
329
330    def process_sample(self, data_point, key):
331        """
332        SASsample processor
333        :param data_point: Single point from an HDF5 data file
334        :param key: class name data_point was taken from
335        """
336        if key == u'Title':
337            self.current_datainfo.sample.name = data_point
338        elif key == u'name':
339            self.current_datainfo.sample.name = data_point
340        elif key == u'ID':
341            self.current_datainfo.sample.name = data_point
342        elif key == u'thickness':
343            self.current_datainfo.sample.thickness = data_point
344        elif key == u'temperature':
345            self.current_datainfo.sample.temperature = data_point
346        elif key == u'transmission':
347            self.current_datainfo.sample.transmission = data_point
348        elif key == u'x_position':
349            self.current_datainfo.sample.position.x = data_point
350        elif key == u'y_position':
351            self.current_datainfo.sample.position.y = data_point
352        elif key == u'pitch':
353            self.current_datainfo.sample.orientation.x = data_point
354        elif key == u'yaw':
355            self.current_datainfo.sample.orientation.y = data_point
356        elif key == u'roll':
357            self.current_datainfo.sample.orientation.z = data_point
358        elif key == u'details':
359            self.current_datainfo.sample.details.append(data_point)
360
361    def process_detector(self, data_point, key, unit):
362        """
363        SASdetector processor
364        :param data_point: Single point from an HDF5 data file
365        :param key: class name data_point was taken from
366        :param unit: unit attribute from data set
367        """
368        if key == u'name':
369            self.detector.name = data_point
370        elif key == u'SDD':
371            self.detector.distance = float(data_point)
372            self.detector.distance_unit = unit
373        elif key == u'slit_length':
374            self.detector.slit_length = float(data_point)
375            self.detector.slit_length_unit = unit
376        elif key == u'x_position':
377            self.detector.offset.x = float(data_point)
378            self.detector.offset_unit = unit
379        elif key == u'y_position':
380            self.detector.offset.y = float(data_point)
381            self.detector.offset_unit = unit
382        elif key == u'pitch':
383            self.detector.orientation.x = float(data_point)
384            self.detector.orientation_unit = unit
385        elif key == u'roll':
386            self.detector.orientation.z = float(data_point)
387            self.detector.orientation_unit = unit
388        elif key == u'yaw':
389            self.detector.orientation.y = float(data_point)
390            self.detector.orientation_unit = unit
391        elif key == u'beam_center_x':
392            self.detector.beam_center.x = float(data_point)
393            self.detector.beam_center_unit = unit
394        elif key == u'beam_center_y':
395            self.detector.beam_center.y = float(data_point)
396            self.detector.beam_center_unit = unit
397        elif key == u'x_pixel_size':
398            self.detector.pixel_size.x = float(data_point)
399            self.detector.pixel_size_unit = unit
400        elif key == u'y_pixel_size':
401            self.detector.pixel_size.y = float(data_point)
402            self.detector.pixel_size_unit = unit
403
404    def process_collimation(self, data_point, key, unit):
405        """
406        SAScollimation processor
407        :param data_point: Single point from an HDF5 data file
408        :param key: class name data_point was taken from
409        :param unit: unit attribute from data set
410        """
411        if key == u'distance':
412            self.collimation.length = data_point
413            self.collimation.length_unit = unit
414        elif key == u'name':
415            self.collimation.name = data_point
416
417    def process_aperture(self, data_point, key):
418        """
419        SASaperture processor
420        :param data_point: Single point from an HDF5 data file
421        :param key: class name data_point was taken from
422        """
423        if key == u'shape':
424            self.aperture.shape = data_point
425        elif key == u'x_gap':
426            self.aperture.size.x = data_point
427        elif key == u'y_gap':
428            self.aperture.size.y = data_point
429
430    def process_source(self, data_point, key, unit):
431        """
432        SASsource processor
433        :param data_point: Single point from an HDF5 data file
434        :param key: class name data_point was taken from
435        :param unit: unit attribute from data set
436        """
437        if key == u'incident_wavelength':
438            self.current_datainfo.source.wavelength = data_point
439            self.current_datainfo.source.wavelength_unit = unit
440        elif key == u'wavelength_max':
441            self.current_datainfo.source.wavelength_max = data_point
442            self.current_datainfo.source.wavelength_max_unit = unit
443        elif key == u'wavelength_min':
444            self.current_datainfo.source.wavelength_min = data_point
445            self.current_datainfo.source.wavelength_min_unit = unit
446        elif key == u'incident_wavelength_spread':
447            self.current_datainfo.source.wavelength_spread = data_point
448            self.current_datainfo.source.wavelength_spread_unit = unit
449        elif key == u'beam_size_x':
450            self.current_datainfo.source.beam_size.x = data_point
451            self.current_datainfo.source.beam_size_unit = unit
452        elif key == u'beam_size_y':
453            self.current_datainfo.source.beam_size.y = data_point
454            self.current_datainfo.source.beam_size_unit = unit
455        elif key == u'beam_shape':
456            self.current_datainfo.source.beam_shape = data_point
457        elif key == u'radiation':
458            self.current_datainfo.source.radiation = data_point
459
460    def process_process(self, data_point, key):
461        """
462        SASprocess processor
463        :param data_point: Single point from an HDF5 data file
464        :param key: class name data_point was taken from
465        """
466        term_match = re.compile(u'^term[0-9]+$')
467        if key == u'Title':  # CanSAS 2.0
468            self.process.name = data_point
469        elif key == u'name':  # NXcanSAS
470            self.process.name = data_point
471        elif key == u'description':
472            self.process.description = data_point
473        elif key == u'date':
474            self.process.date = data_point
475        elif term_match.match(key):
476            self.process.term.append(data_point)
477        else:
478            self.process.notes.append(data_point)
479
480    def add_intermediate(self):
481        """
482        This method stores any intermediate objects within the final data set
483        after fully reading the set.
484
485        :param parent: The NXclass name for the h5py Group object that just
486                       finished being processed
487        """
488
489        if self.parent_class == u'SASprocess':
490            self.current_datainfo.process.append(self.process)
491            self.process = Process()
492        elif self.parent_class == u'SASdetector':
493            self.current_datainfo.detector.append(self.detector)
494            self.detector = Detector()
495        elif self.parent_class == u'SAStransmission_spectrum':
496            self.current_datainfo.trans_spectrum.append(self.trans_spectrum)
497            self.trans_spectrum = TransmissionSpectrum()
498        elif self.parent_class == u'SAScollimation':
499            self.current_datainfo.collimation.append(self.collimation)
500            self.collimation = Collimation()
501        elif self.parent_class == u'SASaperture':
502            self.collimation.aperture.append(self.aperture)
503            self.aperture = Aperture()
504        elif self.parent_class == u'SASdata':
505            if isinstance(self.current_dataset, plottable_2D):
506                self.data2d.append(self.current_dataset)
507            elif isinstance(self.current_dataset, plottable_1D):
508                self.data1d.append(self.current_dataset)
509
510    def final_data_cleanup(self):
511        """
512        Does some final cleanup and formatting on self.current_datainfo and
513        all data1D and data2D objects and then combines the data and info into
514        Data1D and Data2D objects
515        """
516        # Type cast data arrays to float64
517        if len(self.current_datainfo.trans_spectrum) > 0:
518            spectrum_list = []
519            for spectrum in self.current_datainfo.trans_spectrum:
520                spectrum.transmission = spectrum.transmission.astype(np.float64)
521                spectrum.transmission_deviation = \
522                    spectrum.transmission_deviation.astype(np.float64)
523                spectrum.wavelength = spectrum.wavelength.astype(np.float64)
524                if len(spectrum.transmission) > 0:
525                    spectrum_list.append(spectrum)
526            self.current_datainfo.trans_spectrum = spectrum_list
527
528        # Append errors to dataset and reset class errors
529        self.current_datainfo.errors = self.errors
530        self.errors = []
531
532        # Combine all plottables with datainfo and append each to output
533        # Type cast data arrays to float64 and find min/max as appropriate
534        for dataset in self.data2d:
535            zeros = np.ones(dataset.data.size, dtype=bool)
536            try:
537                for i in range(0, dataset.mask.size - 1):
538                    zeros[i] = dataset.mask[i]
539            except:
540                self.errors.append(sys.exc_value)
541            dataset.mask = zeros
542            # Calculate the actual Q matrix
543            try:
544                if dataset.q_data.size <= 1:
545                    dataset.q_data = np.sqrt(dataset.qx_data
546                                             * dataset.qx_data
547                                             + dataset.qy_data
548                                             * dataset.qy_data)
549            except:
550                dataset.q_data = None
551
552            if dataset.data.ndim == 2:
553                (n_rows, n_cols) = dataset.data.shape
554                flat_qy = dataset.qy_data[0::n_cols].flatten()
555                # For 2D arrays of Qx and Qy, the Q value should be constant
556                # along each row -OR- each column. The direction is not
557                # specified in the NXcanSAS standard.
558                if flat_qy[0] == flat_qy[1]:
559                    flat_qy = np.transpose(dataset.qy_data)[0::n_cols].flatten()
560                dataset.y_bins = np.unique(flat_qy)
561                flat_qx = dataset.qx_data[0::n_rows].flatten()
562                # For 2D arrays of Qx and Qy, the Q value should be constant
563                # along each row -OR- each column. The direction is not
564                # specified in the NXcanSAS standard.
565                if flat_qx[0] == flat_qx[1]:
566                    flat_qx = np.transpose(dataset.qx_data)[0::n_rows].flatten()
567                dataset.x_bins = np.unique(flat_qx)
568                dataset.data = dataset.data.flatten()
569                dataset.qx_data = dataset.qx_data.flatten()
570                dataset.qy_data = dataset.qy_data.flatten()
571            self.current_dataset = dataset
572            self.send_to_output()
573
574        for dataset in self.data1d:
575            self.current_dataset = dataset
576            self.send_to_output()
577
578    def add_data_set(self, key=""):
579        """
580        Adds the current_dataset to the list of outputs after preforming final
581        processing on the data and then calls a private method to generate a
582        new data set.
583
584        :param key: NeXus group name for current tree level
585        """
586
587        if self.current_datainfo and self.current_dataset:
588            self.final_data_cleanup()
589        self.data1d = []
590        self.data2d = []
591        self.current_datainfo = DataInfo()
592
593    def _initialize_new_data_set(self, value=None):
594        """
595        A private class method to generate a new 1D or 2D data object based on
596        the type of data within the set. Outside methods should call
597        add_data_set() to be sure any existing data is stored properly.
598
599        :param parent_list: List of names of parent elements
600        """
601        if self._is2d(value):
602            self.current_dataset = plottable_2D()
603        else:
604            x = np.array(0)
605            y = np.array(0)
606            self.current_dataset = plottable_1D(x, y)
607        self.current_datainfo.filename = self.raw_data.filename
608        self.mask_name = u''
609        self.i_name = u''
610        self.i_node = u''
611        self.i_uncertainties_name = u''
612        self.q_names = []
613        self.q_uncertainty_names = []
614        self.q_resolution_names = []
615
616    @staticmethod
617    def check_is_list_or_array(iterable):
618        try:
619            iter(iterable)
620            if (not isinstance(iterable, np.ndarray) and not isinstance(
621                    iterable, list)) or (isinstance(iterable, basestring)):
622                raise TypeError
623        except TypeError:
624            if isinstance(iterable, basestring):
625                iterable = iterable.split(",")
626            else:
627                iterable = [iterable]
628        return iterable
629
    def _find_data_attributes(self, value):
        """
        A class to find the indices for Q, the name of the Qdev and Idev, and
        the name of the mask.

        Reads the NXdata-style attributes (signal, I_axes, Q_indices, mask)
        from a SASdata group and records the dataset names used later by the
        process_*_data_object methods.

        :param value: SASdata/NXdata HDF5 Group
        """
        attrs = value.attrs
        # Defaults per the NXcanSAS convention: signal dataset "I" over "Q"
        signal = attrs.get("signal", "I")
        i_axes = attrs.get("I_axes", ["Q"])
        q_indices = attrs.get("Q_indices", [0])
        # NOTE(review): map() here is fine on Python 2 (returns a list);
        # on Python 3 it yields a one-shot iterator — confirm target version
        q_indices = map(int, self.check_is_list_or_array(q_indices))
        i_axes = self.check_is_list_or_array(i_axes)
        keys = value.keys()
        self.mask_name = attrs.get("mask")
        # The Q datasets are the I_axes entries selected by Q_indices
        for val in q_indices:
            self.q_names.append(i_axes[val])
        self.i_name = signal
        self.i_node = value.get(self.i_name)
        # Pick up uncertainty/resolution dataset names attached to each Q
        # dataset; both the plural and singular attribute spellings occur
        for item in self.q_names:
            if item in keys:
                q_vals = value.get(item)
                if q_vals.attrs.get("uncertainties") is not None:
                    self.q_uncertainty_names = q_vals.attrs.get("uncertainties")
                elif q_vals.attrs.get("uncertainty") is not None:
                    self.q_uncertainty_names = q_vals.attrs.get("uncertainty")
                # A comma-separated string holds multiple names
                if isinstance(self.q_uncertainty_names, basestring):
                    self.q_uncertainty_names = self.q_uncertainty_names.split(",")
                if q_vals.attrs.get("resolutions") is not None:
                    self.q_resolution_names = q_vals.attrs.get("resolutions")
                if isinstance(self.q_resolution_names, basestring):
                    self.q_resolution_names = self.q_resolution_names.split(",")
        # Same plural/singular fallback for the intensity uncertainties
        if self.i_name in keys:
            i_vals = value.get(self.i_name)
            self.i_uncertainties_name = i_vals.attrs.get("uncertainties")
            if self.i_uncertainties_name is None:
                self.i_uncertainties_name = i_vals.attrs.get("uncertainty")
666
667    def _is2d(self, value, basename="I"):
668        """
669        A private class to determine if the data set is 1d or 2d.
670
671        :param parent_list: List of parents nodes in the HDF5 file
672        :param basename: Approximate name of an entry to search for
673        :return: True if 2D, otherwise false
674        """
675
676        vals = value.get(basename)
677        return (vals is not None and vals.shape is not None
678                and len(vals.shape) != 1)
679
680    def _create_unique_key(self, dictionary, name, numb=0):
681        """
682        Create a unique key value for any dictionary to prevent overwriting
683        Recurses until a unique key value is found.
684
685        :param dictionary: A dictionary with any number of entries
686        :param name: The index of the item to be added to dictionary
687        :param numb: The number to be appended to the name, starts at 0
688        :return: The new name for the dictionary entry
689        """
690        if dictionary.get(name) is not None:
691            numb += 1
692            name = name.split("_")[0]
693            name += "_{0}".format(numb)
694            name = self._create_unique_key(dictionary, name, numb)
695        return name
696
697    def _get_unit(self, value):
698        """
699        Find the unit for a particular value within the h5py dictionary
700
701        :param value: attribute dictionary for a particular value set
702        :return: unit for the value passed to the method
703        """
704        unit = h5attr(value, u'units')
705        if unit is None:
706            unit = h5attr(value, u'unit')
707        return unit
Note: See TracBrowser for help on using the repository browser.