source: sasview/src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py @ c1dc994

Branches: magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249
Last change on this file since c1dc994 was c1dc994, checked in by Jeff Krzywon <jkrzywon@…>, 5 years ago

Differentiate between array and scalar datasets when loading meta data.

  • Property mode set to 100644
File size: 30.6 KB
RevLine 
[68aa210]1"""
2    CanSAS 2D data reader for reading HDF5 formatted CanSAS files.
3"""
4
5import h5py
6import numpy as np
7import re
8import os
9import sys
10
[7b50f14]11from ..data_info import plottable_1D, plottable_2D,\
[082239e]12    Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
13    TransmissionSpectrum, Detector
[7b50f14]14from ..loader_exceptions import FileContentsException, DefaultReaderException
15from ..file_reader_base_class import FileReader, decode
[d72567e]16
[4fdcc65]17
def h5attr(node, key, default=None):
    """
    Fetch attribute *key* from an h5py node and decode it to text.

    :param node: h5py node (Group or Dataset) carrying an ``attrs`` mapping
    :param key: attribute name to look up
    :param default: value returned (decoded) when the attribute is absent
    :return: the decoded attribute value, or the decoded default
    """
    raw_value = node.attrs.get(key, default)
    return decode(raw_value)
[68aa210]20
[4fdcc65]21
[9d786e5]22class Reader(FileReader):
[68aa210]23    """
[082239e]24    A class for reading in CanSAS v2.0 data files. The existing iteration opens
25    Mantid generated HDF5 formatted files with file extension .h5/.H5. Any
26    number of data sets may be present within the file and any dimensionality
27    of data may be used. Currently 1D and 2D SAS data sets are supported, but
28    future implementations will include 1D and 2D SESANS data.
[d72567e]29
[082239e]30    Any number of SASdata sets may be present in a SASentry and the data within
31    can be either 1D I(Q) or 2D I(Qx, Qy).
[68aa210]32
[5e906207]33    Also supports reading NXcanSAS formatted HDF5 files
34
[68aa210]35    :Dependencies:
[d72567e]36        The CanSAS HDF5 reader requires h5py => v2.5.0 or later.
[68aa210]37    """
38
[082239e]39    # CanSAS version
[68aa210]40    cansas_version = 2.0
[082239e]41    # Data type name
[cf820f5]42    type_name = "NXcanSAS"
[082239e]43    # Wildcards
[cf820f5]44    type = ["NXcanSAS HDF5 Files (*.h5)|*.h5|"]
[082239e]45    # List of allowed extensions
[68aa210]46    ext = ['.h5', '.H5']
[082239e]47    # Flag to bypass extension check
[54544637]48    allow_all = True
[68aa210]49
[9d786e5]50    def get_file_contents(self):
[68aa210]51        """
[ad52d31]52        This is the general read method that all SasView data_loaders must have.
[68aa210]53
54        :param filename: A path for an HDF5 formatted CanSAS 2D data file.
[d72567e]55        :return: List of Data1D/2D objects and/or a list of errors.
[68aa210]56        """
[082239e]57        # Reinitialize when loading a new data file to reset all class variables
[61f329f0]58        self.reset_state()
[9d786e5]59
60        filename = self.f_open.name
61        self.f_open.close() # IO handled by h5py
62
[082239e]63        # Check that the file exists
[68aa210]64        if os.path.isfile(filename):
65            basename = os.path.basename(filename)
66            _, extension = os.path.splitext(basename)
67            # If the file type is not allowed, return empty list
68            if extension in self.ext or self.allow_all:
[082239e]69                # Load the data file
[7f75a3f]70                try:
71                    self.raw_data = h5py.File(filename, 'r')
72                except Exception as e:
[8dec7e7]73                    if extension not in self.ext:
[4fdcc65]74                        msg = "NXcanSAS Reader could not load file {}".format(
75                            basename + extension)
[8dec7e7]76                        raise DefaultReaderException(msg)
77                    raise FileContentsException(e.message)
[dcb91cf]78                try:
79                    # Read in all child elements of top level SASroot
80                    self.read_children(self.raw_data, [])
81                    # Add the last data set to the list of outputs
82                    self.add_data_set()
83                except Exception as exc:
84                    raise FileContentsException(exc.message)
85                finally:
86                    # Close the data file
87                    self.raw_data.close()
88
[4fdcc65]89                for data_set in self.output:
90                    if isinstance(data_set, Data1D):
91                        if data_set.x.size < 5:
92                            exception = FileContentsException(
93                                "Fewer than 5 data points found.")
94                            data_set.errors.append(exception)
[68aa210]95
[61f329f0]96    def reset_state(self):
[d72567e]97        """
98        Create the reader object and define initial states for class variables
99        """
[61f329f0]100        super(Reader, self).reset_state()
[d72567e]101        self.data1d = []
102        self.data2d = []
103        self.raw_data = None
[282bc3f]104        self.errors = []
[d72567e]105        self.logging = []
[b204004]106        self.q_names = []
[2651724]107        self.mask_name = u''
108        self.i_name = u''
109        self.i_node = u''
[b204004]110        self.i_uncertainties_name = u''
111        self.q_uncertainty_names = []
112        self.q_resolution_names = []
[d72567e]113        self.parent_class = u''
114        self.detector = Detector()
115        self.collimation = Collimation()
116        self.aperture = Aperture()
117        self.process = Process()
118        self.trans_spectrum = TransmissionSpectrum()
119
    def read_children(self, data, parent_list):
        """
        A recursive method for stepping through the hierarchical data file.

        :param data: h5py Group object of any kind
        :param parent: h5py Group parent name
        """

        # Loop through each element of the parent and process accordingly
        for key in data.keys():
            # Get all information for the current key
            value = data.get(key)
            class_name = h5attr(value, u'canSAS_class')
            # Some writers store canSAS_class as a 1-element array
            if isinstance(class_name, (list, tuple, np.ndarray)):
                class_name = class_name[0]
            if class_name is None:
                # Fall back to the generic NeXus class attribute
                class_name = h5attr(value, u'NX_class')
            if class_name is not None:
                class_prog = re.compile(class_name)
            else:
                class_prog = re.compile(value.name)

            if isinstance(value, h5py.Group):
                # Set parent class before recursion
                last_parent_class = self.parent_class
                self.parent_class = class_name
                parent_list.append(key)
                # If a new sasentry, store the current data sets and create
                # a fresh Data1D/2D object
                if class_prog.match(u'SASentry'):
                    self.add_data_set(key)
                elif class_prog.match(u'SASdata'):
                    self._initialize_new_data_set(value)
                    self._find_data_attributes(value)
                # Recursion step to access data within the group
                self.read_children(value, parent_list)
                self.add_intermediate()
                # Reset parent class when returning from recursive method
                self.parent_class = last_parent_class
                parent_list.remove(key)

            elif isinstance(value, h5py.Dataset):
                # If this is a dataset, store the data appropriately
                # NOTE(review): Dataset.value is deprecated in newer h5py
                # (use ds[()]) — confirm the pinned h5py version before
                # changing.
                data_set = value.value
                unit = self._get_unit(value)

                # Iterate point-by-point; scalar string datasets are handled
                # wholesale (via the isinstance(data_set, basestring) checks)
                # and then `break` out of this loop after the first point.
                for data_point in data_set:
                    if isinstance(data_point, np.ndarray):
                        if data_point.dtype.char == 'S':
                            data_point = decode(bytes(data_point))
                    else:
                        data_point = decode(data_point)
                    # Top Level Meta Data
                    if key == u'definition':
                        # basestring is Python 2 only
                        if isinstance(data_set, basestring):
                            self.current_datainfo.meta_data['reader'] = data_set
                            break
                        else:
                            self.current_datainfo.meta_data[
                                'reader'] = data_point
                    # Run
                    elif key == u'run':
                        # Optional run name attribute; best-effort only
                        try:
                            run_name = h5attr(value, 'name')
                            run_dict = {data_set: run_name}
                            self.current_datainfo.run_name = run_dict
                        except Exception:
                            pass
                        if isinstance(data_set, basestring):
                            self.current_datainfo.run.append(data_set)
                            break
                        else:
                            self.current_datainfo.run.append(data_point)
                    # Title
                    elif key == u'title':
                        if isinstance(data_set, basestring):
                            self.current_datainfo.title = data_set
                            break
                        else:
                            self.current_datainfo.title = data_point
                    # Note
                    elif key == u'SASnote':
                        self.current_datainfo.notes.append(data_set)
                        break
                    # Sample Information
                    elif self.parent_class == u'SASsample':
                        self.process_sample(data_point, key)
                    # Instrumental Information
                    elif (key == u'name'
                          and self.parent_class == u'SASinstrument'):
                        self.current_datainfo.instrument = data_point
                    # Detector
                    elif self.parent_class == u'SASdetector':
                        self.process_detector(data_point, key, unit)
                    # Collimation
                    elif self.parent_class == u'SAScollimation':
                        self.process_collimation(data_point, key, unit)
                    # Aperture
                    elif self.parent_class == u'SASaperture':
                        self.process_aperture(data_point, key)
                    # Process Information
                    elif self.parent_class == u'SASprocess': # CanSAS 2.0
                        self.process_process(data_point, key)
                    # Source
                    elif self.parent_class == u'SASsource':
                        self.process_source(data_point, key, unit)
                    # SASdata arrays are consumed whole, then break
                    elif self.parent_class == u'SASdata':
                        if isinstance(self.current_dataset, plottable_2D):
                            self.process_2d_data_object(data_set, key, unit)
                        else:
                            self.process_1d_data_object(data_set, key, unit)

                        break
                    elif self.parent_class == u'SAStransmission_spectrum':
                        self.process_trans_spectrum(data_set, key)
                        break
                    # Everything else goes in meta_data
                    else:
                        new_key = self._create_unique_key(
                            self.current_datainfo.meta_data, key)
                        self.current_datainfo.meta_data[new_key] = data_point

            else:
                # Neither Group nor Dataset; should be unreachable for a
                # well-formed HDF5 file
                self.errors.append("ShouldNeverHappenException")
[68aa210]245
[96d06a4]246    def process_1d_data_object(self, data_set, key, unit):
[0d93464]247        """
[96d06a4]248        SASdata processor method for 1d data items
[0d93464]249        :param data_set: data from HDF5 file
250        :param key: canSAS_class attribute
251        :param unit: unit attribute
252        """
[2651724]253        if key == self.i_name:
[96d06a4]254            self.current_dataset.y = data_set.flatten()
255            self.current_dataset.yaxis("Intensity", unit)
[b204004]256        elif key == self.i_uncertainties_name:
[96d06a4]257            self.current_dataset.dy = data_set.flatten()
[b204004]258        elif key in self.q_names:
[0d93464]259            self.current_dataset.xaxis("Q", unit)
[96d06a4]260            self.current_dataset.x = data_set.flatten()
[b204004]261        elif key in self.q_resolution_names:
262            if (len(self.q_resolution_names) > 1
263                    and np.where(self.q_resolution_names == key)[0] == 0):
[2651724]264                self.current_dataset.dxw = data_set.flatten()
[b204004]265            elif (len(self.q_resolution_names) > 1
266                  and np.where(self.q_resolution_names == key)[0] == 1):
267                self.current_dataset.dxl = data_set.flatten()
268            else:
269                self.current_dataset.dx = data_set.flatten()
270        elif key in self.q_uncertainty_names:
271            if (len(self.q_uncertainty_names) > 1
272                    and np.where(self.q_uncertainty_names == key)[0] == 0):
273                self.current_dataset.dxw = data_set.flatten()
274            elif (len(self.q_uncertainty_names) > 1
275                  and np.where(self.q_uncertainty_names == key)[0] == 1):
[2651724]276                self.current_dataset.dxl = data_set.flatten()
277            else:
278                self.current_dataset.dx = data_set.flatten()
[96d06a4]279        elif key == self.mask_name:
280            self.current_dataset.mask = data_set.flatten()
281        elif key == u'wavelength':
282            self.current_datainfo.source.wavelength = data_set[0]
283            self.current_datainfo.source.wavelength_unit = unit
284
285    def process_2d_data_object(self, data_set, key, unit):
286        if key == self.i_name:
[4fdcc65]287            self.current_dataset.data = data_set
[96d06a4]288            self.current_dataset.zaxis("Intensity", unit)
[b204004]289        elif key == self.i_uncertainties_name:
[96d06a4]290            self.current_dataset.err_data = data_set.flatten()
[b204004]291        elif key in self.q_names:
[c2525bf]292            self.current_dataset.xaxis("Q_x", unit)
293            self.current_dataset.yaxis("Q_y", unit)
[b204004]294            if self.q_names[0] == self.q_names[1]:
[c2525bf]295                # All q data in a single array
[4fdcc65]296                self.current_dataset.qx_data = data_set[0]
297                self.current_dataset.qy_data = data_set[1]
[b204004]298            elif self.q_names.index(key) == 0:
[4fdcc65]299                self.current_dataset.qx_data = data_set
[b204004]300            elif self.q_names.index(key) == 1:
[4fdcc65]301                self.current_dataset.qy_data = data_set
[b204004]302        elif key in self.q_uncertainty_names or key in self.q_resolution_names:
303            if ((self.q_uncertainty_names[0] == self.q_uncertainty_names[1]) or
304                    (self.q_resolution_names[0] == self.q_resolution_names[1])):
[c2525bf]305                # All q data in a single array
306                self.current_dataset.dqx_data = data_set[0].flatten()
307                self.current_dataset.dqy_data = data_set[1].flatten()
[b204004]308            elif (self.q_uncertainty_names.index(key) == 0 or
309                  self.q_resolution_names.index(key) == 0):
[c2525bf]310                self.current_dataset.dqx_data = data_set.flatten()
[b204004]311            elif (self.q_uncertainty_names.index(key) == 1 or
312                  self.q_resolution_names.index(key) == 1):
[c2525bf]313                self.current_dataset.dqy_data = data_set.flatten()
314                self.current_dataset.yaxis("Q_y", unit)
315        elif key == self.mask_name:
316            self.current_dataset.mask = data_set.flatten()
[0d93464]317        elif key == u'Qy':
318            self.current_dataset.yaxis("Q_y", unit)
319            self.current_dataset.qy_data = data_set.flatten()
320        elif key == u'Qydev':
321            self.current_dataset.dqy_data = data_set.flatten()
322        elif key == u'Qx':
323            self.current_dataset.xaxis("Q_x", unit)
324            self.current_dataset.qx_data = data_set.flatten()
325        elif key == u'Qxdev':
326            self.current_dataset.dqx_data = data_set.flatten()
327
328    def process_trans_spectrum(self, data_set, key):
329        """
330        SAStransmission_spectrum processor
331        :param data_set: data from HDF5 file
332        :param key: canSAS_class attribute
333        """
334        if key == u'T':
335            self.trans_spectrum.transmission = data_set.flatten()
336        elif key == u'Tdev':
337            self.trans_spectrum.transmission_deviation = data_set.flatten()
338        elif key == u'lambda':
339            self.trans_spectrum.wavelength = data_set.flatten()
340
341    def process_sample(self, data_point, key):
342        """
343        SASsample processor
344        :param data_point: Single point from an HDF5 data file
345        :param key: class name data_point was taken from
346        """
347        if key == u'Title':
348            self.current_datainfo.sample.name = data_point
349        elif key == u'name':
350            self.current_datainfo.sample.name = data_point
351        elif key == u'ID':
352            self.current_datainfo.sample.name = data_point
353        elif key == u'thickness':
354            self.current_datainfo.sample.thickness = data_point
355        elif key == u'temperature':
356            self.current_datainfo.sample.temperature = data_point
357        elif key == u'transmission':
358            self.current_datainfo.sample.transmission = data_point
359        elif key == u'x_position':
360            self.current_datainfo.sample.position.x = data_point
361        elif key == u'y_position':
362            self.current_datainfo.sample.position.y = data_point
363        elif key == u'pitch':
364            self.current_datainfo.sample.orientation.x = data_point
365        elif key == u'yaw':
366            self.current_datainfo.sample.orientation.y = data_point
367        elif key == u'roll':
368            self.current_datainfo.sample.orientation.z = data_point
369        elif key == u'details':
370            self.current_datainfo.sample.details.append(data_point)
371
372    def process_detector(self, data_point, key, unit):
373        """
374        SASdetector processor
375        :param data_point: Single point from an HDF5 data file
376        :param key: class name data_point was taken from
377        :param unit: unit attribute from data set
378        """
379        if key == u'name':
380            self.detector.name = data_point
381        elif key == u'SDD':
382            self.detector.distance = float(data_point)
383            self.detector.distance_unit = unit
384        elif key == u'slit_length':
385            self.detector.slit_length = float(data_point)
386            self.detector.slit_length_unit = unit
387        elif key == u'x_position':
388            self.detector.offset.x = float(data_point)
389            self.detector.offset_unit = unit
390        elif key == u'y_position':
391            self.detector.offset.y = float(data_point)
392            self.detector.offset_unit = unit
393        elif key == u'pitch':
394            self.detector.orientation.x = float(data_point)
395            self.detector.orientation_unit = unit
396        elif key == u'roll':
397            self.detector.orientation.z = float(data_point)
398            self.detector.orientation_unit = unit
399        elif key == u'yaw':
400            self.detector.orientation.y = float(data_point)
401            self.detector.orientation_unit = unit
402        elif key == u'beam_center_x':
403            self.detector.beam_center.x = float(data_point)
404            self.detector.beam_center_unit = unit
405        elif key == u'beam_center_y':
406            self.detector.beam_center.y = float(data_point)
407            self.detector.beam_center_unit = unit
408        elif key == u'x_pixel_size':
409            self.detector.pixel_size.x = float(data_point)
410            self.detector.pixel_size_unit = unit
411        elif key == u'y_pixel_size':
412            self.detector.pixel_size.y = float(data_point)
413            self.detector.pixel_size_unit = unit
414
415    def process_collimation(self, data_point, key, unit):
416        """
417        SAScollimation processor
418        :param data_point: Single point from an HDF5 data file
419        :param key: class name data_point was taken from
420        :param unit: unit attribute from data set
421        """
422        if key == u'distance':
423            self.collimation.length = data_point
424            self.collimation.length_unit = unit
425        elif key == u'name':
426            self.collimation.name = data_point
427
428    def process_aperture(self, data_point, key):
429        """
430        SASaperture processor
431        :param data_point: Single point from an HDF5 data file
432        :param key: class name data_point was taken from
433        """
434        if key == u'shape':
435            self.aperture.shape = data_point
436        elif key == u'x_gap':
437            self.aperture.size.x = data_point
438        elif key == u'y_gap':
439            self.aperture.size.y = data_point
440
441    def process_source(self, data_point, key, unit):
442        """
443        SASsource processor
444        :param data_point: Single point from an HDF5 data file
445        :param key: class name data_point was taken from
446        :param unit: unit attribute from data set
447        """
448        if key == u'incident_wavelength':
449            self.current_datainfo.source.wavelength = data_point
450            self.current_datainfo.source.wavelength_unit = unit
451        elif key == u'wavelength_max':
452            self.current_datainfo.source.wavelength_max = data_point
453            self.current_datainfo.source.wavelength_max_unit = unit
454        elif key == u'wavelength_min':
455            self.current_datainfo.source.wavelength_min = data_point
456            self.current_datainfo.source.wavelength_min_unit = unit
457        elif key == u'incident_wavelength_spread':
458            self.current_datainfo.source.wavelength_spread = data_point
459            self.current_datainfo.source.wavelength_spread_unit = unit
460        elif key == u'beam_size_x':
461            self.current_datainfo.source.beam_size.x = data_point
462            self.current_datainfo.source.beam_size_unit = unit
463        elif key == u'beam_size_y':
464            self.current_datainfo.source.beam_size.y = data_point
465            self.current_datainfo.source.beam_size_unit = unit
466        elif key == u'beam_shape':
467            self.current_datainfo.source.beam_shape = data_point
468        elif key == u'radiation':
469            self.current_datainfo.source.radiation = data_point
470
471    def process_process(self, data_point, key):
472        """
473        SASprocess processor
474        :param data_point: Single point from an HDF5 data file
475        :param key: class name data_point was taken from
476        """
[ac38ab4]477        term_match = re.compile(u'^term[0-9]+$')
[0d93464]478        if key == u'Title':  # CanSAS 2.0
479            self.process.name = data_point
480        elif key == u'name':  # NXcanSAS
481            self.process.name = data_point
482        elif key == u'description':
483            self.process.description = data_point
484        elif key == u'date':
485            self.process.date = data_point
[ac38ab4]486        elif term_match.match(key):
487            self.process.term.append(data_point)
[0d93464]488        else:
489            self.process.notes.append(data_point)
490
[d72567e]491    def add_intermediate(self):
[ad52d31]492        """
[082239e]493        This method stores any intermediate objects within the final data set
494        after fully reading the set.
[ad52d31]495
[082239e]496        :param parent: The NXclass name for the h5py Group object that just
497                       finished being processed
[ad52d31]498        """
499
[d72567e]500        if self.parent_class == u'SASprocess':
501            self.current_datainfo.process.append(self.process)
[ad52d31]502            self.process = Process()
[d72567e]503        elif self.parent_class == u'SASdetector':
504            self.current_datainfo.detector.append(self.detector)
[ad52d31]505            self.detector = Detector()
[d72567e]506        elif self.parent_class == u'SAStransmission_spectrum':
507            self.current_datainfo.trans_spectrum.append(self.trans_spectrum)
[ad52d31]508            self.trans_spectrum = TransmissionSpectrum()
[d72567e]509        elif self.parent_class == u'SAScollimation':
510            self.current_datainfo.collimation.append(self.collimation)
[ad52d31]511            self.collimation = Collimation()
[d72567e]512        elif self.parent_class == u'SASaperture':
[ad52d31]513            self.collimation.aperture.append(self.aperture)
514            self.aperture = Aperture()
[d72567e]515        elif self.parent_class == u'SASdata':
[082239e]516            if isinstance(self.current_dataset, plottable_2D):
[d72567e]517                self.data2d.append(self.current_dataset)
[082239e]518            elif isinstance(self.current_dataset, plottable_1D):
[d72567e]519                self.data1d.append(self.current_dataset)
[68aa210]520
    def final_data_cleanup(self):
        """
        Does some final cleanup and formatting on self.current_datainfo and
        all data1D and data2D objects and then combines the data and info into
        Data1D and Data2D objects
        """
        # Type cast transmission spectrum arrays to float64; drop empties
        if len(self.current_datainfo.trans_spectrum) > 0:
            spectrum_list = []
            for spectrum in self.current_datainfo.trans_spectrum:
                spectrum.transmission = spectrum.transmission.astype(np.float64)
                spectrum.transmission_deviation = \
                    spectrum.transmission_deviation.astype(np.float64)
                spectrum.wavelength = spectrum.wavelength.astype(np.float64)
                if len(spectrum.transmission) > 0:
                    spectrum_list.append(spectrum)
            self.current_datainfo.trans_spectrum = spectrum_list

        # Append errors to dataset and reset class errors
        self.current_datainfo.errors = self.errors
        self.errors = []

        # Combine all plottables with datainfo and append each to output
        for dataset in self.data2d:
            # Default mask is all-True; copy in any stored mask values
            zeros = np.ones(dataset.data.size, dtype=bool)
            try:
                # NOTE(review): the range stops at mask.size - 1, so the
                # last mask element is never copied — looks like an
                # off-by-one; confirm intent before changing.
                for i in range(0, dataset.mask.size - 1):
                    zeros[i] = dataset.mask[i]
            except:
                # NOTE(review): sys.exc_value is Python 2 only
                self.errors.append(sys.exc_value)
            dataset.mask = zeros
            # Calculate the actual Q matrix when it was not supplied
            try:
                if dataset.q_data.size <= 1:
                    dataset.q_data = np.sqrt(dataset.qx_data
                                             * dataset.qx_data
                                             + dataset.qy_data
                                             * dataset.qy_data)
            except:
                dataset.q_data = None

            if dataset.data.ndim == 2:
                # Derive x/y bin values, then flatten everything into the
                # 1D layout expected downstream
                (n_rows, n_cols) = dataset.data.shape
                flat_qy = dataset.qy_data[0::n_cols].flatten()
                # For 2D arrays of Qx and Qy, the Q value should be constant
                # along each row -OR- each column. The direction is not
                # specified in the NXcanSAS standard.
                if flat_qy[0] == flat_qy[1]:
                    flat_qy = np.transpose(dataset.qy_data)[0::n_cols].flatten()
                dataset.y_bins = np.unique(flat_qy)
                flat_qx = dataset.qx_data[0::n_rows].flatten()
                # Same row-vs-column ambiguity check for Qx
                if flat_qx[0] == flat_qx[1]:
                    flat_qx = np.transpose(dataset.qx_data)[0::n_rows].flatten()
                dataset.x_bins = np.unique(flat_qx)
                dataset.data = dataset.data.flatten()
                dataset.qx_data = dataset.qx_data.flatten()
                dataset.qy_data = dataset.qy_data.flatten()
            self.current_dataset = dataset
            self.send_to_output()

        for dataset in self.data1d:
            self.current_dataset = dataset
            self.send_to_output()
[d72567e]588
[68aa210]589    def add_data_set(self, key=""):
590        """
[082239e]591        Adds the current_dataset to the list of outputs after preforming final
592        processing on the data and then calls a private method to generate a
593        new data set.
[68aa210]594
595        :param key: NeXus group name for current tree level
596        """
[d72567e]597
598        if self.current_datainfo and self.current_dataset:
[68aa210]599            self.final_data_cleanup()
[d72567e]600        self.data1d = []
601        self.data2d = []
602        self.current_datainfo = DataInfo()
[68aa210]603
[8f882fe]604    def _initialize_new_data_set(self, value=None):
[68aa210]605        """
[082239e]606        A private class method to generate a new 1D or 2D data object based on
607        the type of data within the set. Outside methods should call
608        add_data_set() to be sure any existing data is stored properly.
[68aa210]609
[d72567e]610        :param parent_list: List of names of parent elements
[68aa210]611        """
[8f882fe]612        if self._is2d(value):
[d72567e]613            self.current_dataset = plottable_2D()
[68aa210]614        else:
615            x = np.array(0)
616            y = np.array(0)
[d72567e]617            self.current_dataset = plottable_1D(x, y)
618        self.current_datainfo.filename = self.raw_data.filename
[b204004]619        self.mask_name = u''
620        self.i_name = u''
621        self.i_node = u''
622        self.i_uncertainties_name = u''
623        self.q_names = []
624        self.q_uncertainty_names = []
625        self.q_resolution_names = []
[68aa210]626
[cf29187]627    @staticmethod
628    def check_is_list_or_array(iterable):
629        try:
630            iter(iterable)
[4fdcc65]631            if (not isinstance(iterable, np.ndarray) and not isinstance(
[926ece5]632                    iterable, list)) or (isinstance(iterable, basestring)):
[cf29187]633                raise TypeError
634        except TypeError:
[dfcdbf8]635            if isinstance(iterable, basestring):
636                iterable = iterable.split(",")
637            else:
638                iterable = [iterable]
[cf29187]639        return iterable
640
[9e0dd49]641    def _find_data_attributes(self, value):
[2651724]642        """
643        A class to find the indices for Q, the name of the Qdev and Idev, and
644        the name of the mask.
645        :param value: SASdata/NXdata HDF5 Group
646        """
[9e0dd49]647        attrs = value.attrs
[0bd8fac]648        signal = attrs.get("signal", "I")
649        i_axes = attrs.get("I_axes", ["Q"])
650        q_indices = attrs.get("Q_indices", [0])
[cf29187]651        q_indices = map(int, self.check_is_list_or_array(q_indices))
652        i_axes = self.check_is_list_or_array(i_axes)
[9e0dd49]653        keys = value.keys()
[18af6d2]654        self.mask_name = attrs.get("mask")
[2651724]655        for val in q_indices:
[b204004]656            self.q_names.append(i_axes[val])
[9e0dd49]657        self.i_name = signal
[2651724]658        self.i_node = value.get(self.i_name)
[b204004]659        for item in self.q_names:
[2651724]660            if item in keys:
661                q_vals = value.get(item)
[0bd8fac]662                if q_vals.attrs.get("uncertainties") is not None:
[b204004]663                    self.q_uncertainty_names = q_vals.attrs.get("uncertainties")
[0bd8fac]664                elif q_vals.attrs.get("uncertainty") is not None:
[b204004]665                    self.q_uncertainty_names = q_vals.attrs.get("uncertainty")
666                if isinstance(self.q_uncertainty_names, basestring):
667                    self.q_uncertainty_names = self.q_uncertainty_names.split(",")
[0bd8fac]668                if q_vals.attrs.get("resolutions") is not None:
[b204004]669                    self.q_resolution_names = q_vals.attrs.get("resolutions")
670                if isinstance(self.q_resolution_names, basestring):
671                    self.q_resolution_names = self.q_resolution_names.split(",")
[9e0dd49]672        if self.i_name in keys:
673            i_vals = value.get(self.i_name)
[b204004]674            self.i_uncertainties_name = i_vals.attrs.get("uncertainties")
675            if self.i_uncertainties_name is None:
676                self.i_uncertainties_name = i_vals.attrs.get("uncertainty")
[9e0dd49]677
[8f882fe]678    def _is2d(self, value, basename="I"):
[ad52d31]679        """
[8f882fe]680        A private class to determine if the data set is 1d or 2d.
[ad52d31]681
[082239e]682        :param parent_list: List of parents nodes in the HDF5 file
[d72567e]683        :param basename: Approximate name of an entry to search for
[8f882fe]684        :return: True if 2D, otherwise false
685        """
686
687        vals = value.get(basename)
688        return (vals is not None and vals.shape is not None
689                and len(vals.shape) != 1)
[ad52d31]690
[68aa210]691    def _create_unique_key(self, dictionary, name, numb=0):
692        """
693        Create a unique key value for any dictionary to prevent overwriting
694        Recurses until a unique key value is found.
695
696        :param dictionary: A dictionary with any number of entries
697        :param name: The index of the item to be added to dictionary
698        :param numb: The number to be appended to the name, starts at 0
[d72567e]699        :return: The new name for the dictionary entry
[68aa210]700        """
701        if dictionary.get(name) is not None:
702            numb += 1
703            name = name.split("_")[0]
704            name += "_{0}".format(numb)
705            name = self._create_unique_key(dictionary, name, numb)
[d398285]706        return name
707
708    def _get_unit(self, value):
709        """
710        Find the unit for a particular value within the h5py dictionary
711
712        :param value: attribute dictionary for a particular value set
[d72567e]713        :return: unit for the value passed to the method
[d398285]714        """
[5c5e7fd]715        unit = h5attr(value, u'units')
[54544637]716        if unit is None:
[5c5e7fd]717            unit = h5attr(value, u'unit')
[54ba66e]718        return unit
Note: See TracBrowser for help on using the repository browser.