source: sasview/src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py @ 61a2a0c

[68aa210]1"""
2    CanSAS 2D data reader for reading HDF5 formatted CanSAS files.
3"""
4
5import h5py
6import numpy as np
7import re
8import os
9import sys
10
[d72567e]11from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D, Data1D, Data2D, DataInfo, Process, Aperture
12from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector
13from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
14
[68aa210]15
16
class Reader():
    """
    A class for reading in CanSAS v2.0 data files. The current implementation opens Mantid-generated HDF5 formatted
    files with the file extension .h5/.H5. Any number of data sets may be present within the file and any
    dimensionality of data may be used. Currently 1D and 2D SAS data sets are supported, but future implementations
    will include 1D and 2D SESANS data.

    Any number of SASdata sets may be present in a SASentry and the data within can be either 1D I(Q) or 2D I(Qx, Qy).

    Also supports reading NXcanSAS formatted HDF5 files.

    :Dependencies:
        The CanSAS HDF5 reader requires h5py v2.5.0 or later.
    """

    ## CanSAS version
    cansas_version = 2.0
    ## Logged warnings or messages
    logging = None
    ## List of errors for the current data set
    errors = None
    ## Raw file contents to be processed
    raw_data = None
    ## Data info currently being read in
    current_datainfo = None
    ## SASdata set currently being read in
    current_dataset = None
    ## List of plottable1D objects that should be linked to the current_datainfo
    data1d = None
    ## List of plottable2D objects that should be linked to the current_datainfo
    data2d = None
    ## Data type name
    type_name = "CanSAS 2.0"
    ## Wildcards
    type = ["CanSAS 2.0 HDF5 Files (*.h5)|*.h5"]
    ## List of allowed extensions
    ext = ['.h5', '.H5']
    ## Flag to bypass extension check
    allow_all = False
    ## List of Data1D/Data2D objects to return
    output = None

    def read(self, filename):
        """
        This is the general read method that all SasView data_loaders must have.

        :param filename: A path for an HDF5 formatted CanSAS 2D data file.
        :return: List of Data1D/2D objects and/or a list of errors.
        """
        ## Reinitialize the class when loading a new data file to reset all class variables
        self.reset_class_variables()
        ## Check that the file exists
        if os.path.isfile(filename):
            basename = os.path.basename(filename)
            _, extension = os.path.splitext(basename)
            # If the file type is not allowed, return empty list
            if extension in self.ext or self.allow_all:
                ## Load the data file
                self.raw_data = h5py.File(filename, 'r')
                ## Read in all child elements of top level SASroot
                self.read_children(self.raw_data, [])
                ## Add the last data set to the list of outputs
                self.add_data_set()
                ## Close the data file
                self.raw_data.close()
        ## Return data set(s)
        return self.output

    def reset_class_variables(self):
        """
        Reset all class variables to their initial states before a new file is read in
        """
        self.current_datainfo = None
        self.current_dataset = None
        self.data1d = []
        self.data2d = []
        self.raw_data = None
        self.errors = set()
        self.logging = []
        self.output = []
        self.parent_class = u''
        self.detector = Detector()
        self.collimation = Collimation()
        self.aperture = Aperture()
        self.process = Process()
        self.trans_spectrum = TransmissionSpectrum()

    def read_children(self, data, parent_list):
        """
        A recursive method for stepping through the hierarchical data file.

        :param data: h5py Group object of any kind
        :param parent_list: List of parent group names leading to the current level
        """

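        # Typical layout of a file this method recurses through (group names are
        # illustrative; the type of each group is identified by its canSAS_class
        # or NX_class attribute, not by its name):
        #
        #   sasentry01 (SASentry)
        #     sasdata01 (SASdata): I, Idev, Q or Qx/Qy, Mask, ...
        #     sasinstrument (SASinstrument): SASdetector, SASsource, SAScollimation, ...
        #     sasprocess (SASprocess), sassample (SASsample), ...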
        ## Loop through each element of the parent and process accordingly
        for key in data.keys():
            ## Get all information for the current key
            value = data.get(key)
            if value.attrs.get(u'canSAS_class') is not None:
                class_name = value.attrs.get(u'canSAS_class')
            else:
                class_name = value.attrs.get(u'NX_class')
            if class_name is not None:
                class_prog = re.compile(class_name)
            else:
                class_prog = re.compile(value.name)

            if isinstance(value, h5py.Group):
                self.parent_class = class_name
                parent_list.append(key)
                ## If this is a new sasentry, store the current data sets and create a fresh Data1D/2D object
                if class_prog.match(u'SASentry'):
                    self.add_data_set(key)
                elif class_prog.match(u'SASdata'):
                    self._initialize_new_data_set(parent_list)
                ## Recursion step to access data within the group
                self.read_children(value, parent_list)
                self.add_intermediate()
                parent_list.remove(key)

            elif isinstance(value, h5py.Dataset):
                ## If this is a dataset, store the data appropriately
                data_set = data[key][:]
                unit = self._get_unit(value)

                ## I and Q Data
                if key == u'I':
                    if type(self.current_dataset) is plottable_2D:
                        self.current_dataset.data = data_set
                        self.current_dataset.zaxis("Intensity", unit)
                    else:
                        self.current_dataset.y = data_set.flatten()
                        self.current_dataset.yaxis("Intensity", unit)
                    continue
                elif key == u'Idev':
                    if type(self.current_dataset) is plottable_2D:
                        self.current_dataset.err_data = data_set.flatten()
                    else:
                        self.current_dataset.dy = data_set.flatten()
                    continue
                elif key == u'Q':
                    self.current_dataset.xaxis("Q", unit)
                    if type(self.current_dataset) is plottable_2D:
                        self.current_dataset.q = data_set.flatten()
                    else:
                        self.current_dataset.x = data_set.flatten()
                    continue
                elif key == u'Qy':
                    self.current_dataset.yaxis("Q_y", unit)
                    self.current_dataset.qy_data = data_set.flatten()
                    continue
                elif key == u'Qydev':
                    self.current_dataset.dqy_data = data_set.flatten()
                    continue
                elif key == u'Qx':
                    self.current_dataset.xaxis("Q_x", unit)
                    self.current_dataset.qx_data = data_set.flatten()
                    continue
                elif key == u'Qxdev':
                    self.current_dataset.dqx_data = data_set.flatten()
                    continue
                elif key == u'Mask':
                    self.current_dataset.mask = data_set.flatten()
                    continue

                for data_point in data_set:
                    ## Top Level Meta Data
                    if key == u'definition':
                        self.current_datainfo.meta_data['reader'] = data_point
                    elif key == u'run':
                        self.current_datainfo.run.append(data_point)
                        try:
                            run_name = value.attrs['name']
                            run_dict = {data_point: run_name}
                            self.current_datainfo.run_name = run_dict
                        except:
                            pass
                    elif key == u'title':
                        self.current_datainfo.title = data_point
                    elif key == u'SASnote':
                        self.current_datainfo.notes.append(data_point)

                    ## Sample Information
                    elif key == u'Title' and self.parent_class == u'SASsample': # CanSAS 2.0 format
                        self.current_datainfo.sample.name = data_point
                    elif key == u'ID' and self.parent_class == u'SASsample': # NXcanSAS format
                        self.current_datainfo.sample.name = data_point
                    elif key == u'thickness' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.thickness = data_point
                    elif key == u'temperature' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.temperature = data_point
                    elif key == u'transmission' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.transmission = data_point
                    elif key == u'x_position' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.position.x = data_point
                    elif key == u'y_position' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.position.y = data_point
                    elif key == u'polar_angle' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.orientation.x = data_point
                    elif key == u'azimuthal_angle' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.orientation.z = data_point
                    elif key == u'details' and self.parent_class == u'SASsample':
                        self.current_datainfo.sample.details.append(data_point)

                    ## Instrumental Information
                    elif key == u'name' and self.parent_class == u'SASinstrument':
                        self.current_datainfo.instrument = data_point
                    elif key == u'name' and self.parent_class == u'SASdetector':
                        self.detector.name = data_point
                    elif key == u'SDD' and self.parent_class == u'SASdetector':
                        self.detector.distance = float(data_point)
                        self.detector.distance_unit = unit
                    elif key == u'slit_length' and self.parent_class == u'SASdetector':
                        self.detector.slit_length = float(data_point)
                        self.detector.slit_length_unit = unit
                    elif key == u'x_position' and self.parent_class == u'SASdetector':
                        self.detector.offset.x = float(data_point)
                        self.detector.offset_unit = unit
                    elif key == u'y_position' and self.parent_class == u'SASdetector':
                        self.detector.offset.y = float(data_point)
                        self.detector.offset_unit = unit
                    elif key == u'polar_angle' and self.parent_class == u'SASdetector':
                        self.detector.orientation.x = float(data_point)
                        self.detector.orientation_unit = unit
                    elif key == u'azimuthal_angle' and self.parent_class == u'SASdetector':
                        self.detector.orientation.z = float(data_point)
                        self.detector.orientation_unit = unit
                    elif key == u'beam_center_x' and self.parent_class == u'SASdetector':
                        self.detector.beam_center.x = float(data_point)
                        self.detector.beam_center_unit = unit
                    elif key == u'beam_center_y' and self.parent_class == u'SASdetector':
                        self.detector.beam_center.y = float(data_point)
                        self.detector.beam_center_unit = unit
                    elif key == u'x_pixel_size' and self.parent_class == u'SASdetector':
                        self.detector.pixel_size.x = float(data_point)
                        self.detector.pixel_size_unit = unit
                    elif key == u'y_pixel_size' and self.parent_class == u'SASdetector':
                        self.detector.pixel_size.y = float(data_point)
                        self.detector.pixel_size_unit = unit
                    elif key == u'SSD' and self.parent_class == u'SAScollimation':
                        self.collimation.length = data_point
                        self.collimation.length_unit = unit
                    elif key == u'name' and self.parent_class == u'SAScollimation':
                        self.collimation.name = data_point

[68aa210]263                    ## Process Information
[d72567e]264                    elif key == u'name' and self.parent_class == u'SASprocess':
[68aa210]265                        self.process.name = data_point
[eb98f24]266                    elif key == u'Title' and self.parent_class == u'SASprocess': # CanSAS 2.0 format
[68aa210]267                        self.process.name = data_point
[eb98f24]268                    elif key == u'name' and self.parent_class == u'SASprocess': # NXcanSAS format
[88d85c6]269                        self.process.name = data_point
[d72567e]270                    elif key == u'description' and self.parent_class == u'SASprocess':
[68aa210]271                        self.process.description = data_point
[d72567e]272                    elif key == u'date' and self.parent_class == u'SASprocess':
[68aa210]273                        self.process.date = data_point
[d72567e]274                    elif self.parent_class == u'SASprocess':
[ad52d31]275                        self.process.notes.append(data_point)
276
                    ## Transmission Spectrum
                    elif key == u'T' and self.parent_class == u'SAStransmission_spectrum':
                        self.trans_spectrum.transmission.append(data_point)
                    elif key == u'Tdev' and self.parent_class == u'SAStransmission_spectrum':
                        self.trans_spectrum.transmission_deviation.append(data_point)
                    elif key == u'lambda' and self.parent_class == u'SAStransmission_spectrum':
                        self.trans_spectrum.wavelength.append(data_point)

                    ## Source
                    elif key == u'wavelength' and self.parent_class == u'SASdata':
                        self.current_datainfo.source.wavelength = data_point
                        self.current_datainfo.source.wavelength_unit = unit
                    elif key == u'incident_wavelength' and self.parent_class == u'SASsource':
                        self.current_datainfo.source.wavelength = data_point
                        self.current_datainfo.source.wavelength_unit = unit
                    elif key == u'wavelength_max' and self.parent_class == u'SASsource':
                        self.current_datainfo.source.wavelength_max = data_point
                        self.current_datainfo.source.wavelength_max_unit = unit
                    elif key == u'wavelength_min' and self.parent_class == u'SASsource':
                        self.current_datainfo.source.wavelength_min = data_point
                        self.current_datainfo.source.wavelength_min_unit = unit
                    elif key == u'wavelength_spread' and self.parent_class == u'SASsource':
                        self.current_datainfo.source.wavelength_spread = data_point
                        self.current_datainfo.source.wavelength_spread_unit = unit
                    elif key == u'beam_size_x' and self.parent_class == u'SASsource':
                        self.current_datainfo.source.beam_size.x = data_point
                        self.current_datainfo.source.beam_size_unit = unit
                    elif key == u'beam_size_y' and self.parent_class == u'SASsource':
                        self.current_datainfo.source.beam_size.y = data_point
                        self.current_datainfo.source.beam_size_unit = unit
                    elif key == u'beam_shape' and self.parent_class == u'SASsource':
                        self.current_datainfo.source.beam_shape = data_point
                    elif key == u'radiation' and self.parent_class == u'SASsource':
                        self.current_datainfo.source.radiation = data_point
                    elif key == u'transmission' and self.parent_class == u'SASdata':
                        self.current_datainfo.sample.transmission = data_point

                    ## Everything else goes in meta_data
                    else:
                        new_key = self._create_unique_key(self.current_datainfo.meta_data, key)
                        self.current_datainfo.meta_data[new_key] = data_point

            else:
                ## This branch should be unreachable; record an error if it is ever hit
                self.errors.add("ShouldNeverHappenException")

    def add_intermediate(self):
        """
        This method stores any intermediate objects within the final data set after the corresponding group has been
        fully read. The group type is taken from self.parent_class, the canSAS_class/NX_class name of the h5py Group
        that just finished being processed.
        """

        if self.parent_class == u'SASprocess':
            self.current_datainfo.process.append(self.process)
            self.process = Process()
        elif self.parent_class == u'SASdetector':
            self.current_datainfo.detector.append(self.detector)
            self.detector = Detector()
        elif self.parent_class == u'SAStransmission_spectrum':
            self.current_datainfo.trans_spectrum.append(self.trans_spectrum)
            self.trans_spectrum = TransmissionSpectrum()
        elif self.parent_class == u'SAScollimation':
            self.current_datainfo.collimation.append(self.collimation)
            self.collimation = Collimation()
        elif self.parent_class == u'SASaperture':
            self.collimation.aperture.append(self.aperture)
            self.aperture = Aperture()
        elif self.parent_class == u'SASdata':
            if type(self.current_dataset) is plottable_2D:
                self.data2d.append(self.current_dataset)
            elif type(self.current_dataset) is plottable_1D:
                self.data1d.append(self.current_dataset)

    def final_data_cleanup(self):
        """
        Performs final cleanup and formatting on self.current_datainfo and all data1d and data2d objects, then
        combines each plottable with the data info into Data1D and Data2D objects
        """

        ## Type cast the transmission spectrum data arrays to float64
        if len(self.current_datainfo.trans_spectrum) > 0:
            spectrum_list = []
            for spectrum in self.current_datainfo.trans_spectrum:
                spectrum.transmission = np.delete(spectrum.transmission, [0])
                spectrum.transmission = spectrum.transmission.astype(np.float64)
                spectrum.transmission_deviation = np.delete(spectrum.transmission_deviation, [0])
                spectrum.transmission_deviation = spectrum.transmission_deviation.astype(np.float64)
                spectrum.wavelength = np.delete(spectrum.wavelength, [0])
                spectrum.wavelength = spectrum.wavelength.astype(np.float64)
                if len(spectrum.transmission) > 0:
                    spectrum_list.append(spectrum)
            self.current_datainfo.trans_spectrum = spectrum_list

        ## Append errors to dataset and reset class errors
        self.current_datainfo.errors = self.errors
        self.errors.clear()

        ## Combine all plottables with datainfo and append each to output
        ## Type cast data arrays to float64 and find min/max as appropriate
        for dataset in self.data2d:
            dataset.data = dataset.data.astype(np.float64)
            dataset.err_data = dataset.err_data.astype(np.float64)
            if dataset.qx_data is not None:
                dataset.xmin = np.min(dataset.qx_data)
                dataset.xmax = np.max(dataset.qx_data)
                dataset.qx_data = dataset.qx_data.astype(np.float64)
            if dataset.dqx_data is not None:
                dataset.dqx_data = dataset.dqx_data.astype(np.float64)
            if dataset.qy_data is not None:
                dataset.ymin = np.min(dataset.qy_data)
                dataset.ymax = np.max(dataset.qy_data)
                dataset.qy_data = dataset.qy_data.astype(np.float64)
            if dataset.dqy_data is not None:
                dataset.dqy_data = dataset.dqy_data.astype(np.float64)
            if dataset.q_data is not None:
                dataset.q_data = dataset.q_data.astype(np.float64)
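            # Build a boolean mask the same length as the data. Despite the
            # name, 'zeros' starts as all True (unmasked) and is then
            # overwritten with any stored mask values.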
            zeros = np.ones(dataset.data.size, dtype=bool)
            try:
                for i in range(0, dataset.mask.size - 1):
                    zeros[i] = dataset.mask[i]
            except:
                self.errors.add(sys.exc_value)
            dataset.mask = zeros
            ## Calculate the actual Q matrix
            try:
                if dataset.q_data.size <= 1:
                    dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data + dataset.qy_data * dataset.qy_data)
            except:
                dataset.q_data = None

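            # For gridded 2D data the x bins are taken from the first row of
            # qx_data and the y bins from the first column of qy_data; this
            # assumes the data are stored on a regular grid in row-major order.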
            if dataset.data.ndim == 2:
                (n_rows, n_cols) = dataset.data.shape
                dataset.y_bins = dataset.qy_data[0::n_cols]
                dataset.x_bins = dataset.qx_data[:n_cols]
                dataset.data = dataset.data.flatten()

            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
            self.output.append(final_dataset)

        for dataset in self.data1d:
            if dataset.x is not None:
                dataset.x = dataset.x.astype(np.float64)
                dataset.xmin = np.min(dataset.x)
                dataset.xmax = np.max(dataset.x)
            if dataset.y is not None:
                dataset.y = dataset.y.astype(np.float64)
                dataset.ymin = np.min(dataset.y)
                dataset.ymax = np.max(dataset.y)
            if dataset.dx is not None:
                dataset.dx = dataset.dx.astype(np.float64)
            if dataset.dxl is not None:
                dataset.dxl = dataset.dxl.astype(np.float64)
            if dataset.dxw is not None:
                dataset.dxw = dataset.dxw.astype(np.float64)
            if dataset.dy is not None:
                dataset.dy = dataset.dy.astype(np.float64)
            final_dataset = combine_data_info_with_plottable(dataset, self.current_datainfo)
            self.output.append(final_dataset)

[68aa210]437    def add_data_set(self, key=""):
438        """
439        Adds the current_dataset to the list of outputs after preforming final processing on the data and then calls a
440        private method to generate a new data set.
441
442        :param key: NeXus group name for current tree level
443        """
[d72567e]444
445        if self.current_datainfo and self.current_dataset:
[68aa210]446            self.final_data_cleanup()
[d72567e]447        self.data1d = []
448        self.data2d = []
449        self.current_datainfo = DataInfo()
[68aa210]450
[54ba66e]451
    def _initialize_new_data_set(self, parent_list=None):
        """
        A private class method to generate a new 1D or 2D data object based on the type of data within the set.
        Outside methods should call add_data_set() to be sure any existing data is stored properly.

        :param parent_list: List of names of parent elements
        """

        if parent_list is None:
            parent_list = []
        if self._find_intermediate(parent_list, "Qx"):
            self.current_dataset = plottable_2D()
        else:
            x = np.array(0)
            y = np.array(0)
            self.current_dataset = plottable_1D(x, y)
        self.current_datainfo.filename = self.raw_data.filename

    def _find_intermediate(self, parent_list, basename=""):
        """
        A private class method used to find an entry by either using a direct key or knowing the approximate basename.

        :param parent_list: List of parents to the current level in the HDF5 file
        :param basename: Approximate name of an entry to search for
        :return: True if a matching entry is found, False otherwise
        """

        entry = False
        key_prog = re.compile(basename)
        top = self.raw_data
        for parent in parent_list:
            top = top.get(parent)
        for key in top.keys():
            if key_prog.match(key):
                entry = True
                break
        return entry

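    # Illustrative example (hypothetical group names): with parent_list equal to
    # ["sasentry01", "sasdata01"], _find_intermediate(parent_list, "Qx") returns
    # True only if that SASdata group contains a key matching "Qx"; this is how
    # _initialize_new_data_set chooses between plottable_2D and plottable_1D.
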
    def _create_unique_key(self, dictionary, name, numb=0):
        """
        Create a unique key value for any dictionary to prevent overwriting.
        Recurses until a unique key value is found.

        :param dictionary: A dictionary with any number of entries
        :param name: The name (key) of the item to be added to the dictionary
        :param numb: The number to be appended to the name, starts at 0
        :return: The new name for the dictionary entry
        """
        if dictionary.get(name) is not None:
            numb += 1
            name = name.split("_")[0]
            name += "_{0}".format(numb)
            name = self._create_unique_key(dictionary, name, numb)
        return name

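    # Illustrative example (hypothetical keys): if meta_data already contains
    # "run", a second value under "run" is stored as "run_1", a third as
    # "run_2", and so on.
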
    def _get_unit(self, value):
        """
        Find the unit for a particular value within the h5py dictionary

        :param value: attribute dictionary for a particular value set
        :return: unit for the value passed to the method
        """
        unit = value.attrs.get(u'units')
        if unit is None:
            unit = value.attrs.get(u'unit')
        ## Convert the unit formats
        if unit == "1/A":
            unit = "A^{-1}"
        elif unit == "1/cm":
            unit = "cm^{-1}"
        return unit
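

# Illustrative usage sketch (not part of the original module; the file name
# below is hypothetical). read() returns a list of Data1D/Data2D objects, one
# per SASdata group found in the file.
if __name__ == "__main__":
    reader = Reader()
    data_sets = reader.read("example_cansas.h5")  # hypothetical file path
    for data in data_sets or []:
        print("%s: %s" % (type(data).__name__, data.filename))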