source: sasview/src/sas/sascalc/dataloader/data_info.py @ 7677b4d

Last change on this file since 7677b4d was 345e7e4, checked in by GitHub <noreply@…>, 8 years ago

Revert "Jurtest2"

1"""
2    Module that contains classes to hold information read from
3    reduced data files.
4
5    A good description of the data members can be found in
6    the CanSAS 1D XML data format:
7
8    http://www.smallangles.net/wgwiki/index.php/cansas1d_documentation
9"""
10#####################################################################
11#This software was developed by the University of Tennessee as part of the
12#Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
13#project funded by the US National Science Foundation.
14#See the license text in license.txt
15#copyright 2008, University of Tennessee
16######################################################################
17
18
19#TODO: Keep track of data manipulation in the 'process' data structure.
20#TODO: This module should be independent of plottables. We should write
21#        an adapter class for plottables when needed.
22
23#from sas.guitools.plottables import Data1D as plottable_1D
24from sas.sascalc.data_util.uncertainty import Uncertainty
25import numpy
26import math
27
28class plottable_sesans1D(object):
29    """
30    SESANS is a placeholder for 1D SESANS plottables.
31
32    #TODO: This was copied directly from plottable_1D and has since been
33    #      modified somewhat.
34    """
35    # The presence of these should be mutually
36    # exclusive with the presence of Qdev (dx)
37    x = None
38    y = None
39    lam = None
40    dx = None
41    dy = None
42    dlam = None
43    ## Slit smearing length
44    dxl = None
45    ## Slit smearing width
46    dxw = None
47
48    # Units
49    _xaxis = ''
50    _xunit = ''
51    _yaxis = ''
52    _yunit = ''
53
54    def __init__(self, x, y, lam, dx=None, dy=None, dlam=None):
55#        print "SESANS plottable working"
56        self.x = numpy.asarray(x)
57        self.y = numpy.asarray(y)
58        self.lam = numpy.asarray(lam)
59        if dx is not None:
60            self.dx = numpy.asarray(dx)
61        if dy is not None:
62            self.dy = numpy.asarray(dy)
63        if dlam is not None:
64            self.dlam = numpy.asarray(dlam)
65
66    def xaxis(self, label, unit):
67        """
68        set the x axis label and unit
69        """
70        self._xaxis = label
71        self._xunit = unit
72
73    def yaxis(self, label, unit):
74        """
75        set the y axis label and unit
76        """
77        self._yaxis = label
78        self._yunit = unit
79
80
81class plottable_1D(object):
82    """
83    Data1D is a placeholder for 1D plottables.
84    """
85    # The presence of these should be mutually
86    # exclusive with the presence of Qdev (dx)
87    x = None
88    y = None
89    dx = None
90    dy = None
91    ## Slit smearing length
92    dxl = None
93    ## Slit smearing width
94    dxw = None
95
96    # Units
97    _xaxis = ''
98    _xunit = ''
99    _yaxis = ''
100    _yunit = ''
101
102    def __init__(self, x, y, dx=None, dy=None, dxl=None, dxw=None):
103        self.x = numpy.asarray(x)
104        self.y = numpy.asarray(y)
105        if dx is not None:
106            self.dx = numpy.asarray(dx)
107        if dy is not None:
108            self.dy = numpy.asarray(dy)
109        if dxl is not None:
110            self.dxl = numpy.asarray(dxl)
111        if dxw is not None:
112            self.dxw = numpy.asarray(dxw)
113
114    def xaxis(self, label, unit):
115        """
116        set the x axis label and unit
117        """
118        self._xaxis = label
119        self._xunit = unit
120
121    def yaxis(self, label, unit):
122        """
123        set the y axis label and unit
124        """
125        self._yaxis = label
126        self._yunit = unit
127
128
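# Example (illustrative sketch; the numbers below are invented): a reader
# typically fills a plottable_1D with Q, I(Q) and dI(Q) arrays and then
# labels the axes.
# >>> pl = plottable_1D(x=[0.001, 0.002, 0.003],
# ...                   y=[110.0, 95.0, 80.0],
# ...                   dy=[3.0, 2.5, 2.0])
# >>> pl.xaxis("Q", "1/A")
# >>> pl.yaxis("Intensity", "1/cm")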
129class plottable_2D(object):
130    """
131    Data2D is a placeholder for 2D plottables.
132    """
133    xmin = None
134    xmax = None
135    ymin = None
136    ymax = None
137    data = None
138    qx_data = None
139    qy_data = None
140    q_data = None
141    err_data = None
142    dqx_data = None
143    dqy_data = None
144    mask = None
145
146    # Units
147    _xaxis = ''
148    _xunit = ''
149    _yaxis = ''
150    _yunit = ''
151    _zaxis = ''
152    _zunit = ''
153
154    def __init__(self, data=None, err_data=None, qx_data=None,
155                 qy_data=None, q_data=None, mask=None,
156                 dqx_data=None, dqy_data=None):
157        self.data = numpy.asarray(data)
158        self.qx_data = numpy.asarray(qx_data)
159        self.qy_data = numpy.asarray(qy_data)
160        self.q_data = numpy.asarray(q_data)
161        self.mask = numpy.asarray(mask)
162        self.err_data = numpy.asarray(err_data)
163        if dqx_data is not None:
164            self.dqx_data = numpy.asarray(dqx_data)
165        if dqy_data is not None:
166            self.dqy_data = numpy.asarray(dqy_data)
167
168    def xaxis(self, label, unit):
169        """
170        set the x axis label and unit
171        """
172        self._xaxis = label
173        self._xunit = unit
174
175    def yaxis(self, label, unit):
176        """
177        set the y axis label and unit
178        """
179        self._yaxis = label
180        self._yunit = unit
181
182    def zaxis(self, label, unit):
183        """
184        set the z axis label and unit
185        """
186        self._zaxis = label
187        self._zunit = unit
188
189
190class Vector(object):
191    """
192    Vector class to hold the x, y and z components of multi-dimensional quantities
193    """
194    ## x component
195    x = None
196    ## y component
197    y = None
198    ## z component
199    z = None
200
201    def __init__(self, x=None, y=None, z=None):
202        """
203        Initialization. Components that are not
204        set are set to None by default.
205
206        :param x: x component
207        :param y: y component
208        :param z: z component
209        """
210        self.x = x
211        self.y = y
212        self.z = z
213
214    def __str__(self):
215        msg = "x = %s\ty = %s\tz = %s" % (str(self.x), str(self.y), str(self.z))
216        return msg
217
218
219class Detector(object):
220    """
221    Class to hold detector information
222    """
223    ## Name of the instrument [string]
224    ## Name of the detector [string]
225    ## Sample to detector distance [float] [mm]
226    distance = None
227    distance_unit = 'mm'
228    ## Offset of this detector position in X, Y,
229    #(and Z if necessary) [Vector] [m]
230    offset = None
231    offset_unit = 'm'
232    ## Orientation (rotation) of this detector in roll,
233    # pitch, and yaw [Vector] [degrees]
234    orientation = None
235    orientation_unit = 'degree'
236    ## Center of the beam on the detector in X and Y
237    #(and Z if necessary) [Vector] [mm]
238    beam_center = None
239    beam_center_unit = 'mm'
240    ## Pixel size in X, Y, (and Z if necessary) [Vector] [mm]
241    pixel_size = None
242    pixel_size_unit = 'mm'
243    ## Slit length of the instrument for this detector.[float] [mm]
244    slit_length = None
245    slit_length_unit = 'mm'
246
247    def __init__(self):
248        """
249        Initialize class attributes that are objects.
250        """
251        self.offset = Vector()
252        self.orientation = Vector()
253        self.beam_center = Vector()
254        self.pixel_size = Vector()
255
256    def __str__(self):
257        _str = "Detector:\n"
258        _str += "   Name:         %s\n" % self.name
259        _str += "   Distance:     %s [%s]\n" % \
260            (str(self.distance), str(self.distance_unit))
261        _str += "   Offset:       %s [%s]\n" % \
262            (str(self.offset), str(self.offset_unit))
263        _str += "   Orientation:  %s [%s]\n" % \
264            (str(self.orientation), str(self.orientation_unit))
265        _str += "   Beam center:  %s [%s]\n" % \
266            (str(self.beam_center), str(self.beam_center_unit))
267        _str += "   Pixel size:   %s [%s]\n" % \
268            (str(self.pixel_size), str(self.pixel_size_unit))
269        _str += "   Slit length:  %s [%s]\n" % \
270            (str(self.slit_length), str(self.slit_length_unit))
271        return _str
272
273
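# Example (illustrative sketch; the values below are invented): detector
# metadata is filled in field by field, with Vector-valued attributes set
# component-wise.
# >>> det = Detector()
# >>> det.name = "example detector"
# >>> det.distance = 4000.0          # [mm], see distance_unit above
# >>> det.beam_center.x = 64.5       # beam_center is a Vector
# >>> det.beam_center.y = 64.5
# >>> det.pixel_size.x = 5.0
# >>> det.pixel_size.y = 5.0
# >>> print(det)                     # __str__ gives a readable summary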
274class Aperture(object):
275    ## Name
276    name = None
277    ## Type
278    type = None
279    ## Size name
280    size_name = None
281    ## Aperture size [Vector]
282    size = None
283    size_unit = 'mm'
284    ## Aperture distance [float]
285    distance = None
286    distance_unit = 'mm'
287
288    def __init__(self):
289        self.size = Vector()
290
291
292class Collimation(object):
293    """
294    Class to hold collimation information
295    """
296    ## Name
297    name = None
298    ## Length [float] [mm]
299    length = None
300    length_unit = 'mm'
301    ## Aperture
302    aperture = None
303
304    def __init__(self):
305        self.aperture = []
306
307    def __str__(self):
308        _str = "Collimation:\n"
309        _str += "   Length:       %s [%s]\n" % \
310            (str(self.length), str(self.length_unit))
311        for item in self.aperture:
312            _str += "   Aperture size:%s [%s]\n" % \
313                (str(item.size), str(item.size_unit))
314            _str += "   Aperture_dist:%s [%s]\n" % \
315                (str(item.distance), str(item.distance_unit))
316        return _str
317
318
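# Example (illustrative sketch; the values below are invented): a
# Collimation holds a list of Aperture objects, each with a Vector size.
# >>> ap = Aperture()
# >>> ap.size.x = 10.0               # [mm], see size_unit above
# >>> ap.distance = 1500.0           # [mm]
# >>> coll = Collimation()
# >>> coll.length = 3000.0
# >>> coll.aperture.append(ap)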
319class Source(object):
320    """
321    Class to hold source information
322    """
323    ## Name
324    name = None
325    ## Radiation type [string]
326    radiation = None
327    ## Beam size name
328    beam_size_name = None
329    ## Beam size [Vector] [mm]
330    beam_size = None
331    beam_size_unit = 'mm'
332    ## Beam shape [string]
333    beam_shape = None
334    ## Wavelength [float] [Angstrom]
335    wavelength = None
336    wavelength_unit = 'A'
337    ## Minimum wavelength [float] [nm]
338    wavelength_min = None
339    wavelength_min_unit = 'nm'
340    ## Maximum wavelength [float] [nm]
341    wavelength_max = None
342    wavelength_max_unit = 'nm'
343    ## Wavelength spread [float] [percent]
344    wavelength_spread = None
345    wavelength_spread_unit = 'percent'
346
347    def __init__(self):
348        self.beam_size = Vector()
349
350    def __str__(self):
351        _str = "Source:\n"
352        _str += "   Radiation:    %s\n" % str(self.radiation)
353        _str += "   Shape:        %s\n" % str(self.beam_shape)
354        _str += "   Wavelength:   %s [%s]\n" % \
355            (str(self.wavelength), str(self.wavelength_unit))
356        _str += "   Waveln_min:   %s [%s]\n" % \
357            (str(self.wavelength_min), str(self.wavelength_min_unit))
358        _str += "   Waveln_max:   %s [%s]\n" % \
359            (str(self.wavelength_max), str(self.wavelength_max_unit))
360        _str += "   Waveln_spread:%s [%s]\n" % \
361            (str(self.wavelength_spread), str(self.wavelength_spread_unit))
362        _str += "   Beam_size:    %s [%s]\n" % \
363            (str(self.beam_size), str(self.beam_size_unit))
364        return _str
365
366
367"""
368Definitions of radiation types
369"""
370NEUTRON = 'neutron'
371XRAY = 'x-ray'
372MUON = 'muon'
373ELECTRON = 'electron'
374
375
376class Sample(object):
377    """
378    Class to hold the sample description
379    """
380    ## Short name for sample
381    name = ''
382    ## ID
383    ID = ''
384    ## Thickness [float] [mm]
385    thickness = None
386    thickness_unit = 'mm'
387    ## Transmission [float] [fraction]
388    transmission = None
389    ## Temperature [float] [No Default]
390    temperature = None
391    temperature_unit = None
392    ## Position [Vector] [mm]
393    position = None
394    position_unit = 'mm'
395    ## Orientation [Vector] [degrees]
396    orientation = None
397    orientation_unit = 'degree'
398    ## Details
399    details = None
400
401    def __init__(self):
402        self.position = Vector()
403        self.orientation = Vector()
404        self.details = []
405
406    def __str__(self):
407        _str = "Sample:\n"
408        _str += "   ID:           %s\n" % str(self.ID)
409        _str += "   Transmission: %s\n" % str(self.transmission)
410        _str += "   Thickness:    %s [%s]\n" % \
411            (str(self.thickness), str(self.thickness_unit))
412        _str += "   Temperature:  %s [%s]\n" % \
413            (str(self.temperature), str(self.temperature_unit))
414        _str += "   Position:     %s [%s]\n" % \
415            (str(self.position), str(self.position_unit))
416        _str += "   Orientation:  %s [%s]\n" % \
417            (str(self.orientation), str(self.orientation_unit))
418
419        _str += "   Details:\n"
420        for item in self.details:
421            _str += "      %s\n" % item
422
423        return _str
424
425
426class Process(object):
427    """
428    Class that holds information about the processes
429    performed on the data.
430    """
431    name = ''
432    date = ''
433    description = ''
434    term = None
435    notes = None
436
437    def __init__(self):
438        self.term = []
439        self.notes = []
440
441    def is_empty(self):
442        """
443            Return True if the object is empty
444        """
445        return len(self.name) == 0 and len(self.date) == 0 and len(self.description) == 0 \
446            and len(self.term) == 0 and len(self.notes) == 0
447
448    def single_line_desc(self):
449        """
450            Return a single line string representing the process
451        """
452        return "%s %s %s" % (self.name, self.date, self.description)
453
454    def __str__(self):
455        _str = "Process:\n"
456        _str += "   Name:         %s\n" % self.name
457        _str += "   Date:         %s\n" % self.date
458        _str += "   Description:  %s\n" % self.description
459        for item in self.term:
460            _str += "   Term:         %s\n" % item
461        for item in self.notes:
462            _str += "   Note:         %s\n" % item
463        return _str
464
465
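# Example (illustrative sketch; the field values are invented): a Process
# records one step applied to the data.
# >>> proc = Process()
# >>> proc.name = "data reduction"
# >>> proc.date = "2015-01-01"
# >>> proc.description = "radial average"
# >>> proc.term.append("background subtracted")
# >>> proc.is_empty()
# False
# >>> proc.single_line_desc()
# 'data reduction 2015-01-01 radial average'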
466class TransmissionSpectrum(object):
467    """
468    Class that holds information about transmission spectrum
469    for white beams and spallation sources.
470    """
471    name = ''
472    timestamp = ''
473    ## Wavelength (float) [A]
474    wavelength = None
475    wavelength_unit = 'A'
476    ## Transmission (float) [unit less]
477    transmission = None
478    transmission_unit = ''
479    ## Transmission Deviation (float) [unit less]
480    transmission_deviation = None
481    transmission_deviation_unit = ''
482
483    def __init__(self):
484        self.wavelength = []
485        self.transmission = []
486        self.transmission_deviation = []
487
488    def __str__(self):
489        _str = "Transmission Spectrum:\n"
490        _str += "   Name:             \t{0}\n".format(self.name)
491        _str += "   Timestamp:        \t{0}\n".format(self.timestamp)
492        _str += "   Wavelength unit:  \t{0}\n".format(self.wavelength_unit)
493        _str += "   Transmission unit:\t{0}\n".format(self.transmission_unit)
494        _str += "   Trans. Dev. unit:  \t{0}\n".format(\
495                                            self.transmission_deviation_unit)
496        length_list = [len(self.wavelength), len(self.transmission), \
497                len(self.transmission_deviation)]
498        _str += "   Number of Pts:    \t{0}\n".format(max(length_list))
499        return _str
500
501
502class DataInfo(object):
503    """
504    Class to hold the data read from a file.
505    It includes four blocks of data for the
506    instrument description, the sample description,
507    the data itself and any other meta data.
508    """
509    ## Title
510    title = ''
511    ## Run number
512    run = None
513    ## Run name
514    run_name = None
515    ## File name
516    filename = ''
517    ## Notes
518    notes = None
519    ## Processes (Action on the data)
520    process = None
521    ## Instrument name
522    instrument = ''
523    ## Detector information
524    detector = None
525    ## Sample information
526    sample = None
527    ## Source information
528    source = None
529    ## Collimation information
530    collimation = None
531    ## Transmission Spectrum info
532    trans_spectrum = None
533    ## Additional meta-data
534    meta_data = None
535    ## Loading errors
536    errors = None
537
538    def __init__(self):
539        """
540        Initialization
541        """
542        ## Title
543        self.title = ''
544        ## Run number
545        self.run = []
546        self.run_name = {}
547        ## File name
548        self.filename = ''
549        ## Notes
550        self.notes = []
551        ## Processes (Action on the data)
552        self.process = []
553        ## Instrument name
554        self.instrument = ''
555        ## Detector information
556        self.detector = []
557        ## Sample information
558        self.sample = Sample()
559        ## Source information
560        self.source = Source()
561        ## Collimation information
562        self.collimation = []
563        ## Transmission Spectrum
564        self.trans_spectrum = []
565        ## Additional meta-data
566        self.meta_data = {}
567        ## Loading errors
568        self.errors = []
569
570    def append_empty_process(self):
571        """
572        Append an empty Process object to the process list."""
573        self.process.append(Process())
574
575    def add_notes(self, message=""):
576        """
577        Add notes to datainfo
578        """
579        self.notes.append(message)
580
581    def __str__(self):
582        """
583        Nice printout
584        """
585        _str = "File:            %s\n" % self.filename
586        _str += "Title:           %s\n" % self.title
587        _str += "Run:             %s\n" % str(self.run)
588        _str += "Instrument:      %s\n" % str(self.instrument)
589        _str += "%s\n" % str(self.sample)
590        _str += "%s\n" % str(self.source)
591        for item in self.detector:
592            _str += "%s\n" % str(item)
593        for item in self.collimation:
594            _str += "%s\n" % str(item)
595        for item in self.process:
596            _str += "%s\n" % str(item)
597        for item in self.notes:
598            _str += "%s\n" % str(item)
599        for item in self.trans_spectrum:
600            _str += "%s\n" % str(item)
601        return _str
602
603    # Private method to perform operation. Not implemented for DataInfo,
604    # but should be implemented for each data class inherited from DataInfo
605    # that holds actual data (ex.: Data1D)
606    def _perform_operation(self, other, operation):
607        """
608        Private method to perform operation. Not implemented for DataInfo,
609        but should be implemented for each data class inherited from DataInfo
610        that holds actual data (ex.: Data1D)
611        """
612        return NotImplemented
613
614    def _perform_union(self, other):
615        """
616        Private method to perform union operation. Not implemented for DataInfo,
617        but should be implemented for each data class inherited from DataInfo
618        that holds actual data (ex.: Data1D)
619        """
620        return NotImplemented
621
622    def __add__(self, other):
623        """
624        Add two data sets
625
626        :param other: data set to add to the current one
627        :return: new data set
628        :raise ValueError: raised when two data sets are incompatible
629        """
630        def operation(a, b):
631            return a + b
632        return self._perform_operation(other, operation)
633
634    def __radd__(self, other):
635        """
636        Add two data sets
637
638        :param other: data set to add to the current one
639        :return: new data set
640        :raise ValueError: raised when two data sets are incompatible
641        """
642        def operation(a, b):
643            return b + a
644        return self._perform_operation(other, operation)
645
646    def __sub__(self, other):
647        """
648        Subtract two data sets
649
650        :param other: data set to subtract from the current one
651        :return: new data set
652        :raise ValueError: raised when two data sets are incompatible
653        """
654        def operation(a, b):
655            return a - b
656        return self._perform_operation(other, operation)
657
658    def __rsub__(self, other):
659        """
660        Subtract two data sets
661
662        :param other: data set to subtract from the current one
663        :return: new data set
664        :raise ValueError: raised when two data sets are incompatible
665        """
666        def operation(a, b):
667            return b - a
668        return self._perform_operation(other, operation)
669
670    def __mul__(self, other):
671        """
672        Multiply two data sets
673
674        :param other: data set to multiply with the current one
675        :return: new data set
676        :raise ValueError: raised when two data sets are incompatible
677        """
678        def operation(a, b):
679            return a * b
680        return self._perform_operation(other, operation)
681
682    def __rmul__(self, other):
683        """
684        Multiply two data sets
685
686        :param other: data set to multiply with the current one
687        :return: new data set
688        :raise ValueError: raised when two data sets are incompatible
689        """
690        def operation(a, b):
691            return b * a
692        return self._perform_operation(other, operation)
693
694    def __div__(self, other):
695        """
696        Divide this data set by another
697
698        :param other: data set that the current one is divided by
699        :return: new data set
700        :raise ValueError: raised when two data sets are incompatible
701        """
702        def operation(a, b):
703            return a/b
704        return self._perform_operation(other, operation)
705
706    def __rdiv__(self, other):
707        """
708        Divide another data set by this one
709
710        :param other: data set to be divided by the current one
711        :return: new data set
712        :raise ValueError: raised when two data sets are incompatible
713        """
714        def operation(a, b):
715            return b/a
716        return self._perform_operation(other, operation)
717
718    def __or__(self, other):
719        """
720        Form the union of this data set with another
721
722        :param other: data set to be unified
723        :return: new data set
724        :raise ValueError: raised when two data sets are incompatible
725        """
726        return self._perform_union(other)
727
728    def __ror__(self, other):
729        """
730        Form the union of this data set with another
731
732        :param other: data set to be unified
733        :return: new data set
734        :raise ValueError: raised when two data sets are incompatible
735        """
736        return self._perform_union(other)
737
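# Example (illustrative sketch; the numbers below are invented): the
# arithmetic operators above delegate to _perform_operation, which the
# concrete data classes defined below (e.g. Data1D) implement with error
# propagation via Uncertainty.
# >>> d1 = Data1D(x=[0.01, 0.02], y=[2.0, 3.0], dy=[0.1, 0.1])
# >>> d2 = Data1D(x=[0.01, 0.02], y=[1.0, 1.0], dy=[0.1, 0.1])
# >>> diff = d1 - d2                 # requires matching x values
# >>> scaled = 2.0 * d1              # plain numbers are accepted as well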
738class SESANSData1D(plottable_sesans1D, DataInfo):
739    """
740    SESANS 1D data class
741    """
742    x_unit = 'nm'
743    y_unit = 'pol'
744
745    def __init__(self, x=None, y=None, lam=None, dx=None, dy=None, dlam=None):
746        DataInfo.__init__(self)
747        plottable_sesans1D.__init__(self, x, y, lam, dx, dy, dlam)
748
749    def __str__(self):
750        """
751        Nice printout
752        """
753        _str = "%s\n" % DataInfo.__str__(self)
754        _str += "Data:\n"
755        _str += "   Type:         %s\n" % self.__class__.__name__
756        _str += "   X-axis:       %s\t[%s]\n" % (self._xaxis, self._xunit)
757        _str += "   Y-axis:       %s\t[%s]\n" % (self._yaxis, self._yunit)
758        _str += "   Length:       %g\n" % len(self.x)
759        return _str
760
761    def clone_without_data(self, length=0, clone=None):
762        """
763        Clone the current object, without copying the data (which
764        will be filled out by a subsequent operation).
765        The data arrays will be initialized to zero.
766
767        :param length: length of the data array to be initialized
768        :param clone: if provided, the data will be copied to clone
769        """
770        from copy import deepcopy
771        if clone is None or not issubclass(clone.__class__, Data1D):
772            x = numpy.zeros(length)
773            dx = numpy.zeros(length)
774            y = numpy.zeros(length)
775            dy = numpy.zeros(length)
776            clone = Data1D(x, y, dx=dx, dy=dy)
777
778        clone.title = self.title
779        clone.run = self.run
780        clone.filename = self.filename
781        clone.instrument = self.instrument
782        clone.notes = deepcopy(self.notes)
783        clone.process = deepcopy(self.process)
784        clone.detector = deepcopy(self.detector)
785        clone.sample = deepcopy(self.sample)
786        clone.source = deepcopy(self.source)
787        clone.collimation = deepcopy(self.collimation)
788        clone.trans_spectrum = deepcopy(self.trans_spectrum)
789        clone.meta_data = deepcopy(self.meta_data)
790        clone.errors = deepcopy(self.errors)
791
792        return clone
793
794class Data1D(plottable_1D, DataInfo):
795    """
796    1D data class
797    """
798    x_unit = '1/A'
799    y_unit = '1/cm'
800
801    def __init__(self, x, y, dx=None, dy=None):
802        DataInfo.__init__(self)
803        plottable_1D.__init__(self, x, y, dx, dy)
804
805    def __str__(self):
806        """
807        Nice printout
808        """
809        _str = "%s\n" % DataInfo.__str__(self)
810        _str += "Data:\n"
811        _str += "   Type:         %s\n" % self.__class__.__name__
812        _str += "   X-axis:       %s\t[%s]\n" % (self._xaxis, self._xunit)
813        _str += "   Y-axis:       %s\t[%s]\n" % (self._yaxis, self._yunit)
814        _str += "   Length:       %g\n" % len(self.x)
815        return _str
816
817    def is_slit_smeared(self):
818        """
819        Check whether the data has slit smearing information
820        :return: True if slit smearing info is present, False otherwise
821        """
822        def _check(v):
823            if (v.__class__ == list or v.__class__ == numpy.ndarray) \
824                and len(v) > 0 and min(v) > 0:
825                return True
826            return False
827        return _check(self.dxl) or _check(self.dxw)
828
829    def clone_without_data(self, length=0, clone=None):
830        """
831        Clone the current object, without copying the data (which
832        will be filled out by a subsequent operation).
833        The data arrays will be initialized to zero.
834
835        :param length: length of the data array to be initialized
836        :param clone: if provided, the data will be copied to clone
837        """
838        from copy import deepcopy
839
840        if clone is None or not issubclass(clone.__class__, Data1D):
841            x = numpy.zeros(length)
842            dx = numpy.zeros(length)
843            y = numpy.zeros(length)
844            dy = numpy.zeros(length)
845            clone = Data1D(x, y, dx=dx, dy=dy)
846
847        clone.title = self.title
848        clone.run = self.run
849        clone.filename = self.filename
850        clone.instrument = self.instrument
851        clone.notes = deepcopy(self.notes)
852        clone.process = deepcopy(self.process)
853        clone.detector = deepcopy(self.detector)
854        clone.sample = deepcopy(self.sample)
855        clone.source = deepcopy(self.source)
856        clone.collimation = deepcopy(self.collimation)
857        clone.trans_spectrum = deepcopy(self.trans_spectrum)
858        clone.meta_data = deepcopy(self.meta_data)
859        clone.errors = deepcopy(self.errors)
860
861        return clone
862
863    def _validity_check(self, other):
864        """
865        Checks that the data lengths are compatible.
866        Checks that the x vectors are compatible.
867        Returns errors vectors equal to original
868        errors vectors if they were present or vectors
869        of zeros when none was found.
870
871        :param other: other data set for operation
872        :return: dy for self, dy for other [numpy arrays]
873        :raise ValueError: when lengths are not compatible
874        """
875        dy_other = None
876        if isinstance(other, Data1D):
877            # Check that data lengths are the same
878            if len(self.x) != len(other.x) or \
879                len(self.y) != len(other.y):
880                msg = "Unable to perform operation: data lengths are not equal"
881                raise ValueError(msg)
882            # Here we could also extrapolate between data points
883            TOLERANCE = 0.01
884            for i in range(len(self.x)):
885                if math.fabs((self.x[i] - other.x[i])/self.x[i]) > TOLERANCE:
886                    msg = "Incompatible data sets: x-values do not match"
887                    raise ValueError(msg)
888
889            # Check that the other data set has errors, otherwise
890            # create zero vector
891            dy_other = other.dy
892            if other.dy is None or (len(other.dy) != len(other.y)):
893                dy_other = numpy.zeros(len(other.y))
894
895        # Check that we have errors, otherwise create zero vector
896        dy = self.dy
897        if self.dy is None or (len(self.dy) != len(self.y)):
898            dy = numpy.zeros(len(self.y))
899
900        return dy, dy_other
901
902    def _perform_operation(self, other, operation):
903        """
904        Apply a binary operation point by point, propagating errors."""
905        # First, check the data compatibility
906        dy, dy_other = self._validity_check(other)
907        result = self.clone_without_data(len(self.x))
908        if self.dxw is None:
909            result.dxw = None
910        else:
911            result.dxw = numpy.zeros(len(self.x))
912        if self.dxl is None:
913            result.dxl = None
914        else:
915            result.dxl = numpy.zeros(len(self.x))
916
917        for i in range(len(self.x)):
918            result.x[i] = self.x[i]
919            if self.dx is not None and len(self.x) == len(self.dx):
920                result.dx[i] = self.dx[i]
921            if self.dxw is not None and len(self.x) == len(self.dxw):
922                result.dxw[i] = self.dxw[i]
923            if self.dxl is not None and len(self.x) == len(self.dxl):
924                result.dxl[i] = self.dxl[i]
925
926            a = Uncertainty(self.y[i], dy[i]**2)
927            if isinstance(other, Data1D):
928                b = Uncertainty(other.y[i], dy_other[i]**2)
929                if other.dx is not None:
930                    result.dx[i] *= self.dx[i]
931                    result.dx[i] += (other.dx[i]**2)
932                    result.dx[i] /= 2
933                    result.dx[i] = math.sqrt(result.dx[i])
934                if result.dxl is not None and other.dxl is not None:
935                    result.dxl[i] *= self.dxl[i]
936                    result.dxl[i] += (other.dxl[i]**2)
937                    result.dxl[i] /= 2
938                    result.dxl[i] = math.sqrt(result.dxl[i])
939            else:
940                b = other
941
942            output = operation(a, b)
943            result.y[i] = output.x
944            result.dy[i] = math.sqrt(math.fabs(output.variance))
945        return result
946
947    def _validity_check_union(self, other):
948        """
949        Checks that the other data set is compatible for a union
950        operation, i.e. that it is also a Data1D instance. Unlike
951        _validity_check, it does not compare x values and does not
952        return error vectors; it only validates the type of the
953        other data set.
954
955        :param other: other data set for operation
956        :return: bool
957        :raise ValueError: when data types are not compatible
958        """
959        if not isinstance(other, Data1D):
960            msg = "Unable to perform operation: different types of data set"
961            raise ValueError(msg)
962        return True
963
964    def _perform_union(self, other):
965        """
966        Concatenate two data sets and sort the combined points by x."""
967        # First, check the data compatibility
968        self._validity_check_union(other)
969        result = self.clone_without_data(len(self.x) + len(other.x))
970        if self.dy is None or other.dy is None:
971            result.dy = None
972        else:
973            result.dy = numpy.zeros(len(self.x) + len(other.x))
974        if self.dx is None or other.dx is None:
975            result.dx = None
976        else:
977            result.dx = numpy.zeros(len(self.x) + len(other.x))
978        if self.dxw is None or other.dxw is None:
979            result.dxw = None
980        else:
981            result.dxw = numpy.zeros(len(self.x) + len(other.x))
982        if self.dxl is None or other.dxl is None:
983            result.dxl = None
984        else:
985            result.dxl = numpy.zeros(len(self.x) + len(other.x))
986
987        result.x = numpy.append(self.x, other.x)
988        #argsorting
989        ind = numpy.argsort(result.x)
990        result.x = result.x[ind]
991        result.y = numpy.append(self.y, other.y)
992        result.y = result.y[ind]
993        if result.dy is not None:
994            result.dy = numpy.append(self.dy, other.dy)
995            result.dy = result.dy[ind]
996        if result.dx is not None:
997            result.dx = numpy.append(self.dx, other.dx)
998            result.dx = result.dx[ind]
999        if result.dxw is not None:
1000            result.dxw = numpy.append(self.dxw, other.dxw)
1001            result.dxw = result.dxw[ind]
1002        if result.dxl is not None:
1003            result.dxl = numpy.append(self.dxl, other.dxl)
1004            result.dxl = result.dxl[ind]
1005        return result
1006
1007
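# Example (illustrative sketch; the numbers below are invented): the union
# operator defined through _perform_union concatenates two Data1D sets and
# re-sorts the combined points by x.
# >>> lowq = Data1D(x=[0.001, 0.002], y=[50.0, 40.0], dy=[1.0, 1.0])
# >>> highq = Data1D(x=[0.1, 0.2], y=[2.0, 1.0], dy=[0.1, 0.1])
# >>> merged = lowq | highq          # len(merged.x) == 4, sorted by x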
1008class Data2D(plottable_2D, DataInfo):
1009    """
1010    2D data class
1011    """
1012    ## Units for Q-values
1013    Q_unit = '1/A'
1014    ## Units for I(Q) values
1015    I_unit = '1/cm'
1016    ## Vector of Q-values at the center of each bin in x
1017    x_bins = None
1018    ## Vector of Q-values at the center of each bin in y
1019    y_bins = None
1020
1021    def __init__(self, data=None, err_data=None, qx_data=None,
1022                 qy_data=None, q_data=None, mask=None,
1023                 dqx_data=None, dqy_data=None):
1024        self.y_bins = []
1025        self.x_bins = []
1026        DataInfo.__init__(self)
1027        plottable_2D.__init__(self, data, err_data, qx_data,
1028                              qy_data, q_data, mask, dqx_data, dqy_data)
1029        if len(self.detector) > 0:
1030            raise RuntimeError("Data2D: Detector bank already filled at init")
1031
1032    def __str__(self):
1033        _str = "%s\n" % DataInfo.__str__(self)
1034        _str += "Data:\n"
1035        _str += "   Type:         %s\n" % self.__class__.__name__
1036        _str += "   X- & Y-axis:  %s\t[%s]\n" % (self._yaxis, self._yunit)
1037        _str += "   Z-axis:       %s\t[%s]\n" % (self._zaxis, self._zunit)
1038        _str += "   Length:       %g \n" % (len(self.data))
1039        _str += "   Shape:        (%d, %d)\n" % (len(self.y_bins), len(self.x_bins))
1040        return _str
1041
1042    def clone_without_data(self, length=0, clone=None):
1043        """
1044        Clone the current object, without copying the data (which
1045        will be filled out by a subsequent operation).
1046        The data arrays will be initialized to zero.
1047
1048        :param length: length of the data array to be initialized
1049        :param clone: if provided, the data will be copied to clone
1050        """
1051        from copy import deepcopy
1052
1053        if clone is None or not issubclass(clone.__class__, Data2D):
1054            data = numpy.zeros(length)
1055            err_data = numpy.zeros(length)
1056            qx_data = numpy.zeros(length)
1057            qy_data = numpy.zeros(length)
1058            q_data = numpy.zeros(length)
1059            mask = numpy.zeros(length)
1060            dqx_data = None
1061            dqy_data = None
1062            clone = Data2D(data=data, err_data=err_data,
1063                           qx_data=qx_data, qy_data=qy_data,
1064                           q_data=q_data, mask=mask)
1065
1066        clone.title = self.title
1067        clone.run = self.run
1068        clone.filename = self.filename
1069        clone.instrument = self.instrument
1070        clone.notes = deepcopy(self.notes)
1071        clone.process = deepcopy(self.process)
1072        clone.detector = deepcopy(self.detector)
1073        clone.sample = deepcopy(self.sample)
1074        clone.source = deepcopy(self.source)
1075        clone.collimation = deepcopy(self.collimation)
1076        clone.trans_spectrum = deepcopy(self.trans_spectrum)
1077        clone.meta_data = deepcopy(self.meta_data)
1078        clone.errors = deepcopy(self.errors)
1079
1080        return clone
1081
1082    def _validity_check(self, other):
1083        """
1084        Checks that the data lengths are compatible.
1085        Checks that the x vectors are compatible.
1086        Returns errors vectors equal to original
1087        errors vectors if they were present or vectors
1088        of zeros when none was found.
1089
1090        :param other: other data set for operation
1091        :return: dy for self, dy for other [numpy arrays]
1092        :raise ValueError: when lengths are not compatible
1093        """
1094        err_other = None
1095        TOLERANCE = 0.01
1096        if isinstance(other, Data2D):
1097            # Check that data lengths are the same
1098            if len(self.data) != len(other.data) or \
1099                len(self.qx_data) != len(other.qx_data) or \
1100                len(self.qy_data) != len(other.qy_data):
1101                msg = "Unable to perform operation: data lengths are not equal"
1102                raise ValueError(msg)
1103            for ind in range(len(self.data)):
1104                if math.fabs((self.qx_data[ind] - other.qx_data[ind])/self.qx_data[ind]) > TOLERANCE:
1105                    msg = "Incompatible data sets: qx-values do not match: %s %s" % (self.qx_data[ind], other.qx_data[ind])
1106                    raise ValueError(msg)
1107                if math.fabs((self.qy_data[ind] - other.qy_data[ind])/self.qy_data[ind]) > TOLERANCE:
1108                    msg = "Incompatible data sets: qy-values do not match: %s %s" % (self.qy_data[ind], other.qy_data[ind])
1109                    raise ValueError(msg)
1110
1111            # Check that the other data set has errors, otherwise create zero vector
1112            err_other = other.err_data
1113            if other.err_data is None or \
1114                (len(other.err_data) != len(other.data)):
1115                err_other = numpy.zeros(len(other.data))
1116
1117        # Check that we have errors, otherwise create zero vector
1118        err = self.err_data
1119        if self.err_data is None or \
1120            (len(self.err_data) != len(self.data)):
1121            err = numpy.zeros(len(self.data))
1122        return err, err_other
1123
1124    def _perform_operation(self, other, operation):
1125        """
1126        Perform 2D operations between data sets
1127
1128        :param other: other data set
1129        :param operation: function defining the operation
1130        """
1131        # First, check the data compatibility
1132        dy, dy_other = self._validity_check(other)
1133        result = self.clone_without_data(numpy.size(self.data))
1134        if self.dqx_data is None or self.dqy_data is None:
1135            result.dqx_data = None
1136            result.dqy_data = None
1137        else:
1138            result.dqx_data = numpy.zeros(len(self.data))
1139            result.dqy_data = numpy.zeros(len(self.data))
1140        for i in range(numpy.size(self.data)):
1141            result.data[i] = self.data[i]
1142            if self.err_data is not None and \
1143                numpy.size(self.data) == numpy.size(self.err_data):
1144                result.err_data[i] = self.err_data[i]
1145            if self.dqx_data is not None:
1146                result.dqx_data[i] = self.dqx_data[i]
1147            if self.dqy_data is not None:
1148                result.dqy_data[i] = self.dqy_data[i]
1149            result.qx_data[i] = self.qx_data[i]
1150            result.qy_data[i] = self.qy_data[i]
1151            result.q_data[i] = self.q_data[i]
1152            result.mask[i] = self.mask[i]
1153
1154            a = Uncertainty(self.data[i], dy[i]**2)
1155            if isinstance(other, Data2D):
1156                b = Uncertainty(other.data[i], dy_other[i]**2)
1157                if other.dqx_data is not None and \
1158                        result.dqx_data is not None:
1159                    result.dqx_data[i] *= self.dqx_data[i]
1160                    result.dqx_data[i] += (other.dqx_data[i]**2)
1161                    result.dqx_data[i] /= 2
1162                    result.dqx_data[i] = math.sqrt(result.dqx_data[i])
1163                if other.dqy_data is not None and \
1164                        result.dqy_data is not None:
1165                    result.dqy_data[i] *= self.dqy_data[i]
1166                    result.dqy_data[i] += (other.dqy_data[i]**2)
1167                    result.dqy_data[i] /= 2
1168                    result.dqy_data[i] = math.sqrt(result.dqy_data[i])
1169            else:
1170                b = other
1171            output = operation(a, b)
1172            result.data[i] = output.x
1173            result.err_data[i] = math.sqrt(math.fabs(output.variance))
1174        return result
1175
1176    def _validity_check_union(self, other):
1177        """
1178        Checks that the other data set is compatible for a union
1179        operation, i.e. that it is also a Data2D instance. Only the
1180        type is validated; q values are not compared and no error
1181        vectors are returned (compare with _validity_check, which
1182        does return them).
1183
1184        :param other: other data set for operation
1185        :return: bool
1186        :raise ValueError: when data types are not compatible
1187        """
1188        if not isinstance(other, Data2D):
1189            msg = "Unable to perform operation: different types of data set"
1190            raise ValueError(msg)
1191        return True
1192
1193    def _perform_union(self, other):
1194        """
1195        Perform a union between two 2D data sets
1196
1197        :param other: other data set
1198        :return: new data set containing the points of both
1199        """
1200        # First, check the data compatibility
1201        self._validity_check_union(other)
1202        result = self.clone_without_data(numpy.size(self.data) + \
1203                                         numpy.size(other.data))
1204        result.xmin = self.xmin
1205        result.xmax = self.xmax
1206        result.ymin = self.ymin
1207        result.ymax = self.ymax
1208        if self.dqx_data is None or self.dqy_data is None or \
1209                other.dqx_data is None or other.dqy_data is None:
1210            result.dqx_data = None
1211            result.dqy_data = None
1212        else:
1213            result.dqx_data = numpy.zeros(len(self.data) + \
1214                                         numpy.size(other.data))
1215            result.dqy_data = numpy.zeros(len(self.data) + \
1216                                         numpy.size(other.data))
1217
1218        result.data = numpy.append(self.data, other.data)
1219        result.qx_data = numpy.append(self.qx_data, other.qx_data)
1220        result.qy_data = numpy.append(self.qy_data, other.qy_data)
1221        result.q_data = numpy.append(self.q_data, other.q_data)
1222        result.mask = numpy.append(self.mask, other.mask)
1223        if result.err_data is not None:
1224            result.err_data = numpy.append(self.err_data, other.err_data)
1225        if self.dqx_data is not None:
1226            result.dqx_data = numpy.append(self.dqx_data, other.dqx_data)
1227        if self.dqy_data is not None:
1228            result.dqy_data = numpy.append(self.dqy_data, other.dqy_data)
1229
1230        return result
1231
1232
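# Example (illustrative sketch; the arrays below are invented): a tiny
# Data2D built by hand. In practice, readers fill these arrays from the
# detector image.
# >>> import numpy
# >>> qx = numpy.array([-0.01, 0.01, -0.01, 0.01])
# >>> qy = numpy.array([-0.01, -0.01, 0.01, 0.01])
# >>> q = numpy.sqrt(qx * qx + qy * qy)
# >>> data = numpy.array([1.0, 2.0, 3.0, 4.0])
# >>> err = numpy.sqrt(data)
# >>> mask = numpy.ones(4, dtype=bool)
# >>> d2 = Data2D(data=data, err_data=err, qx_data=qx, qy_data=qy,
# ...             q_data=q, mask=mask)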
1233def combine_data_info_with_plottable(data, datainfo):
1234    """
1235    A function that combines a DataInfo object with a plottable_1D or plottable_2D data object.
1236
1237    :param data: A plottable_1D or plottable_2D data object
1238    :return: A fully specified Data1D or Data2D object
1239    """
1240
1241    final_dataset = None
1242    if isinstance(data, plottable_1D):
1243        final_dataset = Data1D(data.x, data.y)
1244        final_dataset.dx = data.dx
1245        final_dataset.dy = data.dy
1246        final_dataset.dxl = data.dxl
1247        final_dataset.dxw = data.dxw
1248        final_dataset.xaxis(data._xaxis, data._xunit)
1249        final_dataset.yaxis(data._yaxis, data._yunit)
1250    elif isinstance(data, plottable_2D):
1251        final_dataset = Data2D(data.data, data.err_data, data.qx_data, data.qy_data, data.q_data,
1252                               data.mask, data.dqx_data, data.dqy_data)
1253        final_dataset.xaxis(data._xaxis, data._xunit)
1254        final_dataset.yaxis(data._yaxis, data._yunit)
1255        final_dataset.zaxis(data._zaxis, data._zunit)
1256        final_dataset.x_bins = data.x_bins
1257        final_dataset.y_bins = data.y_bins
1258    else:
1259        return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \
1260                        "plottable2d data object"
1261        return return_string
1262
1263    final_dataset.xmax = data.xmax
1264    final_dataset.ymax = data.ymax
1265    final_dataset.xmin = data.xmin
1266    final_dataset.ymin = data.ymin
1267    final_dataset.title = datainfo.title
1268    final_dataset.run = datainfo.run
1269    final_dataset.run_name = datainfo.run_name
1270    final_dataset.filename = datainfo.filename
1271    final_dataset.notes = datainfo.notes
1272    final_dataset.process = datainfo.process
1273    final_dataset.instrument = datainfo.instrument
1274    final_dataset.detector = datainfo.detector
1275    final_dataset.sample = datainfo.sample
1276    final_dataset.source = datainfo.source
1277    final_dataset.collimation = datainfo.collimation
1278    final_dataset.trans_spectrum = datainfo.trans_spectrum
1279    final_dataset.meta_data = datainfo.meta_data
1280    final_dataset.errors = datainfo.errors
1281    return final_dataset
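# Example (illustrative sketch; the values below are invented): a reader can
# promote a bare plottable to a full Data1D once the metadata has been
# collected. Note that the function reads xmin/xmax/ymin/ymax from the
# plottable, so they must be set first.
# >>> info = DataInfo()
# >>> info.filename = "example.xml"
# >>> pl = plottable_1D(x=[0.01, 0.02], y=[5.0, 4.0], dy=[0.2, 0.2])
# >>> pl.xmin, pl.xmax = min(pl.x), max(pl.x)
# >>> pl.ymin, pl.ymax = min(pl.y), max(pl.y)
# >>> data1d = combine_data_info_with_plottable(pl, info)
# >>> data1d.filename
# 'example.xml'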