source: sasview/DataLoader/smearing_2d.py @ f83b94a

Last change on this file since f83b94a was 6c2d1a1, checked in by Jae Cho <jhjcho@…>, 15 years ago

put more doc

  • Property mode set to 100644
File size: 8.3 KB
1"""
2This software was developed by the University of Tennessee as part of the
3Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
4project funded by the US National Science Foundation.
5
6See the license text in license.txt
7
8copyright 2009, University of Tennessee
9"""
## TODO: Needs tests, and check the Gaussian averaging
import numpy, math, time
## Singular point
SIGMA_ZERO = 1.0e-010
## Limit of how many sigmas to be covered by the Gaussian smearing
# default: 2.5 to cover about 98.8% of the Gaussian
LIMIT = 2.5
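# (for reference: the fraction of a 1d Gaussian within +/- n sigma is erf(n/sqrt(2));
#  erf(2.5/sqrt(2)) ~= 0.9876)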
## Default number of bins in the r and phi directions for each accuracy level
R_BIN = {'Xhigh':10, 'High':5, 'Med':5, 'Low':3}
PHI_BIN = {'Xhigh':20, 'High':12, 'Med':6, 'Low':4}
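# Each data point is over-sampled with nr*nphi extra (qx, qy) points,
# e.g. 3*4 = 12 for 'Low' accuracy and 10*20 = 200 for 'Xhigh'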

class Smearer2D:
    """
        Gaussian Q smearing class for SANS 2d data
    """

    def __init__(self, data=None, model=None, index=None, limit=LIMIT, accuracy='Low'):
        """
            Assumption: equally spaced bins in dq_r, dq_phi space.

            @param data: 2d data used to set the smearing parameters
            @param model: model function
            @param index: 1d boolean array of len(data) defining the range of the calculation (True = include the point)
            @param limit: maximum number of sigmas covered by the Gaussian smearing
            @param accuracy: 'Low', 'Med', 'High' or 'Xhigh'; sets the number of over-sampling bins in the dq_r and dq_phi directions
        """
        ## data
        self.data = data
        ## model
        self.model = model
        ## Accuracy: higher means more sampling points in both the r and phi directions.
        self.accuracy = accuracy
        ## number of bins in the r axis for over-sampling
        self.nr = R_BIN
        ## number of bins in the phi axis for over-sampling
        self.nphi = PHI_BIN
        ## maximum nsigmas
        self.limit = limit
        self.index = index
        self.smearer = True


    def get_data(self):
        """
            Get qx_data, qy_data, dqx_data, dqy_data and calculate phi_data = arctan(qx_data/qy_data)
        """
        if self.data is None or self.data.__class__.__name__ == 'Data1D':
            return None
        if self.data.dqx_data is None or self.data.dqy_data is None:
            return None
        self.qx_data = self.data.qx_data[self.index]
        self.qy_data = self.data.qy_data[self.index]
        self.dqx_data = self.data.dqx_data[self.index]
        self.dqy_data = self.data.dqy_data[self.index]
        self.phi_data = numpy.arctan(self.qx_data/self.qy_data)
        ## Remove singular points if they exist
        self.dqx_data[self.dqx_data < SIGMA_ZERO] = SIGMA_ZERO
        self.dqy_data[self.dqy_data < SIGMA_ZERO] = SIGMA_ZERO
        return True

    def set_accuracy(self, accuracy='Low'):
        """
            Set accuracy: string
        """
        self.accuracy = accuracy

    def set_smearer(self, smearer=True):
        """
            Set whether or not the smearer will be used
        """
        self.smearer = smearer

    def set_data(self, data=None):
        """
            Set data: Data2D object
        """
        self.data = data

    def set_model(self, model=None):
        """
            Set model
        """
        self.model = model

    def set_index(self, index=None):
        """
            Set index: 1d boolean array
        """
        self.index = index

    def get_value(self):
        """
            Over-sample each data point with nr * nphi bins, calculate the Gaussian weights,
            then find the smeared intensity.
            # For nr=5 and nphi=4 bins, this is equivalent (but speed-optimized by roughly a factor of ten) to the following:
            =====================================================================================
            ## Remove the singular points if they exist
            self.dqx_data[self.dqx_data==0] = SIGMA_ZERO
            self.dqy_data[self.dqy_data==0] = SIGMA_ZERO
            for phi in range(0,4):
                for r in range(0,5):
                    n = phi*5 + r
                    r = r + 0.25
                    dphi = phi*2.0*math.pi/4.0 + numpy.arctan(self.qy_data[index_model]/self.dqy_data[index_model]/self.qx_data[index_model]*self.dqx_data[index_model])
                    dq = r*numpy.sqrt( self.dqx_data[index_model]*self.dqx_data[index_model] \
                        + self.dqy_data[index_model]*self.dqy_data[index_model] )
                    # integral of r*math.exp(-0.5*r*r) dr over each bin; the integration may not be needed
                    weight_res[n] = math.exp(-0.5*((r-0.25)*(r-0.25))) - math.exp(-0.5*((r+0.25)*(r+0.25)))
                    #if phi != 0 and r != 0:
                    qx_res = numpy.append(qx_res, self.qx_data[index_model] + dq*math.cos(dphi))
                    qy_res = numpy.append(qy_res, self.qy_data[index_model] + dq*math.sin(dphi))
            ## Then compute I(qx_res, qy_res) and do the weighted averaging.
            =====================================================================================
        """
        valid = self.get_data()
        if valid is None:
            return valid
        if not self.smearer:
            return self.model.evalDistribution([self.qx_data, self.qy_data])
        st = time.time()
        nr = self.nr[self.accuracy]
        nphi = self.nphi[self.accuracy]

        # data length in the range of self.index
        len_data = len(self.qx_data)
        len_datay = len(self.qy_data)

        # Number of bins in the dqr direction (polar coordinate of dqx and dqy)
        bin_size = self.limit/nr
        # Total number of bins = # of bins in the dq_r direction times # of bins in the dq_phi direction
        n_bins = nr * nphi
        # Mean value of dqr at each bin, starting from half the bin size
        r = bin_size/2.0 + numpy.arange(nr)*bin_size
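        # e.g. for 'Low' accuracy: nr = 3, bin_size = 2.5/3, so r ~= [0.417, 1.25, 2.083]
        # in units of the local dq sigma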
        # mean values of dphi at each bin
        phi = numpy.arange(nphi)
        dphi = phi*2.0*math.pi/nphi
        dphi = dphi.repeat(nr)
        ## Transform to polar coordinates and set dphi for each data point; 1d array
        dphi = dphi.repeat(len_data) + numpy.arctan(self.qy_data*self.dqx_data/self.qx_data/self.dqy_data).repeat(n_bins).reshape(len_data,n_bins).transpose().flatten()
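        # (the per-point offset arctan(qy*dqx/(qx*dqy)) places the dphi = 0 sample
        #  along the radial q direction of that data point)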
        ## Find the Gaussian weight for each dq bin: the weight depends only on the r direction (the integration may not be needed)
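        ## (the expression below is the integral of t*exp(-t*t/2) dt over each r bin,
        ##  since d/dt[-exp(-t*t/2)] = t*exp(-t*t/2))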
        weight_res = numpy.exp(-0.5*((r-bin_size/2.0)*(r-bin_size/2.0))) - numpy.exp(-0.5*((r+bin_size/2.0)*(r+bin_size/2.0)))
        weight_res = weight_res.repeat(nphi).reshape(nr,nphi).transpose().flatten()

        ## Set dr for all dq bins for averaging
        dr = r.repeat(nphi).reshape(nr,nphi).transpose().flatten()
        ## Set dqr for all data points
        dqx = numpy.outer(dr, self.dqx_data).flatten()
        dqy = numpy.outer(dr, self.dqy_data).flatten()
        qx = self.qx_data.repeat(n_bins).reshape(len_data,n_bins).transpose().flatten()
        qy = self.qy_data.repeat(n_bins).reshape(len_data,n_bins).transpose().flatten()

        ## Over-sampled qx_data and qy_data.
        qx_res = qx + dqx*numpy.cos(dphi)
        qy_res = qy + dqy*numpy.sin(dphi)

        ## Evaluate all points
        val = self.model.evalDistribution([qx_res, qy_res])

        ## Reshape into a 2d array to use numpy weighted averaging
        value_res = val.reshape(n_bins, len(self.qx_data))

        ## Averaging with Gaussian weighting: normalization included.
        value = numpy.average(value_res, axis=0, weights=weight_res)

        ## Return the smeared values in the range of self.index
        return value


if __name__ == '__main__':
    ## Test
    x = 0.001*numpy.arange(1, 11)
    dx = numpy.ones(len(x))*0.001
    y = 0.001*numpy.arange(1, 11)
    dy = numpy.ones(len(x))*0.001
    z = numpy.ones(10)
    dz = numpy.sqrt(z)

    from DataLoader import Data2D
    #for i in range(10): print i, 0.001 + i*0.008/9.0
    #for i in range(100): print i, int(math.floor( (i/ (100/9.0)) ))
    out = Data2D()
    out.data = z
    out.qx_data = x
    out.qy_data = y
    out.dqx_data = dx
    out.dqy_data = dy
    index = numpy.ones(len(x), dtype=bool)
    out.mask = index
    from sans.models.Constant import Constant
    model = Constant()

    value = Smearer2D(out, model, index).get_value()
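    # A finer over-sampling grid could be requested via the accuracy keyword,
    # e.g. Smearer2D(out, model, index, accuracy='Xhigh').get_value()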
    ## All data are ones, so the smeared values should also be ones.
    print "Data length =", len(value), ", Data=", value