source: sasview/sansmodels/src/sans/models/smearing_2d.py @ 44fa116

Last change on this file since 44fa116 was 6e8b436, checked in by Jae Cho <jhjcho@…>, 14 years ago

minor correction on 2d smearer and added smearer documentation

#####################################################################
#This software was developed by the University of Tennessee as part of the
#Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
#project funded by the US National Science Foundation.
#See the license text in license.txt
#copyright 2008, University of Tennessee
######################################################################

## TODO: Needs tests, and check the Gaussian averaging
import numpy
import math
## Singular point
SIGMA_ZERO = 1.0e-010
## Limit on how many sigmas are covered by the Gaussian smearing
# default: 3.0 to cover 99.7% of the Gaussian
LIMIT = 3.0
## Defaults
R_BIN = {'Xhigh': 10.0, 'High': 5.0, 'Med': 5.0, 'Low': 3.0}
PHI_BIN = {'Xhigh': 20.0, 'High': 12.0, 'Med': 6.0, 'Low': 4.0}

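# A quick sanity check of the coverage for a given LIMIT (a sketch only, not
# used by the code below), assuming Python's math.erf is available (2.7+):
#
#     coverage = math.erf(LIMIT / math.sqrt(2.0))
#     # LIMIT = 3.0  ->  coverage ~ 0.997
#     # LIMIT = 2.5  ->  coverage ~ 0.988
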
class Smearer2D:
    """
    Gaussian Q smearing class for SANS 2d data
    """

    def __init__(self, data=None, model=None, index=None,
                 limit=LIMIT, accuracy='Low', coords='polar'):
[642b259]28        """
29        Assumption: equally spaced bins in dq_r, dq_phi space.
30       
31        :param data: 2d data used to set the smearing parameters
32        :param model: model function
33        :param index: 1d array with len(data) to define the range of the calculation: elements are given as True or False
34        :param nr: number of bins in dq_r-axis
35        :param nphi: number of bins in dq_phi-axis
[d4bf55e9]36        :param coord: coordinates [string], 'polar' or 'cartesian'
[642b259]37        """
        ## data
        self.data = data
        ## model
        self.model = model
        ## Accuracy: a higher setting means more sampling points in both
        #  the r and phi directions
        self.accuracy = accuracy
        ## number of bins in r axis for over-sampling
        self.nr = R_BIN
        ## number of bins in phi axis for over-sampling
        self.nphi = PHI_BIN
        ## maximum nsigmas
        self.limit = limit
        self.index = index
        self.coords = coords
        self.smearer = True

    def get_data(self):
        """
        Get qx_data, qy_data, dqx_data and dqy_data, and calculate
        phi_data = arctan(qx_data/qy_data).
        """
        if self.data is None or self.data.__class__.__name__ == 'Data1D':
            return None
        if self.data.dqx_data is None or self.data.dqy_data is None:
            return None
        self.qx_data = self.data.qx_data[self.index]
        self.qy_data = self.data.qy_data[self.index]
        self.q_data = self.data.q_data[self.index]
        # Here dqx and dqy mean dq_parallel and dq_perpendicular
        self.dqx_data = self.data.dqx_data[self.index]
        self.dqy_data = self.data.dqy_data[self.index]
        self.phi_data = numpy.arctan(self.qx_data / self.qy_data)
        ## Remove singular points if they exist
        self.dqx_data[self.dqx_data < SIGMA_ZERO] = SIGMA_ZERO
        self.dqy_data[self.dqy_data < SIGMA_ZERO] = SIGMA_ZERO
        return True

    def set_accuracy(self, accuracy='Low'):
        """
        Set accuracy.

        :param accuracy: string, one of 'Xhigh', 'High', 'Med' or 'Low'
        """
        self.accuracy = accuracy

    def set_smearer(self, smearer=True):
        """
        Set whether or not the smearer will be used.

        :param smearer: True to apply the smearing, False to skip it
        """
        self.smearer = smearer

    def set_data(self, data=None):
        """
        Set data.

        :param data: DataLoader.Data_info type
        """
        self.data = data

    def set_model(self, model=None):
        """
        Set model.

        :param model: sans.models instance
        """
        self.model = model

    def set_index(self, index=None):
        """
        Set index.

        :param index: 1d boolean array
        """
        self.index = index

    def get_value(self):
        """
        Over-sample each point with r_nbins times phi_nbins bins, calculate
        the Gaussian weights, then find the smeared intensity.
        For the default values this is equivalent to the following (but the
        numpy array implementation is roughly ten times faster): ::

            # Remove the singular points if they exist
            self.dqx_data[self.dqx_data == 0] = SIGMA_ZERO
            self.dqy_data[self.dqy_data == 0] = SIGMA_ZERO

            for phi in range(0, 4):
                for r in range(0, 5):
                    n = phi * 5 + r
                    r = r + 0.25
                    dphi = phi * 2.0 * math.pi / 4.0 + numpy.arctan(
                        self.qy_data[index_model] / self.dqy_data[index_model] /
                        self.qx_data[index_model] * self.dqx_data[index_model])
                    dq = r * sqrt(self.dqx_data[index_model] *
                                  self.dqx_data[index_model] +
                                  self.dqy_data[index_model] *
                                  self.dqy_data[index_model])
                    # Integrand of exp(-0.5*r*r) r dr over each bin
                    # (the full integration may not be needed):
                    weight_res[n] = (exp(-0.5 * ((r - 0.25) * (r - 0.25))) -
                                     exp(-0.5 * ((r + 0.25) * (r + 0.25))))
                    #if phi != 0 and r != 0:
                    qx_res = numpy.append(qx_res, self.qx_data[index_model] +
                                          dq * cos(dphi))
                    qy_res = numpy.append(qy_res, self.qy_data[index_model] +
                                          dq * sin(dphi))

        Then compute I(qx_res, qy_res) and do the weighted averaging.

        """
        valid = self.get_data()
        if valid is None:
            return valid
        # all dq values are zero
        if numpy.all(numpy.fabs(self.dqx_data) <= 1.1e-10) and \
                        numpy.all(numpy.fabs(self.dqy_data) <= 1.1e-10):
            self.smearer = False

        if self.smearer == False:
            return self.model.evalDistribution([self.qx_data, self.qy_data])

        nr = self.nr[self.accuracy]
        nphi = self.nphi[self.accuracy]

        # data length in the range of self.index
        len_data = len(self.qx_data)
        len_datay = len(self.qy_data)

        # Size of the bins in the dqr direction (polar coordinate of dqx and dqy)
        bin_size = self.limit / nr
        # Total number of bins = # of bins in the dq_r direction
        # times # of bins in the dq_phi direction
        n_bins = nr * nphi
        # Mean values of dqr at each bin, starting from half of the bin size
        r = bin_size / 2.0 + numpy.arange(nr) * bin_size
        # Mean values of dphi at each bin
        phi = numpy.arange(nphi)
        dphi = phi * 2.0 * math.pi / nphi
        dphi = dphi.repeat(nr)

        ## Transform to polar coordinates,
        #  and set dphi at each data point; 1d array
        dphi = dphi.repeat(len_data)
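        # Layout note: dphi now has length n_bins * len_data, with all
        # len_data entries for the first (r, phi) bin first, then those for
        # the second bin, and so on.  The qx, qy and weight arrays built
        # below are flattened into the same bin-major order.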
        q_phi = self.qy_data / self.qx_data

        # The starting angle differs between polar and cartesian coordinates.
        #if self.coords != 'polar':
        #    dphi += numpy.arctan( q_phi * self.dqx_data/ \
        #                     self.dqy_data).repeat(n_bins).reshape(len_data,\
        #                                        n_bins).transpose().flatten()

        # The angle (phi) of the original q point
        q_phi = numpy.arctan(q_phi).repeat(n_bins).reshape(len_data,\
                                                n_bins).transpose().flatten()
        ## Find the Gaussian weight for each dq bin: the weight depends only
        #  on the r direction (the integration may not be needed)
        weight_res = numpy.exp(-0.5 * ((r - bin_size / 2.0) * \
                                    (r - bin_size / 2.0))) - \
                     numpy.exp(-0.5 * ((r + bin_size / 2.0) * \
                                    (r + bin_size / 2.0)))
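        # weight_res is the closed-form integral of exp(-0.5*t*t) t dt over
        # each radial bin [r - bin_size/2, r + bin_size/2], i.e. the
        # (unnormalized) Gaussian probability mass assigned to that bin.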
        # No need for normalization here.
        #weight_res /= numpy.sum(weight_res)
        weight_res = weight_res.repeat(nphi).reshape(nr, nphi)
        weight_res = weight_res.transpose().flatten()

        ## Set dr for all dq bins for averaging
        dr = r.repeat(nphi).reshape(nr, nphi).transpose().flatten()
        ## Set dqr for all data points
        dqx = numpy.outer(dr, self.dqx_data).flatten()
        dqy = numpy.outer(dr, self.dqy_data).flatten()

        qx = self.qx_data.repeat(n_bins).reshape(len_data,\
                                             n_bins).transpose().flatten()
        qy = self.qy_data.repeat(n_bins).reshape(len_data,\
                                             n_bins).transpose().flatten()

        # The polar case needs a rotation by -q_phi
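        # dqx and dqy are the resolution components parallel and
        # perpendicular to q, so each sampled offset is rotated from the
        # frame aligned with q back into detector (qx, qy) coordinates.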
        if self.coords == 'polar':
            q_r = numpy.sqrt(qx * qx + qy * qy)
            qx_res = ((dqx * numpy.cos(dphi) + q_r) * numpy.cos(-q_phi) +
                       dqy * numpy.sin(dphi) * numpy.sin(-q_phi))
            qy_res = (-(dqx * numpy.cos(dphi) + q_r) * numpy.sin(-q_phi) +
                       dqy * numpy.sin(dphi) * numpy.cos(-q_phi))
        else:
            qx_res = qx + dqx * numpy.cos(dphi)
            qy_res = qy + dqy * numpy.sin(dphi)

        ## Evaluate all points
        val = self.model.evalDistribution([qx_res, qy_res])

        ## Reshape into a 2d array to use numpy weighted averaging
        value_res = val.reshape(n_bins, len(self.qx_data))

        ## Averaging with Gaussian weighting: normalization included.
        value = numpy.average(value_res, axis=0, weights=weight_res)
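        # numpy.average divides by the sum of the weights, so weight_res
        # does not need to be normalized beforehand.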

        ## Return the smeared values in the range of self.index
        return value

if __name__ == '__main__':
    ## Test with a 2D linear function
    x = 0.001 * numpy.arange(1, 11)
    dx = numpy.ones(len(x)) * 0.0003
    y = 0.001 * numpy.arange(1, 11)
    dy = numpy.ones(len(x)) * 0.001
    z = numpy.ones(10)
    dz = numpy.sqrt(z)

    from DataLoader import Data2D
    #for i in range(10): print i, 0.001 + i*0.008/9.0
    #for i in range(100): print i, int(math.floor( (i/ (100/9.0)) ))
    out = Data2D()
    out.data = z
    out.qx_data = x
    out.qy_data = y
    out.dqx_data = dx
    out.dqy_data = dy
    out.q_data = numpy.sqrt(dx * dx + dy * dy)
    index = numpy.ones(len(x), dtype=bool)
    out.mask = index
    from sans.models.LineModel import LineModel
    model = LineModel()
    model.setParam("A", 0)

    smear = Smearer2D(out, model, index)
    #smear.set_accuracy('Xhigh')
    value = smear.get_value()
    ## The model is linear, so the smeared values should match the unsmeared ones.
    print "Data length =", len(value)
    print " 2D linear function, I = 0 + 1*qy"
    text = " Gaussian weighted averaging of a 2D linear function gives "
    text += "the same results as without the averaging."
    print text
    print "qx_data", "qy_data", "I_nonsmear", "I_smeared"
    for ind in range(len(value)):
        print x[ind], y[ind], model.evalDistribution([x, y])[ind], value[ind]

"""
for i in range(len(qx_res)/(128*128)):
    k = i * 128*128 + 64

    print qx_res[k]-qqx[k], qy_res[k]-qqy[k]
print qqx[64], qqy[64]
"""
"""
if __name__ == '__main__':
    ## Another test with a constant function
    x = 0.001 * numpy.arange(1, 11)
    dx = numpy.ones(len(x)) * 0.001
    y = 0.001 * numpy.arange(1, 11)
    dy = numpy.ones(len(x)) * 0.001
    z = numpy.ones(10)
    dz = numpy.sqrt(z)

    from DataLoader import Data2D
    #for i in range(10): print i, 0.001 + i*0.008/9.0
    #for i in range(100): print i, int(math.floor( (i/ (100/9.0)) ))
    out = Data2D()
    out.data = z
    out.qx_data = x
    out.qy_data = y
    out.dqx_data = dx
    out.dqy_data = dy
    index = numpy.ones(len(x), dtype=bool)
    out.mask = index
    from sans.models.Constant import Constant
    model = Constant()

    value = Smearer2D(out, model, index).get_value()
    ## All data are ones, so the smeared values should also be ones.
    print "Data length =", len(value), ", Data=", value
"""