Changes in / [bfb195e:1353f60] in sasmodels
Files: 3 edited
- extra/pylint.rc
- sasmodels/bumps_model.py
- sasmodels/models/stickyhardsphere.py
Legend:
- lines prefixed with "-" were removed
- lines prefixed with "+" were added
- unprefixed lines are unchanged context
extra/pylint.rc
r6c8db9e → r53d0e24

@@ -98 +98 @@
 
 # Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_,x,y,z,qx,qy,qz,n,q,dx,dy,dz,id
+good-names=i,j,k,ex,Run,_,x,y,z,qx,qy,qz,n,q,dx,dy,dz,id,Iq,dIq,Qx,Qy,Qz
 
 # Bad variable names which should always be refused, separated by a comma
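The names added here (Iq, dIq, Qx, Qy, Qz) are the intensity and scattering-vector variables used throughout sasmodels; under pylint's default variable-name pattern they trigger the invalid-name (C0103) warning because they are short and mixed-case. A minimal illustrative sketch (not part of the changeset) of the kind of code this whitelisting keeps warning-free:

    import numpy as np

    q = np.linspace(0.001, 0.5, 200)   # scattering vector magnitude, 1/Ang
    Iq = 100 * np.ones_like(q)         # model intensity I(q)
    dIq = np.sqrt(Iq)                  # counting-statistics uncertainty on I(q)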
sasmodels/bumps_model.py
r9890053 → r7e224c2

@@ -2 +2 @@
 Sasmodels core.
 """
-import sys, os
 import datetime
 
@@ -9 +8 @@
 # CRUFT python 2.6
 if not hasattr(datetime.timedelta, 'total_seconds'):
-    def delay(dt): return dt.days *86400 + dt.seconds + 1e-6*dt.microseconds
+    def delay(dt): return dt.days * 86400 + dt.seconds + 1e-6 * dt.microseconds
 else:
     def delay(dt): return dt.total_seconds()
@@ -17 +16 @@
 try:
     from .kernelcl import load_model as _loader
-except RuntimeError, exc:
+except RuntimeError, exc:
     import warnings
     warnings.warn(str(exc))
@@ -27 +26 @@
     Load model by name.
     """
-    sasmodels = __import__('sasmodels.models.' +modelname)
+    sasmodels = __import__('sasmodels.models.' + modelname)
     module = getattr(sasmodels.models, modelname, None)
     model = _loader(module, dtype=dtype)
@@ -41 +40 @@
     """
     then = datetime.datetime.now()
-    return lambda: delay(datetime.datetime.now() -then)
+    return lambda: delay(datetime.datetime.now() - then)
 
 
@@ -52 +51 @@
     data = loader.load(filename)
     if data is None:
-        raise IOError("Data %r could not be loaded" %filename)
+        raise IOError("Data %r could not be loaded" % filename)
     return data
 
@@ -65 +64 @@
    from sas.dataloader.data_info import Data1D
 
-    Iq = 100 *np.ones_like(q)
+    Iq = 100 * np.ones_like(q)
     dIq = np.sqrt(Iq)
-    data = Data1D(q, Iq, dx=0.05 *q, dy=dIq)
+    data = Data1D(q, Iq, dx=0.05 * q, dy=dIq)
     data.filename = "fake data"
     data.qmin, data.qmax = q.min(), q.max()
@@ -85 +84 @@
     if qy is None:
         qy = qx
-    Qx, Qy = np.meshgrid(qx,qy)
-    Qx, Qy = Qx.flatten(), Qy.flatten()
-    Iq = 100 *np.ones_like(Qx)
+    Qx, Qy = np.meshgrid(qx, qy)
+    Qx, Qy = Qx.flatten(), Qy.flatten()
+    Iq = 100 * np.ones_like(Qx)
     dIq = np.sqrt(Iq)
     mask = np.ones(len(Iq), dtype='bool')
@@ -100 +99 @@
 
     # 5% dQ/Q resolution
-    data.dqx_data = 0.05 *Qx
-    data.dqy_data = 0.05 *Qy
+    data.dqx_data = 0.05 * Qx
+    data.dqy_data = 0.05 * Qy
 
     detector = Detector()
@@ -114 +113 @@
     data.Q_unit = "1/A"
     data.I_unit = "1/cm"
-    data.q_data = np.sqrt(Qx **2 + Qy**2)
+    data.q_data = np.sqrt(Qx ** 2 + Qy ** 2)
     data.xaxis("Q_x", "A^{-1}")
     data.yaxis("Q_y", "A^{-1}")
@@ -129 +128 @@
         data.mask = Ringcut(0, radius)(data)
         if outer is not None:
-            data.mask += Ringcut(outer, np.inf)(data)
+            data.mask += Ringcut(outer, np.inf)(data)
     else:
-        data.mask = (data.x >=radius)
+        data.mask = (data.x >= radius)
         if outer is not None:
-            data.mask &= (data.x <outer)
+            data.mask &= (data.x < outer)
 
 
@@ -165 +164 @@
     import matplotlib.pyplot as plt
     if hasattr(data, 'qx_data'):
-        iq = iq +0
+        iq = iq + 0
         valid = np.isfinite(iq)
         if scale == 'log':
             valid[valid] = (iq[valid] > 0)
             iq[valid] = np.log10(iq[valid])
-        iq[~valid |data.mask] = 0
+        iq[~valid | data.mask] = 0
         #plottable = iq
-        plottable = masked_array(iq, ~valid |data.mask)
+        plottable = masked_array(iq, ~valid | data.mask)
         xmin, xmax = min(data.qx_data), max(data.qx_data)
         ymin, ymax = min(data.qy_data), max(data.qy_data)
         try:
-            if vmin is None: vmin = iq[valid &~data.mask].min()
-            if vmax is None: vmax = iq[valid &~data.mask].max()
+            if vmin is None: vmin = iq[valid & ~data.mask].min()
+            if vmax is None: vmax = iq[valid & ~data.mask].max()
         except:
             vmin, vmax = 0, 1
-        plt.imshow(plottable.reshape(128, 128),
+        plt.imshow(plottable.reshape(128, 128),
                    interpolation='nearest', aspect=1, origin='upper',
                    extent=[xmin, xmax, ymin, ymax], vmin=vmin, vmax=vmax)
@@ -189 +188 @@
     else:
         idx = np.isfinite(iq)
-        idx[idx] = (iq[idx] >0)
+        idx[idx] = (iq[idx] > 0)
         plt.loglog(data.x[idx], iq[idx])
 
@@ -206 +205 @@
     mdata[mdata <= 0] = masked
     mtheory = masked_array(theory, mdata.mask)
-    mresid = masked_array((theory -data.y)/data.dy, mdata.mask)
+    mresid = masked_array((theory - data.y) / data.dy, mdata.mask)
 
     plt.subplot(121)
@@ -214 +213 @@
     plt.subplot(122)
     plt.plot(data.x, mresid, 'x')
-    #plt.axhline(1, color='black', ls='--',lw=1, hold=True)
-    #plt.axhline(0, color='black', lw=1, hold=True)
-    #plt.axhline(-1, color='black', ls='--',lw=1, hold=True)
 
 def _plot_sesans(data, theory, view):
     import matplotlib.pyplot as plt
-    resid = (theory - data.y) /data.dy
+    resid = (theory - data.y) / data.dy
     plt.subplot(121)
     plt.errorbar(data.x, data.y, yerr=data.dy)
@@ -236 +232 @@
     """
     import matplotlib.pyplot as plt
-    resid = (theory -data.data)/data.err_data
+    resid = (theory - data.data) / data.err_data
     plt.subplot(131)
     plot_data(data, data.data, scale=view)
@@ -270 +266 @@
         self.model = model
         self.cutoff = cutoff
-        # TODO if isinstance(data,SESANSData1D)
+        # TODO if isinstance(data,SESANSData1D)
         if hasattr(data, 'lam'):
             self.data_type = 'sesans'
@@ -283 +279 @@
         if self.data_type == 'sesans':
             q = sesans.make_q(data.sample.zacceptance, data.Rmax)
-            self.index = slice(None, None)
+            self.index = slice(None, None)
             self.iq = data.y
             self.diq = data.dy
@@ -289 +285 @@
             q_vectors = [q]
         elif self.data_type == 'Iqxy':
-            self.index = (data.mask ==0) & (~np.isnan(data.data))
+            self.index = (data.mask == 0) & (~np.isnan(data.data))
             self.iq = data.data[self.index]
             self.diq = data.err_data[self.index]
             self._theory = np.zeros_like(data.data)
             if not partype['orientation'] and not partype['magnetic']:
-                q_vectors = [np.sqrt(data.qx_data **2+data.qy_data**2)]
+                q_vectors = [np.sqrt(data.qx_data ** 2 + data.qy_data ** 2)]
             else:
                 q_vectors = [data.qx_data, data.qy_data]
         elif self.data_type == 'Iq':
-            self.index = (data.x >=data.qmin) & (data.x<=data.qmax) & ~np.isnan(data.y)
+            self.index = (data.x >= data.qmin) & (data.x <= data.qmax) & ~np.isnan(data.y)
             self.iq = data.y[self.index]
             self.diq = data.dy[self.index]
@@ -319 +315 @@
                 pars.append(name)
         for name in partype['pd-2d']:
-            for xpart, xdefault,xlimits in [
+            for xpart, xdefault, xlimits in [
                 ('_pd', 0, limits),
-                ('_pd_n', 35, (0, 1000)),
+                ('_pd_n', 35, (0, 1000)),
                 ('_pd_nsigma', 3, (0, 10)),
                 ('_pd_type', 'gaussian', None),
                 ]:
-                xname = name +xpart
+                xname = name + xpart
                 xvalue = kw.pop(xname, xdefault)
                 if xlimits is not None:
@@ -333 +329 @@
         self._parameter_names = pars
         if kw:
-            raise TypeError("unexpected parameters: %s" %(", ".join(sorted(kw.keys()))))
+            raise TypeError("unexpected parameters: %s" % (", ".join(sorted(kw.keys()))))
         self.update()
 
@@ -340 +336 @@
 
     def numpoints(self):
+        """
+        Return the number of points
+        """
         return len(self.iq)
 
     def parameters(self):
-        return dict((k,getattr(self,k)) for k in self._parameter_names)
+        """
+        Return a dictionary of parameters
+        """
+        return dict((k, getattr(self, k)) for k in self._parameter_names)
 
     def theory(self):
         if 'theory' not in self._cache:
             if self._fn is None:
-                input = self.model.make_input(self._fn_inputs)
-                self._fn = self.model(input)
-
-            fixed_pars = [getattr(self, p).value for p in self._fn.fixed_pars]
+                input_value = self.model.make_input(self._fn_inputs)
+                self._fn = self.model(input_value)
+
+            fixed_pars = [getattr(self, p).value for p in self._fn.fixed_pars]
             pd_pars = [self._get_weights(p) for p in self._fn.pd_pars]
             #print fixed_pars,pd_pars
@@ -357 +359 @@
             #self._theory[:] = self._fn.eval(pars, pd_pars)
             if self.data_type == 'sesans':
-                P = sesans.hankel(self.data.x, self.data.lam *1e-9,
-                                  self.data.sample.thickness /10, self._fn_inputs[0],
+                P = sesans.hankel(self.data.x, self.data.lam * 1e-9,
+                                  self.data.sample.thickness / 10, self._fn_inputs[0],
                                   self._theory)
                 self._cache['theory'] = P
@@ -367 +369 @@
     def residuals(self):
         #if np.any(self.err ==0): print "zeros in err"
-        return (self.theory()[self.index] -self.iq)/self.diq
+        return (self.theory()[self.index] - self.iq) / self.diq
 
     def nllf(self):
         R = self.residuals()
         #if np.any(np.isnan(R)): print "NaN in residuals"
-        return 0.5 *np.sum(R**2)
+        return 0.5 * np.sum(R ** 2)
 
     def __call__(self):
-        return 2 *self.nllf()/self.dof
+        return 2 * self.nllf() / self.dof
 
     def plot(self, view='log'):
@@ -396 +398 @@
             noise = self.diq[self.index]
         else:
-            noise = 0.01 *noise
+            noise = 0.01 * noise
             self.diq[self.index] = noise
         y = self.theory()
-        y += y *np.random.randn(*y.shape)*noise
+        y += y * np.random.randn(*y.shape) * noise
         if self.data_type == 'Iq':
             self.data.y[self.index] = y
@@ -413 +415 @@
 
     def _get_weights(self, par):
+        """
+        Get parameter dispersion weights
+        """
         from . import weights
 
         relative = self.model.info['partype']['pd-rel']
         limits = self.model.info['limits']
-        disperser, value,npts,width,nsigma = [getattr(self, par+ext)
-                for ext in ('_pd_type','','_pd_n','_pd','_pd_nsigma')]
-        v, w = weights.get_weights(
+        disperser, value, npts, width, nsigma = \
+            [getattr(self, par + ext) for ext in ('_pd_type', '', '_pd_n', '_pd', '_pd_nsigma')]
+        v, w = weights.get_weights(
             disperser, int(npts.value), width.value, nsigma.value,
             value.value, limits[par], par in relative)
-        return v, w/w.max()
+        return v, w / w.max()
 
     def __getstate__(self):
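Most of the edits above are pylint-driven cleanups: spaces around operators, removal of the module-level `import sys, os`, short docstrings for numpoints(), parameters() and _get_weights(), and renaming the local variable `input` to `input_value`, presumably so it no longer shadows the Python builtin. A hypothetical usage sketch of how the helpers touched here fit together; the helper names (load_model, empty_data1D, set_beam_stop) and their exact signatures are inferred from the surrounding code rather than shown in the diff, so treat them as assumptions:

    import numpy as np
    from sasmodels import bumps_model

    # fake 1D data set with 5% dq/q and sqrt(I) uncertainties,
    # mirroring the empty_data1D body shown above
    q = np.logspace(-3, -1, 200)            # 1/Ang
    data = bumps_model.empty_data1D(q)

    # mask points below an assumed beam-stop radius
    bumps_model.set_beam_stop(data, 0.004)

    # load a kernel by model name, as in the load_model body above
    # ('cylinder' and dtype='single' are illustrative values)
    model = bumps_model.load_model('cylinder', dtype='single')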
sasmodels/models/stickyhardsphere.py
rbfb195e → r7e224c2

@@ -1 +1 @@
 # Note: model title and parameter table are inserted automatically
-r"""This calculates the interparticle structure factor for a hard sphere fluid with a narrow attractive well. A perturbative
-solution of the Percus-Yevick closure is used. The strength of the attractive well is described in terms of "stickiness"
+r"""This calculates the interparticle structure factor for a hard sphere fluid with
+a narrow attractive well. A perturbative solution of the Percus-Yevick closure is used.
+The strength of the attractive well is described in terms of "stickiness"
 as defined below. The returned value is a dimensionless structure factor, *S(q)*.
 
-The perturb (perturbation parameter), |epsilon|, should be held between 0.01 and 0.1. It is best to hold the
-perturbation parameter fixed and let the "stickiness" vary to adjust the interaction strength. The stickiness, |tau|,
-is defined in the equation below and is a function of both the perturbation parameter and the interaction strength.
-|tau| and |epsilon| are defined in terms of the hard sphere diameter (|sigma| = 2\*\ *R*\ ), the width of the square
-well, |bigdelta| (same units as *R*), and the depth of the well, *Uo*, in units of kT. From the definition, it is clear
-that smaller |tau| means stronger attraction.
+The perturb (perturbation parameter), |epsilon|, should be held between 0.01 and 0.1.
+It is best to hold the perturbation parameter fixed and let the "stickiness" vary to
+adjust the interaction strength. The stickiness, |tau|, is defined in the equation
+below and is a function of both the perturbation parameter and the interaction strength.
+|tau| and |epsilon| are defined in terms of the hard sphere diameter (|sigma| = 2\*\ *R*\ ),
+the width of the square well, |bigdelta| (same units as *R*), and the depth of the well,
+*Uo*, in units of kT. From the definition, it is clear that smaller |tau| means stronger
+attraction.
 
 .. image:: img/stickyhardsphere_228.PNG
@@ -17 +20 @@
 .. image:: img/stickyhardsphere_229.PNG
 
-The Percus-Yevick (PY) closure was used for this calculation, and is an adequate closure for an attractive interparticle
-potential. This solution has been compared to Monte Carlo simulations for a square well fluid, with good agreement.
+The Percus-Yevick (PY) closure was used for this calculation, and is an adequate closure
+for an attractive interparticle potential. This solution has been compared to Monte Carlo
+simulations for a square well fluid, with good agreement.
 
-The true particle volume fraction, |phi|, is not equal to *h*, which appears in most of the reference. The two are
-related in equation (24) of the reference. The reference also describes the relationship between this perturbation
-solution and the original sticky hard sphere (or adhesive sphere) model by Baxter.
+The true particle volume fraction, |phi|, is not equal to *h*, which appears in most of
+the reference. The two are related in equation (24) of the reference. The reference also
+describes the relationship between this perturbation solution and the original sticky hard
+sphere (or adhesive sphere) model by Baxter.
 
-NB: The calculation can go haywire for certain combinations of the input parameters, producing unphysical solutions - in
-this case errors are reported to the command window and the *S(q)* is set to -1 (so it will disappear on a log-log
-plot). Use tight bounds to keep the parameters to values that you know are physical (test them) and keep nudging them
-until the optimization does not hit the constraints.
+NB: The calculation can go haywire for certain combinations of the input parameters,
+producing unphysical solutions - in this case errors are reported to the command window and
+the *S(q)* is set to -1 (so it will disappear on a log-log plot). Use tight bounds to keep
+the parameters to values that you know are physical (test them) and keep nudging them until
+the optimization does not hit the constraints.
 
-In sasview the effective radius will be calculated from the parameters used in the form factor P(Q) that this
-S(Q) is combined with.
+In sasview the effective radius will be calculated from the parameters used in the
+form factor P(Q) that this S(Q) is combined with.
 
-For 2D data: The 2D scattering intensity is calculated in the same way as 1D, where the *q* vector is defined as
+For 2D data: The 2D scattering intensity is calculated in the same way as 1D, where
+the *q* vector is defined as
 
 .. math::
@@ -57 +64 @@
 
 # TODO: refactor so that we pull in the old sansmodels.c_extensions
 
-from numpy import pi,inf
+from numpy import inf
 
 name = "stickyhardsphere"
@@ -64 +71 @@
 description = """\
 [Sticky hard sphere structure factor, with Percus-Yevick closure]
-Interparticle structure factor S(Q)for a hard sphere fluid with
+Interparticle structure factor S(Q)for a hard sphere fluid with
 a narrow attractive well. Fits are prone to deliver non-physical
-parameters, use with care and read the references in the full manual.
-In sasview the effective radius will be calculated from the
+parameters, use with care and read the references in the full manual.
+In sasview the effective radius will be calculated from the
 parameters used in P(Q).
 """
@@ -73 +80 @@
 
 parameters = [
-    # [ "name", "units", default, [lower, upper], "type",
-    #   "description" ],
-    [ "effect_radius", "Ang",50.0, [0, inf], "volume",
-      "effective radius of hard sphere"],
-    [ "volfraction", "",0.2, [0, 0.74], "",
-      "volume fraction of hard spheres"],
-    [ "perturb", "",0.05, [0.01, 0.1], "",
-      "perturbation parameter, epsilon"],
-    [ "stickiness", "", 0.20, [-inf,inf], "",
-      "stickiness, tau"],
+    # [ "name", "units", default, [lower, upper], "type",
+    #   "description" ],
+    ["effect_radius", "Ang", 50.0, [0, inf], "volume",
+     "effective radius of hard sphere"],
+    ["volfraction", "", 0.2, [0, 0.74], "",
+     "volume fraction of hard spheres"],
+    ["perturb", "", 0.05, [0.01, 0.1], "",
+     "perturbation parameter, epsilon"],
+    ["stickiness", "", 0.20, [-inf, inf], "",
+     "stickiness, tau"],
     ]
 
 # No volume normalization despite having a volume parameter
 # This should perhaps be volume normalized?
@@ -96 +103 @@
     double lam,lam2,test,mu,alpha,beta;
     double kk,k2,k3,ds,dc,aq1,aq2,aq3,aq,bq1,bq2,bq3,bq,sq;
 
     onemineps = 1.0-perturb;
     eta = volfraction/onemineps/onemineps/onemineps;
 
     sig = 2.0 * effect_radius;
     aa = sig/onemineps;
@@ -153 +160 @@
     //
     sq = 1.0/(aq*aq +bq*bq);
 
     return(sq);
 """
@@ -166 +173 @@
 oldname = 'StickyHSStructure'
 oldpars = dict()
-demo = dict(effect_radius = 200,volfraction = 0.2,perturb=0.05,stickiness=0.2,effect_radius_pd = 0.1,effect_radius_pd_n = 40)
+demo = dict(effect_radius=200, volfraction=0.2, perturb=0.05,
+            stickiness=0.2, effect_radius_pd=0.1, effect_radius_pd_n=40)
 
 
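The `.. math::` directive at the end of the rewrapped docstring (its body lies outside the changed hunks) gives the definition of the *q* vector for 2D data. Consistent with the `data.q_data = np.sqrt(Qx ** 2 + Qy ** 2)` line in bumps_model.py above, it is presumably the usual magnitude of the 2D scattering vector:

    q = \sqrt{q_x^2 + q_y^2}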