source: sasmodels/sasmodels/direct_model.py @ 745b7bb

1"""
2Class interface to the model calculator.
3
4Calling a model is somewhat non-trivial since the functions called depend
5on the data type.  For 1D data the *Iq* kernel needs to be called, for
62D data the *Iqxy* kernel needs to be called, and for SESANS data the
7*Iq* kernel needs to be called followed by a Hankel transform.  Before
8the kernel is called an appropriate *q* calculation vector needs to be
9constructed.  This is not the simple *q* vector where you have measured
10the data since the resolution calculation will require values beyond the
11range of the measured data.  After the calculation the resolution calculator
12must be called to return the predicted value for each measured data point.
13
14:class:`DirectModel` is a callable object that takes *parameter=value*
15keyword arguments and returns the appropriate theory values for the data.
16
17:class:`DataMixin` does the real work of interpreting the data and calling
18the model calculator.  This is used by :class:`DirectModel`, which uses
19direct parameter values and by :class:`bumps_model.Experiment` which wraps
20the parameter values in boxes so that the user can set fitting ranges, etc.
21on the individual parameters and send the model to the Bumps optimizers.
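
A minimal usage sketch, assuming the *sphere* model and the helpers
:func:`core.load_model` and :func:`data.empty_data1D` to build the inputs::

    import numpy as np
    from sasmodels.core import load_model
    from sasmodels.data import empty_data1D
    from sasmodels.direct_model import DirectModel

    data = empty_data1D(np.logspace(-3, -1, 200))  # q values, no measured I(q)
    model = load_model("sphere")                   # compiled model kernel
    calculator = DirectModel(data, model)
    Iq = calculator(radius=60, sld=1, sld_solvent=6)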
22"""
from __future__ import print_function

import numpy as np  # type: ignore

# TODO: fix sesans module
from . import sesans  # type: ignore
from . import weights
from . import resolution
from . import resolution2d
from .details import build_details

try:
    from typing import Optional, Dict, Tuple
except ImportError:
    pass
else:
    from .data import Data
    from .kernel import Kernel, KernelModel
    from .modelinfo import Parameter, ParameterSet

def call_kernel(calculator, pars, cutoff=0., mono=False):
    # type: (Kernel, ParameterSet, float, bool) -> np.ndarray
    """
    Call *calculator*, the kernel returned from *model.make_kernel*, with
    parameters *pars*.

    *cutoff* is the limiting value for the product of dispersion weights used
    to perform the multidimensional dispersion calculation more quickly at a
    slight cost to accuracy. The default value of *cutoff=0* integrates over
    the entire dispersion cube.  Using *cutoff=1e-5* can be 50% faster, but
    with an error of about 1%, which is usually less than the measurement
    uncertainty.

    *mono* is True if polydispersity should be turned off for all parameters.
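
    A minimal sketch of the call sequence, assuming the *sphere* model and a
    hand-built *q* vector::

        import numpy as np
        from sasmodels.core import load_model

        model = load_model("sphere")
        kernel = model.make_kernel([np.logspace(-3, -1, 200)])
        pars = {'radius': 200, 'radius_pd': 0.1, 'radius_pd_n': 35}
        Iq = call_kernel(kernel, pars, cutoff=1e-5)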
56    """
    parameters = calculator.info.parameters
    if mono:
        active = lambda name: False
    elif calculator.dim == '1d':
        active = lambda name: name in parameters.pd_1d
    elif calculator.dim == '2d':
        active = lambda name: name in parameters.pd_2d
    else:
        active = lambda name: True

    #print("pars",[p.id for p in parameters.call_parameters])
    vw_pairs = [(get_weights(p, pars) if active(p.name)
                 else ([pars.get(p.name, p.default)], [1.0]))
                for p in parameters.call_parameters]

    call_details, values, is_magnetic = build_details(calculator, vw_pairs)
    #print("values:", values)
    return calculator(call_details, values, cutoff, is_magnetic)


def get_weights(parameter, values):
    # type: (Parameter, Dict[str, float]) -> Tuple[np.ndarray, np.ndarray]
    """
    Generate the distribution for *parameter* given the parameter values
    in *values*.

    Uses "name", "name_pd", "name_pd_type", "name_pd_n", "name_pd_nsigma"
    from the *values* dictionary for parameter value and parameter dispersion.
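
    For example, the following (illustrative) *values* dictionary requests a
    gaussian dispersion of 35 points with a 20% relative width on *radius*::

        values = {'radius': 60.0, 'radius_pd': 0.2, 'radius_pd_n': 35,
                  'radius_pd_type': 'gaussian', 'radius_pd_nsigma': 3.0}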
85    """
86    value = float(values.get(parameter.name, parameter.default))
87    relative = parameter.relative_pd
88    limits = parameter.limits
89    disperser = values.get(parameter.name+'_pd_type', 'gaussian')
90    npts = values.get(parameter.name+'_pd_n', 0)
91    width = values.get(parameter.name+'_pd', 0.0)
92    nsigma = values.get(parameter.name+'_pd_nsigma', 3.0)
93    if npts == 0 or width == 0:
94        return [value], [1.0]
95    value, weight = weights.get_weights(
96        disperser, npts, width, nsigma, value, limits, relative)
97    return value, weight / np.sum(weight)
98
99
def call_profile(model_info, **pars):
    """
    Call the profile function for the model described by *model_info*,
    returning *(x, y, (xlabel, ylabel))* for the given parameter values.
    """
    args = {}
    for p in model_info.parameters.kernel_parameters:
        if p.length > 1:
            value = np.array([pars.get(p.id+str(j), p.default)
                              for j in range(1, p.length+1)])
        else:
            value = pars.get(p.id, p.default)
        args[p.id] = value
    x, y = model_info.profile(**args)
    return x, y, model_info.profile_axes


class DataMixin(object):
    """
    DataMixin captures the common aspects of evaluating a SAS model for a
    particular data set, including calculating Iq and evaluating the
    resolution function.  It is used in particular by :class:`DirectModel`,
    which evaluates SAS model parameters given as keyword arguments to the
    calculator method, and by :class:`bumps_model.Experiment`, which wraps the
    model and data for use with the Bumps fitting engine.  It is not
    currently used by :class:`sasview_model.SasviewModel` since this will
    require a number of changes to SasView before we can do it.

    :meth:`_interpret_data` initializes the data structures necessary
    to manage the calculations.  This sets attributes in the child class
    such as *data_type* and *resolution*.

    :meth:`_calc_theory` evaluates the model at the given control values.

    :meth:`_set_data` sets the intensity data in the data object,
    possibly with random noise added.  This is useful for simulating a
    dataset with the results from :meth:`_calc_theory`.
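
    A minimal sketch of how a subclass ties these methods together
    (essentially what :class:`DirectModel` below does)::

        class Calculator(DataMixin):
            def __init__(self, data, model):
                self._interpret_data(data, model)

            def __call__(self, **pars):
                return self._calc_theory(pars, cutoff=0.)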
133    """
    def _interpret_data(self, data, model):
        # type: (Data, KernelModel) -> None
        # pylint: disable=attribute-defined-outside-init

        self._data = data
        self._model = model

        # interpret data
        if hasattr(data, 'lam'):
            self.data_type = 'sesans'
        elif hasattr(data, 'qx_data'):
            self.data_type = 'Iqxy'
        elif getattr(data, 'oriented', False):
            self.data_type = 'Iq-oriented'
        else:
            self.data_type = 'Iq'

        if self.data_type == 'sesans':
            q = sesans.make_q(data.sample.zacceptance, data.Rmax)
            index = slice(None, None)
            res = None
            if data.y is not None:
                Iq, dIq = data.y, data.dy
            else:
                Iq, dIq = None, None
            #self._theory = np.zeros_like(q)
            q_vectors = [q]
            q_mono = sesans.make_all_q(data)
        elif self.data_type == 'Iqxy':
            #if not model.info.parameters.has_2d:
            #    raise ValueError("not 2D without orientation or magnetic parameters")
            q = np.sqrt(data.qx_data**2 + data.qy_data**2)
            qmin = getattr(data, 'qmin', 1e-16)
            qmax = getattr(data, 'qmax', np.inf)
            accuracy = getattr(data, 'accuracy', 'Low')
            index = ~data.mask & (q >= qmin) & (q <= qmax)
            if data.data is not None:
                index &= ~np.isnan(data.data)
                Iq = data.data[index]
                dIq = data.err_data[index]
            else:
                Iq, dIq = None, None
            res = resolution2d.Pinhole2D(data=data, index=index,
                                         nsigma=3.0, accuracy=accuracy)
            #self._theory = np.zeros_like(self.Iq)
            q_vectors = res.q_calc
            q_mono = []
        elif self.data_type == 'Iq':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if getattr(data, 'dx', None) is not None:
                q, dq = data.x[index], data.dx[index]
                if (dq > 0).any():
                    res = resolution.Pinhole1D(q, dq)
                else:
                    res = resolution.Perfect1D(q)
            elif (getattr(data, 'dxl', None) is not None
                  and getattr(data, 'dxw', None) is not None):
                res = resolution.Slit1D(data.x[index],
                                        qx_width=data.dxl[index],
                                        qy_width=data.dxw[index])
            else:
                res = resolution.Perfect1D(data.x[index])

            #self._theory = np.zeros_like(self.Iq)
            q_vectors = [res.q_calc]
            q_mono = []
        elif self.data_type == 'Iq-oriented':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if (getattr(data, 'dxl', None) is None
                or getattr(data, 'dxw', None) is None):
                raise ValueError("oriented sample with 1D data needs slit resolution")

            res = resolution2d.Slit2D(data.x[index],
                                      qx_width=data.dxw[index],
                                      qy_width=data.dxl[index])
            q_vectors = res.q_calc
            q_mono = []
        else:
            raise ValueError("Unknown data type")  # never gets here

        # Remember function inputs so we can delay loading the function and
        # so we can save/restore state
        self._kernel_inputs = q_vectors
        self._kernel_mono_inputs = q_mono
        self._kernel = None
        self.Iq, self.dIq, self.index = Iq, dIq, index
        self.resolution = res

    def _set_data(self, Iq, noise=None):
        # type: (np.ndarray, Optional[float]) -> None
        # pylint: disable=attribute-defined-outside-init
        if noise is not None:
            self.dIq = Iq*noise*0.01
        dy = self.dIq
        y = Iq + np.random.randn(*dy.shape) * dy
        self.Iq = y
        if self.data_type in ('Iq', 'Iq-oriented'):
            self._data.dy[self.index] = dy
            self._data.y[self.index] = y
        elif self.data_type == 'Iqxy':
            self._data.data[self.index] = y
        elif self.data_type == 'sesans':
            self._data.y[self.index] = y
        else:
            raise ValueError("Unknown data type")

    def _calc_theory(self, pars, cutoff=0.0):
        # type: (ParameterSet, float) -> np.ndarray
        if self._kernel is None:
            self._kernel = self._model.make_kernel(self._kernel_inputs)
            self._kernel_mono = (self._model.make_kernel(self._kernel_mono_inputs)
                                 if self._kernel_mono_inputs else None)

        Iq_calc = call_kernel(self._kernel, pars, cutoff=cutoff)
        # TODO: may want to plot the raw Iq for other than oriented usans
        self.Iq_calc = None
        if self.data_type == 'sesans':
            Iq_mono = (call_kernel(self._kernel_mono, pars, mono=True)
                       if self._kernel_mono_inputs else None)
            result = sesans.transform(self._data,
                                      self._kernel_inputs[0], Iq_calc,
                                      self._kernel_mono_inputs, Iq_mono)
        else:
            result = self.resolution.apply(Iq_calc)
            if hasattr(self.resolution, 'nx'):
                self.Iq_calc = (
                    self.resolution.qx_calc, self.resolution.qy_calc,
                    np.reshape(Iq_calc, (self.resolution.ny, self.resolution.nx))
                )
        return result


class DirectModel(DataMixin):
    """
    Create a calculator object for a model.

    *data* is 1D SAS, 2D SAS or SESANS data

    *model* is a model calculator returned from :func:`generate.load_model`

    *cutoff* is the polydispersity weight cutoff.
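
    A minimal usage sketch, assuming the *cylinder* model (the parameter
    names shown are those of that model)::

        import numpy as np
        from sasmodels.core import load_model
        from sasmodels.data import empty_data1D

        data = empty_data1D(np.logspace(-3, -1, 200))
        model = load_model("cylinder")
        calculator = DirectModel(data, model, cutoff=1e-5)
        Iq = calculator(radius=20, length=400)
        calculator.simulate_data(noise=5, radius=20, length=400)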
287    """
    def __init__(self, data, model, cutoff=1e-5):
        # type: (Data, KernelModel, float) -> None
        self.model = model
        self.cutoff = cutoff
        # Note: _interpret_data defines the model attributes
        self._interpret_data(data, model)

    def __call__(self, **pars):
        # type: (**float) -> np.ndarray
        return self._calc_theory(pars, cutoff=self.cutoff)

    def simulate_data(self, noise=None, **pars):
        # type: (Optional[float], **float) -> None
        """
        Generate simulated data for the model.
        """
        Iq = self.__call__(**pars)
        self._set_data(Iq, noise=noise)

    def profile(self, **pars):
        # type: (**float) -> None
        """
        Generate a plottable profile.
        """
        return call_profile(self.model.info, **pars)

def main():
    # type: () -> None
    """
    Program to evaluate a particular model at a set of q values.
    """
    import sys
    from .data import empty_data1D, empty_data2D
    from .core import load_model_info, build_model

    if len(sys.argv) < 3:
        print("usage: python -m sasmodels.direct_model modelname (q|qx,qy) par=val ...")
        sys.exit(1)
    model_name = sys.argv[1]
    call = sys.argv[2].upper()
    if call != "ER_VR":
        try:
            values = [float(v) for v in call.split(',')]
        except Exception:
            values = []
        if len(values) == 1:
            q, = values
            data = empty_data1D([q])
        elif len(values) == 2:
            qx, qy = values
            data = empty_data2D([qx], [qy])
        else:
            print("use q or qx,qy or ER or VR")
            sys.exit(1)
    else:
        data = empty_data1D([0.001])  # Data not used in ER/VR

    model_info = load_model_info(model_name)
    model = build_model(model_info)
    calculator = DirectModel(data, model)
    pars = dict((k, float(v))
                for pair in sys.argv[3:]
                for k, v in [pair.split('=')])
    if call == "ER_VR":
        print(calculator.ER_VR(**pars))
    else:
        Iq = calculator(**pars)
        print(Iq[0])

if __name__ == "__main__":
    main()