source: sasmodels/sasmodels/direct_model.py @ 2c4a190

Last change on this file since 2c4a190 was 2c4a190, checked in by Paul Kienzle <pkienzle@…>, 5 years ago

simplify use of multiple scattering in bumps fits

1"""
2Class interface to the model calculator.
3
4Calling a model is somewhat non-trivial since the functions called depend
5on the data type.  For 1D data the *Iq* kernel needs to be called, for
62D data the *Iqxy* kernel needs to be called, and for SESANS data the
7*Iq* kernel needs to be called followed by a Hankel transform.  Before
8the kernel is called an appropriate *q* calculation vector needs to be
9constructed.  This is not the simple *q* vector where you have measured
10the data since the resolution calculation will require values beyond the
11range of the measured data.  After the calculation the resolution calculator
12must be called to return the predicted value for each measured data point.
13
14:class:`DirectModel` is a callable object that takes *parameter=value*
15keyword arguments and returns the appropriate theory values for the data.
16
17:class:`DataMixin` does the real work of interpreting the data and calling
18the model calculator.  This is used by :class:`DirectModel`, which uses
19direct parameter values and by :class:`bumps_model.Experiment` which wraps
20the parameter values in boxes so that the user can set fitting ranges, etc.
21on the individual parameters and send the model to the Bumps optimizers.
22"""
from __future__ import print_function

import numpy as np  # type: ignore

# TODO: fix sesans module
from . import sesans  # type: ignore
from . import weights
from . import resolution
from . import resolution2d
from .details import make_kernel_args, dispersion_mesh
from .modelinfo import DEFAULT_BACKGROUND

# pylint: disable=unused-import
try:
    from typing import Optional, Dict, List, Tuple
except ImportError:
    pass
else:
    from .data import Data
    from .kernel import Kernel, KernelModel
    from .modelinfo import ModelInfo, Parameter, ParameterSet
# pylint: enable=unused-import

def call_kernel(calculator, pars, cutoff=0., mono=False):
    # type: (Kernel, ParameterSet, float, bool) -> np.ndarray
    """
    Call *calculator*, the kernel returned from *model.make_kernel*, with
    parameters *pars*.

    *cutoff* is the limiting value for the product of dispersion weights used
    to perform the multidimensional dispersion calculation more quickly at a
    slight cost to accuracy. The default value of *cutoff=0* integrates over
    the entire dispersion cube.  Using *cutoff=1e-5* can be 50% faster, but
    with an error of about 1%, which is usually less than the measurement
    uncertainty.

    If *mono* is True, polydispersity is turned off for all parameters.
    """
    mesh = get_mesh(calculator.info, pars, dim=calculator.dim, mono=mono)
    #print("pars", list(zip(*mesh))[0])
    call_details, values, is_magnetic = make_kernel_args(calculator, mesh)
    #print("values:", values)
    return calculator(call_details, values, cutoff, is_magnetic)

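# A sketch of calling a kernel directly (the "sphere" model and the parameter
# values are illustrative):
#
#     import numpy as np
#     from sasmodels.core import load_model
#
#     model = load_model("sphere")
#     q = np.logspace(-3, -1, 200)
#     kernel = model.make_kernel([q])
#     Iq = call_kernel(kernel, {'radius': 200, 'radius_pd': 0.1, 'radius_pd_n': 35})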
def call_ER(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> float
    """
    Call the model ER function using *pars*.

    *model_info* is either *model.info* if you have a loaded model,
    or *kernel.info* if you have a model kernel prepared for evaluation.
    """
    if model_info.ER is None:
        return 1.0
    elif not model_info.parameters.form_volume_parameters:
        # handle the case where ER is provided but model is not polydisperse
        return model_info.ER()
    else:
        value, weight = _vol_pars(model_info, pars)
        individual_radii = model_info.ER(*value)
        return np.sum(weight*individual_radii) / np.sum(weight)


def call_VR(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> float
    """
    Call the model VR function using *pars*.

    *model_info* is either *model.info* if you have a loaded model,
    or *kernel.info* if you have a model kernel prepared for evaluation.
    """
    if model_info.VR is None:
        return 1.0
    elif not model_info.parameters.form_volume_parameters:
        # handle the case where VR is provided but model is not polydisperse
        return model_info.VR()
    else:
        value, weight = _vol_pars(model_info, pars)
        whole, part = model_info.VR(*value)
        return np.sum(weight*part)/np.sum(weight*whole)


def call_profile(model_info, **pars):
    # type: (ModelInfo, ...) -> Tuple[np.ndarray, np.ndarray, Tuple[str, str]]
    """
    Returns the profile *x, y, (xlabel, ylabel)* representing the model.
    """
    args = {}
    for p in model_info.parameters.kernel_parameters:
        if p.length > 1:
            value = np.array([pars.get(p.id+str(j), p.default)
                              for j in range(1, p.length+1)])
        else:
            value = pars.get(p.id, p.default)
        args[p.id] = value
    x, y = model_info.profile(**args)
    return x, y, model_info.profile_axes

def get_mesh(model_info, values, dim='1d', mono=False):
    # type: (ModelInfo, Dict[str, float], str, bool) -> List[Tuple[float, np.ndarray, np.ndarray]]
    """
    Retrieve the dispersity mesh described by the parameter set.

    Returns a list of *(value, dispersity, weights)* with one tuple for each
    parameter in the model call parameters.  Inactive parameters return the
    default value with a weight of 1.0.
    """
    parameters = model_info.parameters
    if mono:
        active = lambda name: False
    elif dim == '1d':
        active = lambda name: name in parameters.pd_1d
    elif dim == '2d':
        active = lambda name: name in parameters.pd_2d
    else:
        active = lambda name: True

    #print("pars",[p.id for p in parameters.call_parameters])
    mesh = [_get_par_weights(p, values, active(p.name))
            for p in parameters.call_parameters]
    return mesh


def _get_par_weights(parameter, values, active=True):
    # type: (Parameter, Dict[str, float], bool) -> Tuple[float, np.ndarray, np.ndarray]
    """
    Generate the distribution for *parameter* given the parameter values
    in *values*.

    Uses "name", "name_pd", "name_pd_type", "name_pd_n", "name_pd_nsigma"
    from the *values* dictionary for the parameter value and parameter
    dispersion.
    """
    value = float(values.get(parameter.name, parameter.default))
    npts = values.get(parameter.name+'_pd_n', 0)
    width = values.get(parameter.name+'_pd', 0.0)
    relative = parameter.relative_pd
    if npts == 0 or width == 0.0 or not active:
        # Note: orientation parameters have the viewing angle as the parameter
        # value and the jitter in the distribution, so be sure to set the
        # empty pd for orientation parameters to 0.
        pd = [value if relative or not parameter.polydisperse else 0.0], [1.0]
    else:
        limits = parameter.limits
        disperser = values.get(parameter.name+'_pd_type', 'gaussian')
        nsigma = values.get(parameter.name+'_pd_nsigma', 3.0)
        pd = weights.get_weights(disperser, npts, width, nsigma,
                                 value, limits, relative)
    return value, pd[0], pd[1]

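# The dispersion naming convention used in *values*, sketched for a single
# parameter: a radius of 200 Ang with a 10% gaussian distribution sampled at
# 35 points over +/- 3 sigma could be written as
#
#     {'radius': 200, 'radius_pd': 0.1, 'radius_pd_n': 35,
#      'radius_pd_type': 'gaussian', 'radius_pd_nsigma': 3.0}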

def _vol_pars(model_info, values):
    # type: (ModelInfo, ParameterSet) -> Tuple[np.ndarray, np.ndarray]
    vol_pars = [_get_par_weights(p, values)
                for p in model_info.parameters.call_parameters
                if p.type == 'volume']
    #import pylab; pylab.plot(vol_pars[0][0],vol_pars[0][1]); pylab.show()
    dispersity, weight = dispersion_mesh(model_info, vol_pars)
    return dispersity, weight


def _make_sesans_transform(data):
    from sas.sascalc.data_util.nxsunit import Converter

    # Pre-compute the Hankel matrix (H)
    SElength = Converter(data._xunit)(data.x, "A")

    theta_max = Converter("radians")(data.sample.zacceptance)[0]
    q_max = 2 * np.pi / np.max(data.source.wavelength) * np.sin(theta_max)
    zaccept = Converter("1/A")(q_max, "1/" + data.source.wavelength_unit)

    Rmax = 10000000
    hankel = sesans.SesansTransform(data.x, SElength,
                                    data.source.wavelength,
                                    zaccept, Rmax)
    return hankel


class DataMixin(object):
    """
    DataMixin captures the common aspects of evaluating a SAS model for a
    particular data set, including calculating Iq and evaluating the
    resolution function.  It is used in particular by :class:`DirectModel`,
    which evaluates a SAS model with parameters given as keyword arguments
    to the calculator method, and by :class:`bumps_model.Experiment`, which
    wraps the model and data for use with the Bumps fitting engine.  It is
    not currently used by :class:`sasview_model.SasviewModel` since this will
    require a number of changes to SasView before we can do it.

    :meth:`_interpret_data` initializes the data structures necessary
    to manage the calculations.  This sets attributes in the child class
    such as *data_type* and *resolution*.

    :meth:`_calc_theory` evaluates the model at the given control values.

    :meth:`_set_data` sets the intensity data in the data object,
    possibly with random noise added.  This is useful for simulating a
    dataset with the results from :meth:`_calc_theory`.
    """
    def _interpret_data(self, data, model):
        # type: (Data, KernelModel) -> None
        # pylint: disable=attribute-defined-outside-init

        self._data = data
        self._model = model

        # interpret data
        if hasattr(data, 'isSesans') and data.isSesans:
            self.data_type = 'sesans'
        elif hasattr(data, 'qx_data'):
            self.data_type = 'Iqxy'
        elif getattr(data, 'oriented', False):
            self.data_type = 'Iq-oriented'
        else:
            self.data_type = 'Iq'

        if self.data_type == 'sesans':
            res = _make_sesans_transform(data)
            index = slice(None, None)
            if data.y is not None:
                Iq, dIq = data.y, data.dy
            else:
                Iq, dIq = None, None
        elif self.data_type == 'Iqxy':
            #if not model.info.parameters.has_2d:
            #    raise ValueError("not 2D without orientation or magnetic parameters")
            q = np.sqrt(data.qx_data**2 + data.qy_data**2)
            qmin = getattr(data, 'qmin', 1e-16)
            qmax = getattr(data, 'qmax', np.inf)
            accuracy = getattr(data, 'accuracy', 'Low')
            index = (data.mask == 0) & (q >= qmin) & (q <= qmax)
            if data.data is not None:
                index &= ~np.isnan(data.data)
                Iq = data.data[index]
                dIq = data.err_data[index]
            else:
                Iq, dIq = None, None
            res = resolution2d.Pinhole2D(data=data, index=index,
                                         nsigma=3.0, accuracy=accuracy)
        elif self.data_type == 'Iq':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            mask = getattr(data, 'mask', None)
            if mask is not None:
                index &= (mask == 0)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if getattr(data, 'dx', None) is not None:
                q, dq = data.x[index], data.dx[index]
                if (dq > 0).any():
                    res = resolution.Pinhole1D(q, dq)
                else:
                    res = resolution.Perfect1D(q)
            elif (getattr(data, 'dxl', None) is not None
                  and getattr(data, 'dxw', None) is not None):
                res = resolution.Slit1D(data.x[index],
                                        qx_width=data.dxl[index],
                                        qy_width=data.dxw[index])
            else:
                res = resolution.Perfect1D(data.x[index])
        elif self.data_type == 'Iq-oriented':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if (getattr(data, 'dxl', None) is None
                    or getattr(data, 'dxw', None) is None):
                raise ValueError("oriented sample with 1D data needs slit resolution")

            res = resolution2d.Slit2D(data.x[index],
                                      qx_width=data.dxw[index],
                                      qy_width=data.dxl[index])
        else:
            raise ValueError("Unknown data type") # never gets here

        # Remember function inputs so we can delay loading the function and
        # so we can save/restore state
        self._kernel = None
        self.Iq, self.dIq, self.index = Iq, dIq, index
        self.resolution = res

    def _set_data(self, Iq, noise=None):
        # type: (np.ndarray, Optional[float]) -> None
        # pylint: disable=attribute-defined-outside-init
        if noise is not None:
            self.dIq = Iq*noise*0.01
        dy = self.dIq
        y = Iq + np.random.randn(*dy.shape) * dy
        self.Iq = y
        if self.data_type in ('Iq', 'Iq-oriented'):
            if self._data.y is None:
                self._data.y = np.empty(len(self._data.x), 'd')
            if self._data.dy is None:
                self._data.dy = np.empty(len(self._data.x), 'd')
            self._data.dy[self.index] = dy
            self._data.y[self.index] = y
        elif self.data_type == 'Iqxy':
            if self._data.data is None:
                self._data.data = np.empty_like(self._data.qx_data, 'd')
            if self._data.err_data is None:
                self._data.err_data = np.empty_like(self._data.qx_data, 'd')
            self._data.data[self.index] = y
            self._data.err_data[self.index] = dy
        elif self.data_type == 'sesans':
            if self._data.y is None:
                self._data.y = np.empty(len(self._data.x), 'd')
            self._data.y[self.index] = y
        else:
            raise ValueError("Unknown data type")

    def _calc_theory(self, pars, cutoff=0.0):
        # type: (ParameterSet, float) -> np.ndarray
        if self._kernel is None:
            # TODO: change interfaces so that resolution returns kernel inputs
            # Maybe have resolution always return a tuple, or maybe have
            # make_kernel accept either an ndarray or a pair of ndarrays.
            kernel_inputs = self.resolution.q_calc
            if isinstance(kernel_inputs, np.ndarray):
                kernel_inputs = (kernel_inputs,)
            self._kernel = self._model.make_kernel(kernel_inputs)

        # Need to pull background out of resolution for multiple scattering
        background = pars.get('background', DEFAULT_BACKGROUND)
        pars = pars.copy()
        pars['background'] = 0.

        Iq_calc = call_kernel(self._kernel, pars, cutoff=cutoff)
        # Storing the calculated Iq values so that they can be plotted.
        # Only applies to oriented USANS data for now.
        # TODO: extend plotting of calculated Iq to other measurement types
        # TODO: refactor so we don't store the result in the model
        self.Iq_calc = Iq_calc
        result = self.resolution.apply(Iq_calc)
        if hasattr(self.resolution, 'nx'):
            self.Iq_calc = (
                self.resolution.qx_calc, self.resolution.qy_calc,
                np.reshape(Iq_calc, (self.resolution.ny, self.resolution.nx))
            )
        return result + background


class DirectModel(DataMixin):
    """
    Create a calculator object for a model.

    *data* is 1D SAS, 2D SAS or SESANS data

    *model* is a model calculator returned from :func:`core.load_model`

    *cutoff* is the polydispersity weight cutoff.
    """
    def __init__(self, data, model, cutoff=1e-5):
        # type: (Data, KernelModel, float) -> None
        self.model = model
        self.cutoff = cutoff
        # Note: _interpret_data defines the model attributes
        self._interpret_data(data, model)

    def __call__(self, **pars):
        # type: (**float) -> np.ndarray
        return self._calc_theory(pars, cutoff=self.cutoff)

    def simulate_data(self, noise=None, **pars):
        # type: (Optional[float], **float) -> None
        """
        Generate simulated data for the model.
        """
        Iq = self.__call__(**pars)
        self._set_data(Iq, noise=noise)

    def profile(self, **pars):
        # type: (**float) -> None
        """
        Generate a plottable profile.
        """
        return call_profile(self.model.info, **pars)

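# A sketch of generating simulated data with DirectModel (reusing *calculator*
# from the usage sketch near the top of this file; *noise* is the relative
# uncertainty in percent):
#
#     calculator.simulate_data(noise=5, radius=200, length=300)
#
# For models that define a profile function, *calculator.profile(**pars)*
# returns *x, y, (xlabel, ylabel)* ready for plotting.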
def main():
    # type: () -> None
    """
    Program to evaluate a particular model at a set of q values.
    """
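    # Example invocations (sketch; model names and parameter values are
    # illustrative):
    #     python -m sasmodels.direct_model cylinder 0.01 radius=200 length=300
    #     python -m sasmodels.direct_model cylinder 0.01,0.02 radius=200 theta=30
    #     python -m sasmodels.direct_model ellipsoid ER_VR radius_polar=20 radius_equatorial=400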
    import sys
    from .data import empty_data1D, empty_data2D
    from .core import load_model_info, build_model

    if len(sys.argv) < 3:
        print("usage: python -m sasmodels.direct_model modelname (q|qx,qy|ER_VR) par=val ...")
        sys.exit(1)
    model_name = sys.argv[1]
    call = sys.argv[2].upper()
    if call != "ER_VR":
        try:
            values = [float(v) for v in call.split(',')]
        except ValueError:
            values = []
        if len(values) == 1:
            q, = values
            data = empty_data1D([q])
        elif len(values) == 2:
            qx, qy = values
            data = empty_data2D([qx], [qy])
        else:
            print("use q or qx,qy or ER_VR")
            sys.exit(1)
    else:
        data = empty_data1D([0.001])  # Data not used in ER/VR

    model_info = load_model_info(model_name)
    model = build_model(model_info)
    calculator = DirectModel(data, model)
    pars = dict((k, (float(v) if not k.endswith("_pd_type") else v))
                for pair in sys.argv[3:]
                for k, v in [pair.split('=')])
    if call == "ER_VR":
        ER = call_ER(model_info, pars)
        VR = call_VR(model_info, pars)
        print(ER, VR)
    else:
        Iq = calculator(**pars)
        print(Iq[0])

if __name__ == "__main__":
    main()