source: sasmodels/sasmodels/direct_model.py @ 6d5601c

Last change on this file since 6d5601c was 7b9e4dd, checked in by Paul Kienzle <pkienzle@…>, 6 years ago

fix doc plots to use the default background

"""
Class interface to the model calculator.

Calling a model is somewhat non-trivial since the functions called depend
on the data type.  For 1D data the *Iq* kernel needs to be called, for
2D data the *Iqxy* kernel needs to be called, and for SESANS data the
*Iq* kernel needs to be called followed by a Hankel transform.  Before
the kernel is called an appropriate *q* calculation vector needs to be
constructed.  This is not simply the *q* vector at which the data were
measured, since the resolution calculation requires values beyond the
range of the measured data.  After the calculation the resolution calculator
must be called to return the predicted value for each measured data point.

:class:`DirectModel` is a callable object that takes *parameter=value*
keyword arguments and returns the appropriate theory values for the data.

:class:`DataMixin` does the real work of interpreting the data and calling
the model calculator.  It is used by :class:`DirectModel`, which uses
direct parameter values, and by :class:`bumps_model.Experiment`, which wraps
the parameter values in boxes so that the user can set fitting ranges, etc.,
on the individual parameters and send the model to the Bumps optimizers.
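
For example, a minimal 1D calculation might look like the following sketch
(the model name and parameter values are purely illustrative)::

    import numpy as np
    from sasmodels.core import load_model
    from sasmodels.data import empty_data1D
    from sasmodels.direct_model import DirectModel

    q = np.logspace(-3, -1, 200)          # q values at which to evaluate
    data = empty_data1D(q)                # empty data container at those q
    calculator = DirectModel(data, load_model("cylinder"))
    Iq = calculator(radius=20, length=400, background=0.001)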
"""
from __future__ import print_function

import numpy as np  # type: ignore

# TODO: fix sesans module
from . import sesans  # type: ignore
from . import weights
from . import resolution
from . import resolution2d
from .details import make_kernel_args, dispersion_mesh
from .modelinfo import DEFAULT_BACKGROUND

# pylint: disable=unused-import
try:
    from typing import Dict, List, Optional, Tuple
except ImportError:
    pass
else:
    from .data import Data
    from .kernel import Kernel, KernelModel
    from .modelinfo import ModelInfo, Parameter, ParameterSet
# pylint: enable=unused-import

def call_kernel(calculator, pars, cutoff=0., mono=False):
    # type: (Kernel, ParameterSet, float, bool) -> np.ndarray
    """
    Call *calculator*, the kernel returned from *model.make_kernel*, with
    parameters *pars*.

    *cutoff* is the limiting value for the product of dispersion weights used
    to perform the multidimensional dispersion calculation more quickly at a
    slight cost to accuracy. The default value of *cutoff=0* integrates over
    the entire dispersion cube.  Using *cutoff=1e-5* can be 50% faster, but
    with an error of about 1%, which is usually less than the measurement
    uncertainty.

    *mono* is True if polydispersity should be suppressed, treating all
    parameters as monodisperse.
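
    A sketch of typical use (the model and parameter names are illustrative)::

        from numpy import logspace
        from sasmodels.core import load_model

        model = load_model("sphere")
        kernel = model.make_kernel([logspace(-3, -1, 200)])
        pars = {"radius": 50.0, "radius_pd": 0.2, "radius_pd_n": 35}
        Iq = call_kernel(kernel, pars)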
    """
    mesh = get_mesh(calculator.info, pars, dim=calculator.dim, mono=mono)
    #print("pars", list(zip(*mesh))[0])
    call_details, values, is_magnetic = make_kernel_args(calculator, mesh)
    #print("values:", values)
    return calculator(call_details, values, cutoff, is_magnetic)

def call_ER(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> float
    """
    Call the model ER function using *pars*.

    *model_info* is either *model.info* if you have a loaded model,
    or *kernel.info* if you have a model kernel prepared for evaluation.
    """
    if model_info.ER is None:
        return 1.0
    elif not model_info.parameters.form_volume_parameters:
        # handle the case where ER is provided but model is not polydisperse
        return model_info.ER()
    else:
        value, weight = _vol_pars(model_info, pars)
        individual_radii = model_info.ER(*value)
        return np.sum(weight*individual_radii) / np.sum(weight)


def call_VR(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> float
    """
    Call the model VR function using *pars*.

    *model_info* is either *model.info* if you have a loaded model,
    or *kernel.info* if you have a model kernel prepared for evaluation.
    """
    if model_info.VR is None:
        return 1.0
    elif not model_info.parameters.form_volume_parameters:
        # handle the case where VR is provided but model is not polydisperse
        return model_info.VR()
    else:
        value, weight = _vol_pars(model_info, pars)
        whole, part = model_info.VR(*value)
        return np.sum(weight*part)/np.sum(weight*whole)


def call_profile(model_info, **pars):
    # type: (ModelInfo, ...) -> Tuple[np.ndarray, np.ndarray, Tuple[str, str]]
    """
    Returns the profile *x, y, (xlabel, ylabel)* representing the model.
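
    Only models that define a *profile* function (for example, the onion
    model) support this.  A sketch of use, relying on the default parameter
    values::

        from sasmodels.core import load_model_info
        x, y, labels = call_profile(load_model_info("onion"))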
    """
    args = {}
    for p in model_info.parameters.kernel_parameters:
        if p.length > 1:
            value = np.array([pars.get(p.id+str(j), p.default)
                              for j in range(1, p.length+1)])
        else:
            value = pars.get(p.id, p.default)
        args[p.id] = value
    x, y = model_info.profile(**args)
    return x, y, model_info.profile_axes

def get_mesh(model_info, values, dim='1d', mono=False):
    # type: (ModelInfo, Dict[str, float], str, bool) -> List[Tuple[float, np.ndarray, np.ndarray]]
    """
    Retrieve the dispersity mesh described by the parameter set.

    Returns a list of *(value, dispersity, weights)* with one tuple for each
    parameter in the model call parameters.  Inactive parameters return the
    default value with a weight of 1.0.
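
    For example, for a model with a single polydisperse *radius*, the list
    might contain entries along the lines of (values are illustrative)::

        (50.0, [45.0, 50.0, 55.0], [0.2, 0.6, 0.2])   # polydisperse radius
        (1.0, [1.0], [1.0])                           # monodisperse scale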
    """
    parameters = model_info.parameters
    if mono:
        active = lambda name: False
    elif dim == '1d':
        active = lambda name: name in parameters.pd_1d
    elif dim == '2d':
        active = lambda name: name in parameters.pd_2d
    else:
        active = lambda name: True

    #print("pars",[p.id for p in parameters.call_parameters])
    mesh = [_get_par_weights(p, values, active(p.name))
            for p in parameters.call_parameters]
    return mesh


def _get_par_weights(parameter, values, active=True):
    # type: (Parameter, Dict[str, float], bool) -> Tuple[float, np.ndarray, np.ndarray]
    """
    Generate the distribution for *parameter* given the parameter values
    in *values*.

    Uses "name", "name_pd", "name_pd_type", "name_pd_n", "name_pd_nsigma"
    from the *values* dictionary for the parameter value and parameter
    dispersion.
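
    For example, the following *values* entries (illustrative numbers)
    describe a radius of 50 with a 10% gaussian dispersion sampled at
    35 points over +/- 3 sigma::

        {"radius": 50.0, "radius_pd": 0.1, "radius_pd_n": 35,
         "radius_pd_type": "gaussian", "radius_pd_nsigma": 3.0}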
153    """
154    value = float(values.get(parameter.name, parameter.default))
155    npts = values.get(parameter.name+'_pd_n', 0)
156    width = values.get(parameter.name+'_pd', 0.0)
157    relative = parameter.relative_pd
158    if npts == 0 or width == 0.0 or not active:
159        # Note: orientation parameters have the viewing angle as the parameter
160        # value and the jitter in the distribution, so be sure to set the
161        # empty pd for orientation parameters to 0.
162        pd = [value if relative or not parameter.polydisperse else 0.0], [1.0]
163    else:
164        limits = parameter.limits
165        disperser = values.get(parameter.name+'_pd_type', 'gaussian')
166        nsigma = values.get(parameter.name+'_pd_nsigma', 3.0)
167        pd = weights.get_weights(disperser, npts, width, nsigma,
168                                 value, limits, relative)
169    return value, pd[0], pd[1]
170
171
172def _vol_pars(model_info, values):
173    # type: (ModelInfo, ParameterSet) -> Tuple[np.ndarray, np.ndarray]
174    vol_pars = [_get_par_weights(p, values)
175                for p in model_info.parameters.call_parameters
176                if p.type == 'volume']
177    #import pylab; pylab.plot(vol_pars[0][0],vol_pars[0][1]); pylab.show()
178    dispersity, weight = dispersion_mesh(model_info, vol_pars)
179    return dispersity, weight
180
181
182def _make_sesans_transform(data):
183    from sas.sascalc.data_util.nxsunit import Converter
184
185    # Pre-compute the Hankel matrix (H)
186    SElength = Converter(data._xunit)(data.x, "A")
187
188    theta_max = Converter("radians")(data.sample.zacceptance)[0]
189    q_max = 2 * np.pi / np.max(data.source.wavelength) * np.sin(theta_max)
190    zaccept = Converter("1/A")(q_max, "1/" + data.source.wavelength_unit),
191
192    Rmax = 10000000
193    hankel = sesans.SesansTransform(data.x, SElength,
194                                    data.source.wavelength,
195                                    zaccept, Rmax)
196    return hankel
197
198
199class DataMixin(object):
200    """
201    DataMixin captures the common aspects of evaluating a SAS model for a
202    particular data set, including calculating Iq and evaluating the
203    resolution function.  It is used in particular by :class:`DirectModel`,
204    which evaluates a SAS model parameters as key word arguments to the
205    calculator method, and by :class:`bumps_model.Experiment`, which wraps the
206    model and data for use with the Bumps fitting engine.  It is not
207    currently used by :class:`sasview_model.SasviewModel` since this will
208    require a number of changes to SasView before we can do it.
209
210    :meth:`_interpret_data` initializes the data structures necessary
211    to manage the calculations.  This sets attributes in the child class
212    such as *data_type* and *resolution*.
213
214    :meth:`_calc_theory` evaluates the model at the given control values.
215
216    :meth:`_set_data` sets the intensity data in the data object,
217    possibly with random noise added.  This is useful for simulating a
218    dataset with the results from :meth:`_calc_theory`.
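
    A subclass typically wires these together along the following lines
    (a sketch, not a complete implementation)::

        class Calculator(DataMixin):
            def __init__(self, data, model):
                self._interpret_data(data, model)
            def __call__(self, **pars):
                return self._calc_theory(pars, cutoff=0.)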
    """
    def _interpret_data(self, data, model):
        # type: (Data, KernelModel) -> None
        # pylint: disable=attribute-defined-outside-init

        self._data = data
        self._model = model

        # interpret data
        if hasattr(data, 'isSesans') and data.isSesans:
            self.data_type = 'sesans'
        elif hasattr(data, 'qx_data'):
            self.data_type = 'Iqxy'
        elif getattr(data, 'oriented', False):
            self.data_type = 'Iq-oriented'
        else:
            self.data_type = 'Iq'

        if self.data_type == 'sesans':
            res = _make_sesans_transform(data)
            index = slice(None, None)
            if data.y is not None:
                Iq, dIq = data.y, data.dy
            else:
                Iq, dIq = None, None
            #self._theory = np.zeros_like(q)
            q_vectors = [res.q_calc]
        elif self.data_type == 'Iqxy':
            #if not model.info.parameters.has_2d:
            #    raise ValueError("not 2D without orientation or magnetic parameters")
            q = np.sqrt(data.qx_data**2 + data.qy_data**2)
            qmin = getattr(data, 'qmin', 1e-16)
            qmax = getattr(data, 'qmax', np.inf)
            accuracy = getattr(data, 'accuracy', 'Low')
            index = (data.mask == 0) & (q >= qmin) & (q <= qmax)
            if data.data is not None:
                index &= ~np.isnan(data.data)
                Iq = data.data[index]
                dIq = data.err_data[index]
            else:
                Iq, dIq = None, None
            res = resolution2d.Pinhole2D(data=data, index=index,
                                         nsigma=3.0, accuracy=accuracy)
            #self._theory = np.zeros_like(self.Iq)
            q_vectors = res.q_calc
        elif self.data_type == 'Iq':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            mask = getattr(data, 'mask', None)
            if mask is not None:
                index &= (mask == 0)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if getattr(data, 'dx', None) is not None:
                q, dq = data.x[index], data.dx[index]
                if (dq > 0).any():
                    res = resolution.Pinhole1D(q, dq)
                else:
                    res = resolution.Perfect1D(q)
            elif (getattr(data, 'dxl', None) is not None
                  and getattr(data, 'dxw', None) is not None):
                res = resolution.Slit1D(data.x[index],
                                        qx_width=data.dxl[index],
                                        qy_width=data.dxw[index])
            else:
                res = resolution.Perfect1D(data.x[index])

            #self._theory = np.zeros_like(self.Iq)
            q_vectors = [res.q_calc]
        elif self.data_type == 'Iq-oriented':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if (getattr(data, 'dxl', None) is None
                    or getattr(data, 'dxw', None) is None):
                raise ValueError("oriented sample with 1D data needs slit resolution")

            res = resolution2d.Slit2D(data.x[index],
                                      qx_width=data.dxw[index],
                                      qy_width=data.dxl[index])
            q_vectors = res.q_calc
        else:
            raise ValueError("Unknown data type") # never gets here

        # Remember function inputs so we can delay loading the function and
        # so we can save/restore state
        self._kernel_inputs = q_vectors
        self._kernel = None
        self.Iq, self.dIq, self.index = Iq, dIq, index
        self.resolution = res

    def _set_data(self, Iq, noise=None):
        # type: (np.ndarray, Optional[float]) -> None
        # pylint: disable=attribute-defined-outside-init
        if noise is not None:
            self.dIq = Iq*noise*0.01
        dy = self.dIq
        y = Iq + np.random.randn(*dy.shape) * dy
        self.Iq = y
        if self.data_type in ('Iq', 'Iq-oriented'):
            if self._data.y is None:
                self._data.y = np.empty(len(self._data.x), 'd')
            if self._data.dy is None:
                self._data.dy = np.empty(len(self._data.x), 'd')
            self._data.dy[self.index] = dy
            self._data.y[self.index] = y
        elif self.data_type == 'Iqxy':
            if self._data.data is None:
                self._data.data = np.empty_like(self._data.qx_data, 'd')
            if self._data.err_data is None:
                self._data.err_data = np.empty_like(self._data.qx_data, 'd')
            self._data.data[self.index] = y
            self._data.err_data[self.index] = dy
        elif self.data_type == 'sesans':
            if self._data.y is None:
                self._data.y = np.empty(len(self._data.x), 'd')
            self._data.y[self.index] = y
        else:
            raise ValueError("Unknown model")

    def _calc_theory(self, pars, cutoff=0.0):
        # type: (ParameterSet, float) -> np.ndarray
        if self._kernel is None:
            self._kernel = self._model.make_kernel(self._kernel_inputs)

        # Need to pull background out of resolution for multiple scattering
        background = pars.get('background', DEFAULT_BACKGROUND)
        pars = pars.copy()
        pars['background'] = 0.

        Iq_calc = call_kernel(self._kernel, pars, cutoff=cutoff)
        # Storing the calculated Iq values so that they can be plotted.
        # Only applies to oriented USANS data for now.
        # TODO: extend plotting of calculated Iq to other measurement types
        # TODO: refactor so we don't store the result in the model
        self.Iq_calc = Iq_calc
        result = self.resolution.apply(Iq_calc)
        if hasattr(self.resolution, 'nx'):
            self.Iq_calc = (
                self.resolution.qx_calc, self.resolution.qy_calc,
                np.reshape(Iq_calc, (self.resolution.ny, self.resolution.nx))
            )
        return result + background


class DirectModel(DataMixin):
    """
    Create a calculator object for a model.

    *data* is 1D SAS, 2D SAS or SESANS data

    *model* is a model calculator returned from :func:`generate.load_model`

    *cutoff* is the polydispersity weight cutoff.
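
    A 2D sketch (the model and parameter values are illustrative)::

        import numpy as np
        from sasmodels.core import load_model
        from sasmodels.data import empty_data2D

        qx = qy = np.linspace(-0.5, 0.5, 128)
        calculator = DirectModel(empty_data2D(qx, qy), load_model("cylinder"))
        Iqxy = calculator(radius=20, length=400, theta=45, phi=30)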
    """
    def __init__(self, data, model, cutoff=1e-5):
        # type: (Data, KernelModel, float) -> None
        self.model = model
        self.cutoff = cutoff
        # Note: _interpret_data defines the model attributes
        self._interpret_data(data, model)

    def __call__(self, **pars):
        # type: (**float) -> np.ndarray
        return self._calc_theory(pars, cutoff=self.cutoff)

    def simulate_data(self, noise=None, **pars):
        # type: (Optional[float], **float) -> None
        """
        Generate simulated data for the model.
        """
        Iq = self.__call__(**pars)
        self._set_data(Iq, noise=noise)

    def profile(self, **pars):
        # type: (**float) -> Tuple[np.ndarray, np.ndarray, Tuple[str, str]]
        """
        Generate a plottable profile.
        """
        return call_profile(self.model.info, **pars)

def main():
    # type: () -> None
    """
    Program to evaluate a particular model at a set of q values.
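
    For example (model name and values are illustrative)::

        python -m sasmodels.direct_model cylinder 0.1 radius=20 length=400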
    """
    import sys
    from .data import empty_data1D, empty_data2D
    from .core import load_model_info, build_model

    if len(sys.argv) < 3:
        print("usage: python -m sasmodels.direct_model modelname (q|qx,qy) par=val ...")
        sys.exit(1)
    model_name = sys.argv[1]
    call = sys.argv[2].upper()
    if call != "ER_VR":
        try:
            values = [float(v) for v in call.split(',')]
        except ValueError:
            values = []
        if len(values) == 1:
            q, = values
            data = empty_data1D([q])
        elif len(values) == 2:
            qx, qy = values
            data = empty_data2D([qx], [qy])
        else:
            print("use q or qx,qy or ER or VR")
            sys.exit(1)
    else:
        data = empty_data1D([0.001])  # Data not used in ER/VR

    model_info = load_model_info(model_name)
    model = build_model(model_info)
    calculator = DirectModel(data, model)
    pars = dict((k, (float(v) if not k.endswith("_pd_type") else v))
                for pair in sys.argv[3:]
                for k, v in [pair.split('=')])
    if call == "ER_VR":
        ER = call_ER(model_info, pars)
        VR = call_VR(model_info, pars)
        print(ER, VR)
    else:
        Iq = calculator(**pars)
        print(Iq[0])

if __name__ == "__main__":
    main()