source: sasmodels/sasmodels/direct_model.py @ 9acade6

Last change on this file since 9acade6 was bde38b5, checked in by Paul Kienzle <pkienzle@…>, 8 years ago

simplify kernel calling

1"""
2Class interface to the model calculator.
3
4Calling a model is somewhat non-trivial since the functions called depend
5on the data type.  For 1D data the *Iq* kernel needs to be called, for
62D data the *Iqxy* kernel needs to be called, and for SESANS data the
7*Iq* kernel needs to be called followed by a Hankel transform.  Before
8the kernel is called an appropriate *q* calculation vector needs to be
9constructed.  This is not the simple *q* vector where you have measured
10the data since the resolution calculation will require values beyond the
11range of the measured data.  After the calculation the resolution calculator
12must be called to return the predicted value for each measured data point.
13
14:class:`DirectModel` is a callable object that takes *parameter=value*
15keyword arguments and returns the appropriate theory values for the data.
16
17:class:`DataMixin` does the real work of interpreting the data and calling
18the model calculator.  This is used by :class:`DirectModel`, which uses
19direct parameter values and by :class:`bumps_model.Experiment` which wraps
20the parameter values in boxes so that the user can set fitting ranges, etc.
21on the individual parameters and send the model to the Bumps optimizers.
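
For example, a rough sketch of typical use of :class:`DirectModel` (the
*cylinder* model and the parameter values here are illustrative only)::

    import numpy as np
    from sasmodels.core import load_model
    from sasmodels.data import empty_data1D
    from sasmodels.direct_model import DirectModel

    q = np.logspace(-3, -1, 200)              # q values to evaluate
    data = empty_data1D(q)                    # wrap q in an empty data object
    model = load_model("cylinder")            # compiled kernel model
    calculator = DirectModel(data, model)
    Iq = calculator(radius=200., length=300.) # theory at the data q values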
22"""
23from __future__ import print_function
24
25import numpy as np  # type: ignore
26
27# TODO: fix sesans module
28from . import sesans  # type: ignore
29from . import weights
30from . import resolution
31from . import resolution2d
32from .details import make_kernel_args, dispersion_mesh
33
34try:
35    from typing import Optional, Dict, Tuple
36except ImportError:
37    pass
38else:
39    from .data import Data
40    from .kernel import Kernel, KernelModel
    from .modelinfo import Parameter, ParameterSet, ModelInfo  # ModelInfo is referenced in type comments below

def call_kernel(calculator, pars, cutoff=0., mono=False):
    # type: (Kernel, ParameterSet, float, bool) -> np.ndarray
45    """
46    Call *kernel* returned from *model.make_kernel* with parameters *pars*.
47
48    *cutoff* is the limiting value for the product of dispersion weights used
49    to perform the multidimensional dispersion calculation more quickly at a
50    slight cost to accuracy. The default value of *cutoff=0* integrates over
51    the entire dispersion cube.  Using *cutoff=1e-5* can be 50% faster, but
52    with an error of about 1%, which is usually less than the measurement
53    uncertainty.
54
55    *mono* is True if polydispersity should be set to none on all parameters.
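
    Example (a rough sketch; assumes the *sphere* model, with the kernel
    built for a single 1D *q* vector)::

        import numpy as np
        from sasmodels.core import load_model

        q = np.logspace(-3, -1, 50)
        kernel = load_model("sphere").make_kernel([q])
        Iq = call_kernel(kernel, {"radius": 100., "radius_pd": 0.1,
                                  "radius_pd_n": 35})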
56    """
57    parameters = calculator.info.parameters
58    if mono:
59        active = lambda name: False
60    elif calculator.dim == '1d':
61        active = lambda name: name in parameters.pd_1d
62    elif calculator.dim == '2d':
63        active = lambda name: name in parameters.pd_2d
64    else:
65        active = lambda name: True
66
67    #print("pars",[p.id for p in parameters.call_parameters])
68    vw_pairs = [(get_weights(p, pars) if active(p.name)
69                 else ([pars.get(p.name, p.default)], [1.0]))
70                for p in parameters.call_parameters]
71
72    call_details, values, is_magnetic = make_kernel_args(calculator, vw_pairs)
73    #print("values:", values)
74    return calculator(call_details, values, cutoff, is_magnetic)
75
76
77def call_ER(model_info, pars):
78    # type: (ModelInfo, ParameterSet) -> float
79    """
80    Call the model ER function using *values*.
81
82    *model_info* is either *model.info* if you have a loaded model,
83    or *kernel.info* if you have a model kernel prepared for evaluation.
84    """
85    if model_info.ER is None:
86        return 1.0
87    else:
88        value, weight = _vol_pars(model_info, pars)
89        individual_radii = model_info.ER(*value)
90        return np.sum(weight*individual_radii) / np.sum(weight)
91
92
93def call_VR(model_info, pars):
94    # type: (ModelInfo, ParameterSet) -> float
95    """
96    Call the model VR function using *pars*.
97
98    *model_info* is either *model.info* if you have a loaded model,
99    or *kernel.info* if you have a model kernel prepared for evaluation.
100    """
101    if model_info.VR is None:
102        return 1.0
103    else:
104        value, weight = _vol_pars(model_info, pars)
105        whole, part = model_info.VR(*value)
106        return np.sum(weight*part)/np.sum(weight*whole)
107
108
109def call_profile(model_info, **pars):
110    # type: (ModelInfo, ...) -> Tuple[np.ndarray, np.ndarray, Tuple[str, str]]
111    """
112    Returns the profile *x, y, (xlabel, ylabel)* representing the model.
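
    Example (a sketch; assumes a model such as *onion* that defines a
    *profile* function; default parameter values are used)::

        from sasmodels.core import load_model_info
        x, y, (xlabel, ylabel) = call_profile(load_model_info("onion"))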
113    """
114    args = {}
115    for p in model_info.parameters.kernel_parameters:
116        if p.length > 1:
117            value = np.array([pars.get(p.id+str(j), p.default)
118                              for j in range(1, p.length+1)])
119        else:
120            value = pars.get(p.id, p.default)
121        args[p.id] = value
122    x, y = model_info.profile(**args)
123    return x, y, model_info.profile_axes
124
125
126def get_weights(parameter, values):
127    # type: (Parameter, Dict[str, float]) -> Tuple[np.ndarray, np.ndarray]
128    """
129    Generate the distribution for parameter *name* given the parameter values
130    in *pars*.
131
132    Uses "name", "name_pd", "name_pd_type", "name_pd_n", "name_pd_sigma"
133    from the *pars* dictionary for parameter value and parameter dispersion.
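
    For example, a dispersion on *radius* might be described by the entries::

        {"radius": 120., "radius_pd": 0.2, "radius_pd_n": 35,
         "radius_pd_type": "gaussian", "radius_pd_nsigma": 3.}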
134    """
135    value = float(values.get(parameter.name, parameter.default))
136    relative = parameter.relative_pd
137    limits = parameter.limits
138    disperser = values.get(parameter.name+'_pd_type', 'gaussian')
139    npts = values.get(parameter.name+'_pd_n', 0)
140    width = values.get(parameter.name+'_pd', 0.0)
141    nsigma = values.get(parameter.name+'_pd_nsigma', 3.0)
142    if npts == 0 or width == 0:
143        return [value], [1.0]
144    value, weight = weights.get_weights(
145        disperser, npts, width, nsigma, value, limits, relative)
146    return value, weight / np.sum(weight)
147
148
149def _vol_pars(model_info, pars):
150    # type: (ModelInfo, ParameterSet) -> Tuple[np.ndarray, np.ndarray]
151    vol_pars = [get_weights(p, pars)
152                for p in model_info.parameters.call_parameters
153                if p.type == 'volume']
154    value, weight = dispersion_mesh(model_info, vol_pars)
155    return value, weight
156
157
158class DataMixin(object):
159    """
160    DataMixin captures the common aspects of evaluating a SAS model for a
161    particular data set, including calculating Iq and evaluating the
162    resolution function.  It is used in particular by :class:`DirectModel`,
163    which evaluates a SAS model parameters as key word arguments to the
164    calculator method, and by :class:`bumps_model.Experiment`, which wraps the
165    model and data for use with the Bumps fitting engine.  It is not
166    currently used by :class:`sasview_model.SasviewModel` since this will
167    require a number of changes to SasView before we can do it.
168
169    :meth:`_interpret_data` initializes the data structures necessary
170    to manage the calculations.  This sets attributes in the child class
171    such as *data_type* and *resolution*.
172
173    :meth:`_calc_theory` evaluates the model at the given control values.
174
175    :meth:`_set_data` sets the intensity data in the data object,
176    possibly with random noise added.  This is useful for simulating a
177    dataset with the results from :meth:`_calc_theory`.
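
    A minimal subclass sketch (illustrative only; :class:`DirectModel`
    below is the working example)::

        class Calculator(DataMixin):
            def __init__(self, data, model):
                self._interpret_data(data, model)
            def __call__(self, **pars):
                return self._calc_theory(pars, cutoff=0.)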
178    """
179    def _interpret_data(self, data, model):
180        # type: (Data, KernelModel) -> None
181        # pylint: disable=attribute-defined-outside-init
182
183        self._data = data
184        self._model = model
185
186        # interpret data
187        if hasattr(data, 'lam'):
188            self.data_type = 'sesans'
189        elif hasattr(data, 'qx_data'):
190            self.data_type = 'Iqxy'
191        elif getattr(data, 'oriented', False):
192            self.data_type = 'Iq-oriented'
193        else:
194            self.data_type = 'Iq'
195
196        if self.data_type == 'sesans':
197            q = sesans.make_q(data.sample.zacceptance, data.Rmax)
198            index = slice(None, None)
199            res = None
200            if data.y is not None:
201                Iq, dIq = data.y, data.dy
202            else:
203                Iq, dIq = None, None
204            #self._theory = np.zeros_like(q)
205            q_vectors = [q]
206            q_mono = sesans.make_all_q(data)
207        elif self.data_type == 'Iqxy':
208            #if not model.info.parameters.has_2d:
209            #    raise ValueError("not 2D without orientation or magnetic parameters")
210            q = np.sqrt(data.qx_data**2 + data.qy_data**2)
211            qmin = getattr(data, 'qmin', 1e-16)
212            qmax = getattr(data, 'qmax', np.inf)
213            accuracy = getattr(data, 'accuracy', 'Low')
214            index = ~data.mask & (q >= qmin) & (q <= qmax)
215            if data.data is not None:
216                index &= ~np.isnan(data.data)
217                Iq = data.data[index]
218                dIq = data.err_data[index]
219            else:
220                Iq, dIq = None, None
221            res = resolution2d.Pinhole2D(data=data, index=index,
222                                         nsigma=3.0, accuracy=accuracy)
223            #self._theory = np.zeros_like(self.Iq)
224            q_vectors = res.q_calc
225            q_mono = []
        elif self.data_type == 'Iq':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if getattr(data, 'dx', None) is not None:
                q, dq = data.x[index], data.dx[index]
                if (dq > 0).any():
                    res = resolution.Pinhole1D(q, dq)
                else:
                    res = resolution.Perfect1D(q)
            elif (getattr(data, 'dxl', None) is not None
                  and getattr(data, 'dxw', None) is not None):
                res = resolution.Slit1D(data.x[index],
                                        qx_width=data.dxl[index],
                                        qy_width=data.dxw[index])
            else:
                res = resolution.Perfect1D(data.x[index])

            #self._theory = np.zeros_like(self.Iq)
            q_vectors = [res.q_calc]
            q_mono = []
        elif self.data_type == 'Iq-oriented':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if (getattr(data, 'dxl', None) is None
                    or getattr(data, 'dxw', None) is None):
                raise ValueError("oriented sample with 1D data needs slit resolution")

            res = resolution2d.Slit2D(data.x[index],
                                      qx_width=data.dxw[index],
                                      qy_width=data.dxl[index])
            q_vectors = res.q_calc
            q_mono = []
        else:
            raise ValueError("Unknown data type") # never gets here

        # Remember function inputs so we can delay loading the function and
        # so we can save/restore state
        self._kernel_inputs = q_vectors
        self._kernel_mono_inputs = q_mono
        self._kernel = None
        self.Iq, self.dIq, self.index = Iq, dIq, index
        self.resolution = res

    def _set_data(self, Iq, noise=None):
        # type: (np.ndarray, Optional[float]) -> None
        # pylint: disable=attribute-defined-outside-init
        if noise is not None:
            self.dIq = Iq*noise*0.01
        dy = self.dIq
        y = Iq + np.random.randn(*dy.shape) * dy
        self.Iq = y
        if self.data_type in ('Iq', 'Iq-oriented'):
            self._data.dy[self.index] = dy
            self._data.y[self.index] = y
        elif self.data_type == 'Iqxy':
            self._data.data[self.index] = y
        elif self.data_type == 'sesans':
            self._data.y[self.index] = y
        else:
            raise ValueError("Unknown model")

    def _calc_theory(self, pars, cutoff=0.0):
        # type: (ParameterSet, float) -> np.ndarray
        if self._kernel is None:
            self._kernel = self._model.make_kernel(self._kernel_inputs)
            self._kernel_mono = (
                self._model.make_kernel(self._kernel_mono_inputs)
                if self._kernel_mono_inputs else None)

        Iq_calc = call_kernel(self._kernel, pars, cutoff=cutoff)
        # Storing the calculated Iq values so that they can be plotted.
        # Only applies to oriented USANS data for now.
        # TODO: extend plotting of calculated Iq to other measurement types
        # TODO: refactor so we don't store the result in the model
        self.Iq_calc = None
        if self.data_type == 'sesans':
            Iq_mono = (call_kernel(self._kernel_mono, pars, mono=True)
                       if self._kernel_mono_inputs else None)
            result = sesans.transform(self._data,
                                      self._kernel_inputs[0], Iq_calc,
                                      self._kernel_mono_inputs, Iq_mono)
        else:
            result = self.resolution.apply(Iq_calc)
            if hasattr(self.resolution, 'nx'):
                self.Iq_calc = (
                    self.resolution.qx_calc, self.resolution.qy_calc,
                    np.reshape(Iq_calc, (self.resolution.ny, self.resolution.nx))
                )
        return result


class DirectModel(DataMixin):
328    """
329    Create a calculator object for a model.
330
331    *data* is 1D SAS, 2D SAS or SESANS data
332
333    *model* is a model calculator return from :func:`generate.load_model`
334
335    *cutoff* is the polydispersity weight cutoff.
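
    Example (a rough sketch; the *cylinder* model and the parameter values
    are illustrative, and *simulate_data* adds gaussian noise at the 5%
    level)::

        import numpy as np
        from sasmodels.core import load_model
        from sasmodels.data import empty_data1D

        data = empty_data1D(np.logspace(-3, -1, 200))
        calculator = DirectModel(data, load_model("cylinder"))
        Iq = calculator(radius=200., length=300.)
        calculator.simulate_data(noise=5., radius=200., length=300.)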
336    """
337    def __init__(self, data, model, cutoff=1e-5):
338        # type: (Data, KernelModel, float) -> None
339        self.model = model
340        self.cutoff = cutoff
341        # Note: _interpret_data defines the model attributes
342        self._interpret_data(data, model)
343
344    def __call__(self, **pars):
345        # type: (**float) -> np.ndarray
346        return self._calc_theory(pars, cutoff=self.cutoff)
347
348    def simulate_data(self, noise=None, **pars):
349        # type: (Optional[float], **float) -> None
350        """
351        Generate simulated data for the model.
352        """
353        Iq = self.__call__(**pars)
354        self._set_data(Iq, noise=noise)
355
356    def profile(self, **pars):
        # type: (**float) -> Tuple[np.ndarray, np.ndarray, Tuple[str, str]]
358        """
359        Generate a plottable profile.
360        """
361        return call_profile(self.model.info, **pars)
362
363def main():
364    # type: () -> None
365    """
366    Program to evaluate a particular model at a set of q values.
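
    Example invocations (the model name and parameter values are
    illustrative)::

        python -m sasmodels.direct_model cylinder 0.05 radius=200 length=300
        python -m sasmodels.direct_model cylinder ER_VR radius=200 length=300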
367    """
368    import sys
369    from .data import empty_data1D, empty_data2D
370    from .core import load_model_info, build_model
371
372    if len(sys.argv) < 3:
373        print("usage: python -m sasmodels.direct_model modelname (q|qx,qy) par=val ...")
374        sys.exit(1)
375    model_name = sys.argv[1]
376    call = sys.argv[2].upper()
377    if call != "ER_VR":
378        try:
379            values = [float(v) for v in call.split(',')]
380        except Exception:
381            values = []
382        if len(values) == 1:
383            q, = values
384            data = empty_data1D([q])
385        elif len(values) == 2:
386            qx, qy = values
387            data = empty_data2D([qx], [qy])
388        else:
389            print("use q or qx,qy or ER or VR")
390            sys.exit(1)
391    else:
392        data = empty_data1D([0.001])  # Data not used in ER/VR
393
394    model_info = load_model_info(model_name)
395    model = build_model(model_info)
396    calculator = DirectModel(data, model)
397    pars = dict((k, float(v))
398                for pair in sys.argv[3:]
399                for k, v in [pair.split('=')])
400    if call == "ER_VR":
401        ER = call_ER(model_info, pars)
402        VR = call_VR(model_info, pars)
403        print(ER, VR)
404    else:
405        Iq = calculator(**pars)
406        print(Iq[0])
407
408if __name__ == "__main__":
409    main()