source: sasmodels/sasmodels/direct_model.py @ fbb9397

Last change on this file since fbb9397 was 32f87a5, checked in by Paul Kienzle <pkienzle@…>, 7 years ago

update sasview wrapper with new way of handling orientation dispersity

1"""
2Class interface to the model calculator.
3
4Calling a model is somewhat non-trivial since the functions called depend
5on the data type.  For 1D data the *Iq* kernel needs to be called, for
62D data the *Iqxy* kernel needs to be called, and for SESANS data the
7*Iq* kernel needs to be called followed by a Hankel transform.  Before
8the kernel is called an appropriate *q* calculation vector needs to be
9constructed.  This is not the simple *q* vector where you have measured
10the data since the resolution calculation will require values beyond the
11range of the measured data.  After the calculation the resolution calculator
12must be called to return the predicted value for each measured data point.
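
A sketch of that pipeline for 1D data with pinhole resolution (using the
names defined in this module and in :mod:`resolution`; *model* is assumed
to be a loaded :class:`kernel.KernelModel` and *pars* a {name: value}
dictionary)::

    res = resolution.Pinhole1D(q, dq)         # q_calc extends beyond q
    kernel = model.make_kernel([res.q_calc])
    Iq_calc = call_kernel(kernel, pars)        # theory on the q_calc grid
    Iq = res.apply(Iq_calc)                    # smeared I(q) at measured q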

:class:`DirectModel` is a callable object that takes *parameter=value*
keyword arguments and returns the appropriate theory values for the data.

:class:`DataMixin` does the real work of interpreting the data and calling
the model calculator.  It is used by :class:`DirectModel`, which uses
direct parameter values, and by :class:`bumps_model.Experiment`, which
wraps the parameter values in boxes so that the user can set fitting
ranges, etc. on the individual parameters and send the model to the Bumps
optimizers.
"""
from __future__ import print_function

import numpy as np  # type: ignore

# TODO: fix sesans module
from . import sesans  # type: ignore
from . import weights
from . import resolution
from . import resolution2d
from .details import make_kernel_args, dispersion_mesh

try:
    from typing import Optional, Dict, Tuple
except ImportError:
    pass
else:
    from .data import Data
    from .kernel import Kernel, KernelModel
    from .modelinfo import ModelInfo, Parameter, ParameterSet

def call_kernel(calculator, pars, cutoff=0., mono=False):
    # type: (Kernel, ParameterSet, float, bool) -> np.ndarray
    """
    Call *calculator*, the kernel returned from *model.make_kernel*, with
    parameters *pars*.

    *cutoff* is the limiting value for the product of dispersion weights used
    to perform the multidimensional dispersion calculation more quickly at a
    slight cost to accuracy. The default value of *cutoff=0* integrates over
    the entire dispersion cube.  Using *cutoff=1e-5* can be 50% faster, but
    with an error of about 1%, which is usually less than the measurement
    uncertainty.

    *mono* is True if polydispersity should be turned off for all parameters.
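
    A minimal sketch of use (assuming the *sphere* model; the "name",
    "name_pd", "name_pd_n" keys follow the convention described in
    :func:`get_weights`)::

        import numpy as np
        from sasmodels.core import load_model

        model = load_model("sphere")
        kernel = model.make_kernel([np.logspace(-3, -1, 200)])
        pars = {'radius': 200, 'radius_pd': 0.1, 'radius_pd_n': 35}
        Iq = call_kernel(kernel, pars)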
56    """
57    parameters = calculator.info.parameters
58    if mono:
59        active = lambda name: False
60    elif calculator.dim == '1d':
61        active = lambda name: name in parameters.pd_1d
62    elif calculator.dim == '2d':
63        active = lambda name: name in parameters.pd_2d
64    else:
65        active = lambda name: True
66
67    #print("pars",[p.id for p in parameters.call_parameters])
68    mesh = [get_weights(p, pars, active(p.name))
69            for p in parameters.call_parameters]
70
71    call_details, values, is_magnetic = make_kernel_args(calculator, mesh)
72    #print("values:", values)
73    return calculator(call_details, values, cutoff, is_magnetic)
74
75
def call_ER(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> float
    """
    Call the model ER function using *pars*.

    *model_info* is either *model.info* if you have a loaded model,
    or *kernel.info* if you have a model kernel prepared for evaluation.
    """
    if model_info.ER is None:
        return 1.0
    elif not model_info.parameters.form_volume_parameters:
        # handle the case where ER is provided but model is not polydisperse
        return model_info.ER()
    else:
        value, weight = _vol_pars(model_info, pars)
        individual_radii = model_info.ER(*value)
        return np.sum(weight*individual_radii) / np.sum(weight)


def call_VR(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> float
    """
    Call the model VR function using *pars*.

    *model_info* is either *model.info* if you have a loaded model,
    or *kernel.info* if you have a model kernel prepared for evaluation.
    """
    if model_info.VR is None:
        return 1.0
    elif not model_info.parameters.form_volume_parameters:
        # handle the case where VR is provided but model is not polydisperse
        return model_info.VR()
    else:
        value, weight = _vol_pars(model_info, pars)
        whole, part = model_info.VR(*value)
        return np.sum(weight*part)/np.sum(weight*whole)


def call_profile(model_info, **pars):
    # type: (ModelInfo, ...) -> Tuple[np.ndarray, np.ndarray, Tuple[str, str]]
    """
    Returns the profile *x, y, (xlabel, ylabel)* representing the model.
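
    A sketch of use (assuming a model such as *onion* that defines a radial
    profile; the parameter names shown are illustrative only)::

        from sasmodels.core import load_model_info

        info = load_model_info("onion")
        x, y, (xlabel, ylabel) = call_profile(info, n_shells=1)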
118    """
119    args = {}
120    for p in model_info.parameters.kernel_parameters:
121        if p.length > 1:
122            value = np.array([pars.get(p.id+str(j), p.default)
123                              for j in range(1, p.length+1)])
124        else:
125            value = pars.get(p.id, p.default)
126        args[p.id] = value
127    x, y = model_info.profile(**args)
128    return x, y, model_info.profile_axes
129
130
131def get_weights(parameter, values, active=True):
132    # type: (Parameter, Dict[str, float]) -> Tuple[np.ndarray, np.ndarray]
133    """
134    Generate the distribution for parameter *name* given the parameter values
135    in *pars*.
136
137    Uses "name", "name_pd", "name_pd_type", "name_pd_n", "name_pd_sigma"
138    from the *pars* dictionary for parameter value and parameter dispersion.
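
    For example, a *values* dictionary such as ``{'radius': 50.0,
    'radius_pd': 0.2, 'radius_pd_n': 35, 'radius_pd_type': 'gaussian'}``
    (illustrative values only) describes a "radius" drawn from a gaussian
    distribution with a 20% relative width sampled at 35 points.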
139    """
140    value = float(values.get(parameter.name, parameter.default))
141    npts = values.get(parameter.name+'_pd_n', 0)
142    width = values.get(parameter.name+'_pd', 0.0)
143    relative = parameter.relative_pd
144    if npts == 0 or width == 0 or not active:
145        # Note: orientation parameters have the viewing angle as the parameter
146        # value and the jitter in the distribution, so be sure to set the
147        # empty pd for orientation parameters to 0.
148        pd = [value if relative else 0.0], [1.0]
149    else:
150        limits = parameter.limits
151        disperser = values.get(parameter.name+'_pd_type', 'gaussian')
152        nsigma = values.get(parameter.name+'_pd_nsigma', 3.0)
153        pd = weights.get_weights(disperser, npts, width, nsigma,
154                                value, limits, relative)
155    return value, pd[0], pd[1]
156
157
158def _vol_pars(model_info, pars):
159    # type: (ModelInfo, ParameterSet) -> Tuple[np.ndarray, np.ndarray]
160    vol_pars = [get_weights(p, pars)
161                for p in model_info.parameters.call_parameters
162                if p.type == 'volume']
163    #import pylab; pylab.plot(vol_pars[0][0],vol_pars[0][1]); pylab.show()
164    value, weight = dispersion_mesh(model_info, vol_pars)
165    return value, weight
166
167
class DataMixin(object):
    """
    DataMixin captures the common aspects of evaluating a SAS model for a
    particular data set, including calculating Iq and evaluating the
    resolution function.  It is used in particular by :class:`DirectModel`,
    which takes SAS model parameters as keyword arguments to the calculator
    method, and by :class:`bumps_model.Experiment`, which wraps the model
    and data for use with the Bumps fitting engine.  It is not currently
    used by :class:`sasview_model.SasviewModel` since this will require a
    number of changes to SasView before we can do it.

    :meth:`_interpret_data` initializes the data structures necessary
    to manage the calculations.  This sets attributes in the child class
    such as *data_type* and *resolution*.

    :meth:`_calc_theory` evaluates the model at the given control values.

    :meth:`_set_data` sets the intensity data in the data object,
    possibly with random noise added.  This is useful for simulating a
    dataset with the results from :meth:`_calc_theory`.
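
    A minimal sketch of the protocol a subclass follows (mirroring what
    :class:`DirectModel` does below)::

        class Calculator(DataMixin):
            def __init__(self, data, model):
                self._interpret_data(data, model)

            def __call__(self, **pars):
                return self._calc_theory(pars, cutoff=0.)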
188    """
189    def _interpret_data(self, data, model):
190        # type: (Data, KernelModel) -> None
191        # pylint: disable=attribute-defined-outside-init
192
193        self._data = data
194        self._model = model
195
196        # interpret data
197        if hasattr(data, 'isSesans') and data.isSesans:
198            self.data_type = 'sesans'
199        elif hasattr(data, 'qx_data'):
200            self.data_type = 'Iqxy'
201        elif getattr(data, 'oriented', False):
202            self.data_type = 'Iq-oriented'
203        else:
204            self.data_type = 'Iq'
205
206        if self.data_type == 'sesans':
207            q = sesans.make_q(data.sample.zacceptance, data.Rmax)
208            index = slice(None, None)
209            res = None
210            if data.y is not None:
211                Iq, dIq = data.y, data.dy
212            else:
213                Iq, dIq = None, None
214            #self._theory = np.zeros_like(q)
215            q_vectors = [q]
216            q_mono = sesans.make_all_q(data)
217        elif self.data_type == 'Iqxy':
218            #if not model.info.parameters.has_2d:
219            #    raise ValueError("not 2D without orientation or magnetic parameters")
220            q = np.sqrt(data.qx_data**2 + data.qy_data**2)
221            qmin = getattr(data, 'qmin', 1e-16)
222            qmax = getattr(data, 'qmax', np.inf)
223            accuracy = getattr(data, 'accuracy', 'Low')
224            index = ~data.mask & (q >= qmin) & (q <= qmax)
225            if data.data is not None:
226                index &= ~np.isnan(data.data)
227                Iq = data.data[index]
228                dIq = data.err_data[index]
229            else:
230                Iq, dIq = None, None
231            res = resolution2d.Pinhole2D(data=data, index=index,
232                                         nsigma=3.0, accuracy=accuracy)
233            #self._theory = np.zeros_like(self.Iq)
234            q_vectors = res.q_calc
235            q_mono = []
236        elif self.data_type == 'Iq':
237            index = (data.x >= data.qmin) & (data.x <= data.qmax)
238            if data.y is not None:
239                index &= ~np.isnan(data.y)
240                Iq = data.y[index]
241                dIq = data.dy[index]
242            else:
243                Iq, dIq = None, None
244            if getattr(data, 'dx', None) is not None:
245                q, dq = data.x[index], data.dx[index]
246                if (dq > 0).any():
247                    res = resolution.Pinhole1D(q, dq)
248                else:
249                    res = resolution.Perfect1D(q)
250            elif (getattr(data, 'dxl', None) is not None
251                  and getattr(data, 'dxw', None) is not None):
252                res = resolution.Slit1D(data.x[index],
253                                        qx_width=data.dxl[index],
254                                        qy_width=data.dxw[index])
255            else:
256                res = resolution.Perfect1D(data.x[index])
257
258            #self._theory = np.zeros_like(self.Iq)
259            q_vectors = [res.q_calc]
260            q_mono = []
261        elif self.data_type == 'Iq-oriented':
262            index = (data.x >= data.qmin) & (data.x <= data.qmax)
263            if data.y is not None:
264                index &= ~np.isnan(data.y)
265                Iq = data.y[index]
266                dIq = data.dy[index]
267            else:
268                Iq, dIq = None, None
269            if (getattr(data, 'dxl', None) is None
270                    or getattr(data, 'dxw', None) is None):
271                raise ValueError("oriented sample with 1D data needs slit resolution")
272
273            res = resolution2d.Slit2D(data.x[index],
274                                      qx_width=data.dxw[index],
275                                      qy_width=data.dxl[index])
276            q_vectors = res.q_calc
277            q_mono = []
278        else:
279            raise ValueError("Unknown data type") # never gets here
280
281        # Remember function inputs so we can delay loading the function and
282        # so we can save/restore state
283        self._kernel_inputs = q_vectors
284        self._kernel_mono_inputs = q_mono
285        self._kernel = None
286        self.Iq, self.dIq, self.index = Iq, dIq, index
287        self.resolution = res
288
    def _set_data(self, Iq, noise=None):
        # type: (np.ndarray, Optional[float]) -> None
        # pylint: disable=attribute-defined-outside-init
        if noise is not None:
            self.dIq = Iq*noise*0.01
        dy = self.dIq
        y = Iq + np.random.randn(*dy.shape) * dy
        self.Iq = y
        if self.data_type in ('Iq', 'Iq-oriented'):
            if self._data.y is None:
                self._data.y = np.empty(len(self._data.x), 'd')
            if self._data.dy is None:
                self._data.dy = np.empty(len(self._data.x), 'd')
            self._data.dy[self.index] = dy
            self._data.y[self.index] = y
        elif self.data_type == 'Iqxy':
            if self._data.data is None:
                self._data.data = np.empty_like(self._data.qx_data, 'd')
            if self._data.err_data is None:
                self._data.err_data = np.empty_like(self._data.qx_data, 'd')
            self._data.data[self.index] = y
            self._data.err_data[self.index] = dy
        elif self.data_type == 'sesans':
            if self._data.y is None:
                self._data.y = np.empty(len(self._data.x), 'd')
            self._data.y[self.index] = y
        else:
            raise ValueError("Unknown model")

    def _calc_theory(self, pars, cutoff=0.0):
        # type: (ParameterSet, float) -> np.ndarray
        if self._kernel is None:
            self._kernel = self._model.make_kernel(self._kernel_inputs)
            self._kernel_mono = (
                self._model.make_kernel(self._kernel_mono_inputs)
                if self._kernel_mono_inputs else None)

        Iq_calc = call_kernel(self._kernel, pars, cutoff=cutoff)
        # Storing the calculated Iq values so that they can be plotted.
        # Only applies to oriented USANS data for now.
        # TODO: extend plotting of calculated Iq to other measurement types
        # TODO: refactor so we don't store the result in the model
        self.Iq_calc = Iq_calc
        if self.data_type == 'sesans':
            Iq_mono = (call_kernel(self._kernel_mono, pars, mono=True)
                       if self._kernel_mono_inputs else None)
            result = sesans.transform(self._data,
                                      self._kernel_inputs[0], Iq_calc,
                                      self._kernel_mono_inputs, Iq_mono)
        else:
            result = self.resolution.apply(Iq_calc)
            if hasattr(self.resolution, 'nx'):
                self.Iq_calc = (
                    self.resolution.qx_calc, self.resolution.qy_calc,
                    np.reshape(Iq_calc, (self.resolution.ny, self.resolution.nx))
                )
        return result


class DirectModel(DataMixin):
    """
    Create a calculator object for a model.

    *data* is 1D SAS, 2D SAS or SESANS data

    *model* is a model calculator returned from :func:`core.load_model`

    *cutoff* is the polydispersity weight cutoff.
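
    A minimal usage sketch (assuming the *cylinder* model and an empty 1D
    data object from :func:`data.empty_data1D`)::

        import numpy as np
        from sasmodels.core import load_model
        from sasmodels.data import empty_data1D

        data = empty_data1D(np.logspace(-3, -1, 200))
        model = load_model("cylinder")
        calculator = DirectModel(data, model)
        Iq = calculator(radius=20, length=400)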
357    """
358    def __init__(self, data, model, cutoff=1e-5):
359        # type: (Data, KernelModel, float) -> None
360        self.model = model
361        self.cutoff = cutoff
362        # Note: _interpret_data defines the model attributes
363        self._interpret_data(data, model)
364
365    def __call__(self, **pars):
366        # type: (**float) -> np.ndarray
367        return self._calc_theory(pars, cutoff=self.cutoff)
368
369    def simulate_data(self, noise=None, **pars):
370        # type: (Optional[float], **float) -> None
371        """
372        Generate simulated data for the model.
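
        *noise* is the percentage uncertainty applied to the simulated
        intensity.  A sketch of use (parameter names depend on the model)::

            calculator = DirectModel(data, model)
            calculator.simulate_data(noise=5, radius=60)
            # the noisy theory curve is written back into *data*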
373        """
374        Iq = self.__call__(**pars)
375        self._set_data(Iq, noise=noise)
376
377    def profile(self, **pars):
378        # type: (**float) -> None
379        """
380        Generate a plottable profile.
381        """
382        return call_profile(self.model.info, **pars)
383
384def main():
385    # type: () -> None
386    """
387    Program to evaluate a particular model at a set of q values.
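
    An illustrative invocation (any model name and parameter values can be
    substituted)::

        python -m sasmodels.direct_model cylinder 0.05 radius=20 length=400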
388    """
389    import sys
390    from .data import empty_data1D, empty_data2D
391    from .core import load_model_info, build_model
392
393    if len(sys.argv) < 3:
394        print("usage: python -m sasmodels.direct_model modelname (q|qx,qy) par=val ...")
395        sys.exit(1)
396    model_name = sys.argv[1]
397    call = sys.argv[2].upper()
398    if call != "ER_VR":
399        try:
400            values = [float(v) for v in call.split(',')]
401        except Exception:
402            values = []
403        if len(values) == 1:
404            q, = values
405            data = empty_data1D([q])
406        elif len(values) == 2:
407            qx, qy = values
408            data = empty_data2D([qx], [qy])
409        else:
410            print("use q or qx,qy or ER or VR")
411            sys.exit(1)
412    else:
413        data = empty_data1D([0.001])  # Data not used in ER/VR
414
415    model_info = load_model_info(model_name)
416    model = build_model(model_info)
417    calculator = DirectModel(data, model)
418    pars = dict((k, (float(v) if not k.endswith("_pd_type") else v))
419                for pair in sys.argv[3:]
420                for k, v in [pair.split('=')])
421    if call == "ER_VR":
422        ER = call_ER(model_info, pars)
423        VR = call_VR(model_info, pars)
424        print(ER, VR)
425    else:
426        Iq = calculator(**pars)
427        print(Iq[0])
428
429if __name__ == "__main__":
430    main()