source: sasmodels/sasmodels/direct_model.py @ a769b54

Last change on this file since a769b54 was a769b54, checked in by Paul Kienzle <pkienzle@…>, 7 years ago

allow sascomp with -data=filename to set q,dq from file

"""
Class interface to the model calculator.

Calling a model is somewhat non-trivial since the functions called depend
on the data type.  For 1D data the *Iq* kernel needs to be called, for
2D data the *Iqxy* kernel needs to be called, and for SESANS data the
*Iq* kernel needs to be called followed by a Hankel transform.  Before
the kernel is called, an appropriate *q* calculation vector needs to be
constructed.  This is not simply the *q* vector at which the data were
measured, since the resolution calculation requires values beyond the
measured range.  After the calculation, the resolution calculator must be
called to produce the predicted value at each measured data point.

:class:`DirectModel` is a callable object that takes *parameter=value*
keyword arguments and returns the appropriate theory values for the data.

:class:`DataMixin` does the real work of interpreting the data and calling
the model calculator.  It is used by :class:`DirectModel`, which uses
direct parameter values, and by :class:`bumps_model.Experiment`, which wraps
the parameter values in boxes so that the user can set fitting ranges, etc.,
on the individual parameters and send the model to the Bumps optimizers.
"""
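
# The overall flow, in pseudocode (a sketch of what DataMixin does below;
# the names are illustrative, not an exact trace):
#
#     q_calc = resolution.q_calc          # q points required by the resolution
#     Iq_calc = kernel(q_calc, pars)      # Iq or Iqxy evaluated at q_calc
#     theory = resolution.apply(Iq_calc)  # smeared back onto the measured q
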
from __future__ import print_function

import numpy as np  # type: ignore

# TODO: fix sesans module
from . import sesans  # type: ignore
from . import weights
from . import resolution
from . import resolution2d
from .details import make_kernel_args, dispersion_mesh

try:
    from typing import Optional, Dict, Tuple
except ImportError:
    pass
else:
    from .data import Data
    from .kernel import Kernel, KernelModel
    from .modelinfo import ModelInfo, Parameter, ParameterSet

def call_kernel(calculator, pars, cutoff=0., mono=False):
    # type: (Kernel, ParameterSet, float, bool) -> np.ndarray
    """
    Call *kernel* returned from *model.make_kernel* with parameters *pars*.

    *cutoff* is the limiting value for the product of dispersion weights used
    to perform the multidimensional dispersion calculation more quickly at a
    slight cost to accuracy. The default value of *cutoff=0* integrates over
    the entire dispersion cube.  Using *cutoff=1e-5* can be 50% faster, but
    with an error of about 1%, which is usually less than the measurement
    uncertainty.

    *mono* is True if polydispersity should be turned off for all parameters.
    """
    parameters = calculator.info.parameters
    if mono:
        active = lambda name: False
    elif calculator.dim == '1d':
        active = lambda name: name in parameters.pd_1d
    elif calculator.dim == '2d':
        active = lambda name: name in parameters.pd_2d
    else:
        active = lambda name: True

    #print("pars",[p.id for p in parameters.call_parameters])
    vw_pairs = [(get_weights(p, pars) if active(p.name)
                 else ([pars.get(p.name, p.default)], [1.0]))
                for p in parameters.call_parameters]

    call_details, values, is_magnetic = make_kernel_args(calculator, vw_pairs)
    #print("values:", values)
    return calculator(call_details, values, cutoff, is_magnetic)

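# Example use of call_kernel (a sketch; the model name, q range and parameter
# values are illustrative, not taken from this module):
#
#     from sasmodels.core import load_model
#     model = load_model("cylinder")
#     q = np.logspace(-3, -1, 200)
#     kernel = model.make_kernel([q])
#     Iq = call_kernel(kernel, {"radius": 20, "radius_pd": 0.1, "radius_pd_n": 35})
#     kernel.release()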

def call_ER(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> float
    """
    Call the model ER function using *pars*.

    *model_info* is either *model.info* if you have a loaded model,
    or *kernel.info* if you have a model kernel prepared for evaluation.
    """
    if model_info.ER is None:
        return 1.0
    elif not model_info.parameters.form_volume_parameters:
        # handle the case where ER is provided but model is not polydisperse
        return model_info.ER()
    else:
        value, weight = _vol_pars(model_info, pars)
        individual_radii = model_info.ER(*value)
        return np.sum(weight*individual_radii) / np.sum(weight)


def call_VR(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> float
    """
    Call the model VR function using *pars*.

    *model_info* is either *model.info* if you have a loaded model,
    or *kernel.info* if you have a model kernel prepared for evaluation.
    """
    if model_info.VR is None:
        return 1.0
    elif not model_info.parameters.form_volume_parameters:
        # handle the case where VR is provided but model is not polydisperse
        return model_info.VR()
    else:
        value, weight = _vol_pars(model_info, pars)
        whole, part = model_info.VR(*value)
        return np.sum(weight*part)/np.sum(weight*whole)

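# Example use of call_ER/call_VR (a sketch; the model name and parameter values
# are illustrative, and only apply to models that define ER and VR):
#
#     from sasmodels.core import load_model_info
#     model_info = load_model_info("core_shell_sphere")
#     pars = {"radius": 60, "thickness": 10}
#     ER = call_ER(model_info, pars)   # effective radius
#     VR = call_VR(model_info, pars)   # volume fraction scaling ratio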

def call_profile(model_info, **pars):
    # type: (ModelInfo, ...) -> Tuple[np.ndarray, np.ndarray, Tuple[str, str]]
    """
    Returns the profile *x, y, (xlabel, ylabel)* representing the model.
    """
    args = {}
    for p in model_info.parameters.kernel_parameters:
        if p.length > 1:
            value = np.array([pars.get(p.id+str(j), p.default)
                              for j in range(1, p.length+1)])
        else:
            value = pars.get(p.id, p.default)
        args[p.id] = value
    x, y = model_info.profile(**args)
    return x, y, model_info.profile_axes

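# Example use of call_profile (a sketch; the model name and parameter are
# illustrative, and only apply to models that define a profile function):
#
#     from sasmodels.core import load_model_info
#     model_info = load_model_info("onion")
#     x, y, (xlabel, ylabel) = call_profile(model_info, n_shells=2)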

def get_weights(parameter, values):
    # type: (Parameter, Dict[str, float]) -> Tuple[np.ndarray, np.ndarray]
    """
    Generate the distribution for *parameter* given the parameter values
    in *values*.

    Uses "name", "name_pd", "name_pd_type", "name_pd_n" and "name_pd_nsigma"
    from the *values* dictionary for the parameter value and dispersion.
    """
    value = float(values.get(parameter.name, parameter.default))
    relative = parameter.relative_pd
    limits = parameter.limits
    disperser = values.get(parameter.name+'_pd_type', 'gaussian')
    npts = values.get(parameter.name+'_pd_n', 0)
    width = values.get(parameter.name+'_pd', 0.0)
    nsigma = values.get(parameter.name+'_pd_nsigma', 3.0)
    if npts == 0 or width == 0:
        return [value], [1.0]
    value, weight = weights.get_weights(
        disperser, npts, width, nsigma, value, limits, relative)
    return value, weight / np.sum(weight)

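# Example use of get_weights (a sketch; "radius" and its Parameter object
# *radius_parameter* are illustrative).  The "_pd*" suffixes follow the naming
# convention described in the docstring above:
#
#     pars = {"radius": 50, "radius_pd": 0.2, "radius_pd_n": 35,
#             "radius_pd_type": "gaussian", "radius_pd_nsigma": 3}
#     value, weight = get_weights(radius_parameter, pars)
#     # value and weight are equal-length arrays; the weights sum to 1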

def _vol_pars(model_info, pars):
    # type: (ModelInfo, ParameterSet) -> Tuple[np.ndarray, np.ndarray]
    vol_pars = [get_weights(p, pars)
                for p in model_info.parameters.call_parameters
                if p.type == 'volume']
    #import pylab; pylab.plot(vol_pars[0][0],vol_pars[0][1]); pylab.show()
    value, weight = dispersion_mesh(model_info, vol_pars)
    return value, weight


class DataMixin(object):
    """
    DataMixin captures the common aspects of evaluating a SAS model for a
    particular data set, including calculating Iq and evaluating the
    resolution function.  It is used in particular by :class:`DirectModel`,
    which accepts SAS model parameters as keyword arguments to the
    calculator method, and by :class:`bumps_model.Experiment`, which wraps the
    model and data for use with the Bumps fitting engine.  It is not
    currently used by :class:`sasview_model.SasviewModel` since this will
    require a number of changes to SasView before we can do it.

    :meth:`_interpret_data` initializes the data structures necessary
    to manage the calculations.  This sets attributes in the child class
    such as *data_type* and *resolution*.

    :meth:`_calc_theory` evaluates the model at the given control values.

    :meth:`_set_data` sets the intensity data in the data object,
    possibly with random noise added.  This is useful for simulating a
    dataset with the results from :meth:`_calc_theory`.
    """
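    # Typical use by a subclass (a sketch; see DirectModel below for the real
    # thing): call _interpret_data(data, model) once during construction, then
    # call _calc_theory(pars) for each evaluation, and optionally _set_data()
    # to push simulated intensity back into the data object.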
    def _interpret_data(self, data, model):
        # type: (Data, KernelModel) -> None
        # pylint: disable=attribute-defined-outside-init

        self._data = data
        self._model = model

        # interpret data
        if hasattr(data, 'isSesans') and data.isSesans:
            self.data_type = 'sesans'
        elif hasattr(data, 'qx_data'):
            self.data_type = 'Iqxy'
        elif getattr(data, 'oriented', False):
            self.data_type = 'Iq-oriented'
        else:
            self.data_type = 'Iq'

        if self.data_type == 'sesans':
            q = sesans.make_q(data.sample.zacceptance, data.Rmax)
            index = slice(None, None)
            res = None
            if data.y is not None:
                Iq, dIq = data.y, data.dy
            else:
                Iq, dIq = None, None
            #self._theory = np.zeros_like(q)
            q_vectors = [q]
            q_mono = sesans.make_all_q(data)
        elif self.data_type == 'Iqxy':
            #if not model.info.parameters.has_2d:
            #    raise ValueError("not 2D without orientation or magnetic parameters")
            q = np.sqrt(data.qx_data**2 + data.qy_data**2)
            qmin = getattr(data, 'qmin', 1e-16)
            qmax = getattr(data, 'qmax', np.inf)
            accuracy = getattr(data, 'accuracy', 'Low')
            index = ~data.mask & (q >= qmin) & (q <= qmax)
            if data.data is not None:
                index &= ~np.isnan(data.data)
                Iq = data.data[index]
                dIq = data.err_data[index]
            else:
                Iq, dIq = None, None
            res = resolution2d.Pinhole2D(data=data, index=index,
                                         nsigma=3.0, accuracy=accuracy)
            #self._theory = np.zeros_like(self.Iq)
            q_vectors = res.q_calc
            q_mono = []
        elif self.data_type == 'Iq':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if getattr(data, 'dx', None) is not None:
                q, dq = data.x[index], data.dx[index]
                if (dq > 0).any():
                    res = resolution.Pinhole1D(q, dq)
                else:
                    res = resolution.Perfect1D(q)
            elif (getattr(data, 'dxl', None) is not None
                  and getattr(data, 'dxw', None) is not None):
                res = resolution.Slit1D(data.x[index],
                                        qx_width=data.dxl[index],
                                        qy_width=data.dxw[index])
            else:
                res = resolution.Perfect1D(data.x[index])

            #self._theory = np.zeros_like(self.Iq)
            q_vectors = [res.q_calc]
            q_mono = []
        elif self.data_type == 'Iq-oriented':
            index = (data.x >= data.qmin) & (data.x <= data.qmax)
            if data.y is not None:
                index &= ~np.isnan(data.y)
                Iq = data.y[index]
                dIq = data.dy[index]
            else:
                Iq, dIq = None, None
            if (getattr(data, 'dxl', None) is None
                    or getattr(data, 'dxw', None) is None):
                raise ValueError("oriented sample with 1D data needs slit resolution")

            res = resolution2d.Slit2D(data.x[index],
                                      qx_width=data.dxw[index],
                                      qy_width=data.dxl[index])
            q_vectors = res.q_calc
            q_mono = []
        else:
            raise ValueError("Unknown data type") # never gets here

        # Remember function inputs so we can delay loading the function and
        # so we can save/restore state
        self._kernel_inputs = q_vectors
        self._kernel_mono_inputs = q_mono
        self._kernel = None
        self.Iq, self.dIq, self.index = Iq, dIq, index
        self.resolution = res

    def _set_data(self, Iq, noise=None):
        # type: (np.ndarray, Optional[float]) -> None
        # pylint: disable=attribute-defined-outside-init
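        # Note: *noise* is interpreted as a percentage of Iq (dIq = Iq*noise/100).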
        if noise is not None:
            self.dIq = Iq*noise*0.01
        dy = self.dIq
        y = Iq + np.random.randn(*dy.shape) * dy
        self.Iq = y
        if self.data_type in ('Iq', 'Iq-oriented'):
            self._data.dy[self.index] = dy
            self._data.y[self.index] = y
        elif self.data_type == 'Iqxy':
            self._data.data[self.index] = y
        elif self.data_type == 'sesans':
            self._data.y[self.index] = y
        else:
            raise ValueError("Unknown model")

    def _calc_theory(self, pars, cutoff=0.0):
        # type: (ParameterSet, float) -> np.ndarray
        if self._kernel is None:
            self._kernel = self._model.make_kernel(self._kernel_inputs)
            self._kernel_mono = (
                self._model.make_kernel(self._kernel_mono_inputs)
                if self._kernel_mono_inputs else None)

        Iq_calc = call_kernel(self._kernel, pars, cutoff=cutoff)
        # Storing the calculated Iq values so that they can be plotted.
        # Only applies to oriented USANS data for now.
        # TODO: extend plotting of calculated Iq to other measurement types
        # TODO: refactor so we don't store the result in the model
        self.Iq_calc = None
        if self.data_type == 'sesans':
            Iq_mono = (call_kernel(self._kernel_mono, pars, mono=True)
                       if self._kernel_mono_inputs else None)
            result = sesans.transform(self._data,
                                      self._kernel_inputs[0], Iq_calc,
                                      self._kernel_mono_inputs, Iq_mono)
        else:
            result = self.resolution.apply(Iq_calc)
            if hasattr(self.resolution, 'nx'):
                self.Iq_calc = (
                    self.resolution.qx_calc, self.resolution.qy_calc,
                    np.reshape(Iq_calc, (self.resolution.ny, self.resolution.nx))
                )
        return result


class DirectModel(DataMixin):
    """
    Create a calculator object for a model.

    *data* is 1D SAS, 2D SAS or SESANS data

    *model* is a model calculator returned from :func:`generate.load_model`

    *cutoff* is the polydispersity weight cutoff.
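
    Example (a sketch; the model name, q range and parameter values are
    illustrative)::

        import numpy as np
        from sasmodels.core import load_model
        from sasmodels.data import empty_data1D

        data = empty_data1D(np.linspace(0.001, 0.2, 200))
        model = load_model("sphere")
        calculator = DirectModel(data, model)
        Iq = calculator(radius=40, sld=3, sld_solvent=6)
        calculator.simulate_data(noise=5, radius=40)  # fill data with noisy theory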
    """
    def __init__(self, data, model, cutoff=1e-5):
        # type: (Data, KernelModel, float) -> None
        self.model = model
        self.cutoff = cutoff
        # Note: _interpret_data defines the model attributes
        self._interpret_data(data, model)

    def __call__(self, **pars):
        # type: (**float) -> np.ndarray
        return self._calc_theory(pars, cutoff=self.cutoff)

    def simulate_data(self, noise=None, **pars):
        # type: (Optional[float], **float) -> None
        """
        Generate simulated data for the model.
        """
        Iq = self.__call__(**pars)
        self._set_data(Iq, noise=noise)

    def profile(self, **pars):
        # type: (**float) -> Tuple[np.ndarray, np.ndarray, Tuple[str, str]]
        """
        Generate a plottable profile.
        """
        return call_profile(self.model.info, **pars)

def main():
    # type: () -> None
    """
    Program to evaluate a particular model at a set of q values.
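
    Example command lines (parameter values are illustrative)::

        python -m sasmodels.direct_model cylinder 0.05 radius=20 length=400
        python -m sasmodels.direct_model cylinder 0.05,0.02 radius=20 length=400
        python -m sasmodels.direct_model cylinder ER_VR radius=20 length=400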
    """
    import sys
    from .data import empty_data1D, empty_data2D
    from .core import load_model_info, build_model

    if len(sys.argv) < 3:
        print("usage: python -m sasmodels.direct_model modelname (q|qx,qy|ER_VR) par=val ...")
        sys.exit(1)
    model_name = sys.argv[1]
    call = sys.argv[2].upper()
    if call != "ER_VR":
        try:
            values = [float(v) for v in call.split(',')]
        except Exception:
            values = []
        if len(values) == 1:
            q, = values
            data = empty_data1D([q])
        elif len(values) == 2:
            qx, qy = values
            data = empty_data2D([qx], [qy])
        else:
            print("use q or qx,qy or ER_VR")
            sys.exit(1)
    else:
        data = empty_data1D([0.001])  # Data not used in ER/VR

    model_info = load_model_info(model_name)
    model = build_model(model_info)
    calculator = DirectModel(data, model)
    pars = dict((k, (float(v) if not k.endswith("_pd_type") else v))
                for pair in sys.argv[3:]
                for k, v in [pair.split('=')])
    if call == "ER_VR":
        ER = call_ER(model_info, pars)
        VR = call_VR(model_info, pars)
        print(ER, VR)
    else:
        Iq = calculator(**pars)
        print(Iq[0])

if __name__ == "__main__":
    main()