source: sasmodels/sasmodels/mixture.py @ 15c80af

Last change on this file was 15c80af, checked in by Paul Kienzle <pkienzle@…>, 5 years ago

propagate first multiplicity parameter to sum model. Refs #1022.

1"""
2Mixture model
3-------------
4
5The product model multiplies the structure factor by the form factor,
6modulated by the effective radius of the form.  The resulting model
7has a attributes of both the model description (with parameters, etc.)
8and the module evaluator (with call, release, etc.).
9
10To use it, first load form factor P and structure factor S, then create
11*ProductModel(P, S)*.
12"""
from __future__ import print_function

from copy import copy
import numpy as np  # type: ignore

from .modelinfo import Parameter, ParameterTable, ModelInfo
from .kernel import KernelModel, Kernel
from .details import make_details

# pylint: disable=unused-import
try:
    from typing import List
except ImportError:
    pass
# pylint: enable=unused-import

def make_mixture_info(parts, operation='+'):
    # type: (List[ModelInfo], str) -> ModelInfo
    """
    Create info block for mixture model.
    """
    # Build new parameter list
    combined_pars = []
    control = None

    all_parts = copy(parts)
    is_flat = False
    while not is_flat:
        is_flat = True
        for part in all_parts:
            if part.composition and part.composition[0] == 'mixture' and \
                len(part.composition[1]) > 1:
                all_parts += part.composition[1]
                all_parts.remove(part)
                is_flat = False

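    # Illustration: if ``parts`` is [(A+B), C], where (A+B) is itself a
    # mixture, the nested mixture is replaced by its leaves, leaving
    # all_parts == [C, A, B].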
    # When creating a mixture model that is a sum of product models
    # (i.e., (1*2)+(3*4)), the parameters for models 1 & 2 will be prefixed
    # with A & B respectively, but so will the parameters for models 3 & 4.
    # We need to rename models 3 & 4 so that they are prefixed with C & D
    # to avoid overlap of parameter names.
    used_prefixes = []
    for part in parts:
        i = 0
        if part.composition and part.composition[0] == 'mixture':
            npars_list = [info.parameters.npars for info in part.composition[1]]
            for npars in npars_list:
                # List of params of one of the constituent models of part
                submodel_pars = part.parameters.kernel_parameters[i:i+npars]
                # Prefix of the constituent model
                prefix = submodel_pars[0].name[0]
                if prefix not in used_prefixes: # Haven't seen this prefix so far
                    used_prefixes.append(prefix)
                    i += npars
                    continue
                while prefix in used_prefixes:
                    # This prefix has already been used, so change it to the
                    # next letter that hasn't been used
                    prefix = chr(ord(prefix) + 1)
                used_prefixes.append(prefix)
                prefix += "_"
                # Update the parameters of this constituent model to use the
                # new prefix
                for par in submodel_pars:
                    par.id = prefix + par.id[2:]
                    par.name = prefix + par.name[2:]
                    if par.length_control is not None:
                        par.length_control = prefix + par.length_control[2:]
                i += npars

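    # Illustration, continuing the (1*2)+(3*4) example above: both product
    # parts arrive with their parameters prefixed A_ and B_; the first part
    # keeps A_/B_ and the second part's parameters are renamed to C_/D_, so
    # that, for example, two distinct A_radius parameters cannot collide in
    # the combined parameter table.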
    for part in parts:
        # Parameter prefix per model, A_, B_, ...
        # Note that prefix must also be applied to id and length_control
        # to support vector parameters
        prefix = ''
        if not part.composition:
            # Model isn't a composition model, so its parameters don't have
            # a prefix.  Add the next available prefix
            prefix = chr(ord('A')+len(used_prefixes))
            used_prefixes.append(prefix)
            prefix += '_'

        if operation == '+':
            # If model is a sum model, each constituent model gets its own scale parameter
            scale_prefix = prefix
            if prefix == '' and getattr(part, "operation", '') == '*':
                # `part` is a composition product model. Find the prefixes of
                # its parameters to form a new prefix for the scale.
                # For example, a model with A*B*C will have ABC_scale.
                sub_prefixes = []
                for param in part.parameters.kernel_parameters:
                    # Prefix of constituent model
                    sub_prefix = param.id.split('_')[0]
                    if sub_prefix not in sub_prefixes:
                        sub_prefixes.append(sub_prefix)
                # Concatenate sub_prefixes to form prefix for the scale
                scale_prefix = ''.join(sub_prefixes) + '_'
            scale = Parameter(scale_prefix + 'scale', default=1.0,
                              description="model intensity for " + part.name)
            combined_pars.append(scale)
        for p in part.parameters.kernel_parameters:
            p = copy(p)
            p.name = prefix + p.name
            p.id = prefix + p.id
            if p.length_control is not None:
                p.length_control = prefix + p.length_control
            combined_pars.append(p)
            if p.is_control and control is None:
                control = p.id
    parameters = ParameterTable(combined_pars)
    parameters.max_pd = sum(part.parameters.max_pd for part in parts)

    def random():
        combined_pars = {}
        for k, part in enumerate(parts):
            prefix = chr(ord('A')+k) + '_'
            pars = part.random()
            combined_pars.update((prefix+name, value) for name, value in pars.items())
        return combined_pars

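    # Illustration (parameter names are hypothetical): for parts
    # [sphere, cylinder], random() returns something like
    # {'A_radius': ..., 'B_radius': ..., 'B_length': ...}, i.e. each
    # component's random parameter set with its positional A_, B_, ...
    # prefix attached.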
    model_info = ModelInfo()
    model_info.id = operation.join(part.id for part in parts)
    model_info.control = control
    model_info.operation = operation
    model_info.name = '(' + operation.join(part.name for part in parts) + ')'
    model_info.filename = None
    model_info.title = 'Mixture model with ' + model_info.name
    model_info.description = model_info.title
    model_info.docs = model_info.title
    model_info.category = "custom"
    model_info.parameters = parameters
    model_info.random = random
    #model_info.single = any(part['single'] for part in parts)
    model_info.structure_factor = False
    model_info.variant_info = None
    #model_info.tests = []
    #model_info.source = []
    # Iq, Iqxy, form_volume, ER, VR and sesans
    # Remember the component info blocks so we can build the model
    model_info.composition = ('mixture', parts)
    return model_info


class MixtureModel(KernelModel):
    def __init__(self, model_info, parts):
        # type: (ModelInfo, List[KernelModel]) -> None
        self.info = model_info
        self.parts = parts
        self.dtype = parts[0].dtype

    def make_kernel(self, q_vectors):
        # type: (List[np.ndarray]) -> MixtureKernel
        # Note: may be sending the q_vectors to the device n times even though
        # they are only needed once.  It would mess up modularity quite a bit
        # to handle this optimally, especially since there are many cases where
        # separate q vectors are needed (e.g., form in python and structure
        # in opencl; or both in opencl, but one in single precision and the
        # other in double precision).
        kernels = [part.make_kernel(q_vectors) for part in self.parts]
        return MixtureKernel(self.info, kernels)

    def release(self):
        # type: () -> None
        """
        Free resources associated with the model.
        """
        for part in self.parts:
            part.release()

class MixtureKernel(Kernel):
    def __init__(self, model_info, kernels):
        # type: (ModelInfo, List[Kernel]) -> None
        self.dim = kernels[0].dim
        self.info = model_info
        self.kernels = kernels
        self.dtype = self.kernels[0].dtype
        self.operation = model_info.operation
        self.results = []  # type: List[np.ndarray]

    def __call__(self, call_details, values, cutoff, magnetic):
        # type: (CallDetails, np.ndarray, float, bool) -> np.ndarray
        scale, background = values[0:2]
        total = 0.0
        # remember the parts for plotting later
        self.results = []  # type: List[np.ndarray]
        parts = MixtureParts(self.info, self.kernels, call_details, values)
        for kernel, kernel_details, kernel_values in parts:
            #print("calling kernel", kernel.info.name)
            result = kernel(kernel_details, kernel_values, cutoff, magnetic)
            result = np.array(result).astype(kernel.dtype)
            # print(kernel.info.name, result)
            if self.operation == '+':
                total += result
            elif self.operation == '*':
                if np.all(total == 0.0):
                    # total is still the initial 0.0, so this is the first factor
                    total = result
                else:
                    total *= result
            self.results.append(result)

        return scale*total + background

    def release(self):
        # type: () -> None
        for k in self.kernels:
            k.release()


class MixtureParts(object):
    def __init__(self, model_info, kernels, call_details, values):
        # type: (ModelInfo, List[Kernel], CallDetails, np.ndarray) -> None
        self.model_info = model_info
        self.parts = model_info.composition[1]
        self.kernels = kernels
        self.call_details = call_details
        self.values = values
        self.spin_index = model_info.parameters.npars + 2
        #call_details.show(values)

    def __iter__(self):
        # type: () -> MixtureParts
        self.part_num = 0
        self.par_index = 2
        self.mag_index = self.spin_index + 3
        return self

    def __next__(self):
        # type: () -> Tuple[Kernel, CallDetails, np.ndarray]
        if self.part_num >= len(self.parts):
            raise StopIteration()
        info = self.parts[self.part_num]
        kernel = self.kernels[self.part_num]
        call_details = self._part_details(info, self.par_index)
        values = self._part_values(info, self.par_index, self.mag_index)
        values = values.astype(kernel.dtype)
        #call_details.show(values)

        self.part_num += 1
        self.par_index += info.parameters.npars
        if self.model_info.operation == '+':
            self.par_index += 1 # Account for each constituent model's scale param
        self.mag_index += 3 * len(info.parameters.magnetism_index)

        return kernel, call_details, values

    # CRUFT: py2 support
    next = __next__

    def _part_details(self, info, par_index):
        # type: (ModelInfo, int) -> CallDetails
        full = self.call_details
        # par_index is an index into the values array of the current parameter,
        # which includes the initial scale and background parameters.
        # We want the index into the weight length/offset for each parameter.
        # Exclude the initial scale and background, so subtract two.  If we're
        # building an addition model, each component has its own scale factor
        # which we need to skip when constructing the details for the kernel,
        # so add one, giving a net subtraction of one.
        diff = 1 if self.model_info.operation == '+' else 2
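        # Worked example (sizes are illustrative): for a sum model P+Q where
        # P has 3 kernel parameters, the first part is called with
        # par_index=2 (just past scale and background) and diff=1, so its
        # weights come from slice(1, 4), skipping P's per-component scale at
        # index 0; the next part starts at par_index=2+3+1=6, giving
        # slice(5, 5+npars_Q).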
        index = slice(par_index - diff, par_index - diff + info.parameters.npars)
        length = full.length[index]
        offset = full.offset[index]
        # The complete weight vector is being sent to each part so that
        # offsets don't need to be adjusted.
        part = make_details(info, length, offset, full.num_weights)
        return part

    def _part_values(self, info, par_index, mag_index):
        # type: (ModelInfo, int, int) -> np.ndarray
        # Set each constituent model's scale to 1 if this is a multiplication model
        scale = self.values[par_index] if self.model_info.operation == '+' else 1.0
        diff = 1 if self.model_info.operation == '+' else 0 # Skip scale if addition model
        pars = self.values[par_index + diff:par_index + info.parameters.npars + diff]
        nmagnetic = len(info.parameters.magnetism_index)
        if nmagnetic:
            spin_state = self.values[self.spin_index:self.spin_index + 3]
            magnetic_pars = self.values[mag_index:mag_index + 3 * nmagnetic]
        else:
            spin_state = []
            magnetic_pars = []
        nvalues = self.model_info.parameters.nvalues
        nweights = self.call_details.num_weights
        weights = self.values[nvalues:nvalues+2*nweights]
        zero = self.values.dtype.type(0.)
        values = [[scale, zero], pars, spin_state, magnetic_pars, weights]
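        # The packed layout mirrors a single-model value vector: the
        # component's scale, a zero background slot, its kernel parameters,
        # the shared spin state, its magnetic parameters (if any), the full
        # weight vector, and zero padding to a 32-value boundary (added
        # below).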
        # Pad the value array to a 32-value boundary
        spacer = (32 - sum(len(v) for v in values)%32)%32
        values.append([zero]*spacer)
        values = np.hstack(values).astype(self.kernels[0].dtype)
        return values
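

if __name__ == "__main__":
    # Hedged usage sketch, not part of the original module: it assumes that
    # sasmodels.core.load_model_info understands the "sphere+cylinder" sum
    # syntax and that sasmodels.direct_model.call_kernel is available.  The
    # model names, q values and parameter values are illustrative only.
    from sasmodels.core import load_model_info, build_model
    from sasmodels.direct_model import call_kernel

    mixture_info = load_model_info("sphere+cylinder")  # mixture ModelInfo
    mixture = build_model(mixture_info)                # MixtureModel of both parts
    kernel = mixture.make_kernel([np.logspace(-3, -1, 5)])
    # Component parameters carry the A_/B_ prefixes assigned above.
    pars = {"A_radius": 60.0, "B_radius": 20.0, "B_length": 400.0}
    print(call_kernel(kernel, pars))
    kernel.release()
    mixture.release()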