source: sasmodels/sasmodels/mixture.py @ ecf895e

1"""
2Mixture model
3-------------
4
5The product model multiplies the structure factor by the form factor,
6modulated by the effective radius of the form.  The resulting model
7has a attributes of both the model description (with parameters, etc.)
8and the module evaluator (with call, release, etc.).
9
10To use it, first load form factor P and structure factor S, then create
11*ProductModel(P, S)*.
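
For example, a minimal sketch (assuming the standard loader in
sasmodels.core; the model names are only illustrative)::

    from sasmodels.core import load_model_info
    from sasmodels.mixture import make_mixture_info

    parts = [load_model_info('sphere'), load_model_info('cylinder')]
    info = make_mixture_info(parts, operation='+')
    # The combined table holds A_scale, A_radius, ..., B_scale, B_radius, ...
    print([p.name for p in info.parameters.kernel_parameters])

In practice *load_model_info* can build the mixture directly from a
compound name such as ``"sphere+cylinder"``.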
12"""
13from __future__ import print_function
14
15from copy import copy
16import numpy as np  # type: ignore
17
18from .modelinfo import Parameter, ParameterTable, ModelInfo
19from .kernel import KernelModel, Kernel
20from .details import make_details
21
22# pylint: disable=unused-import
23try:
24    from typing import List
25except ImportError:
26    pass
27# pylint: enable=unused-import
28
def make_mixture_info(parts, operation='+'):
    # type: (List[ModelInfo], str) -> ModelInfo
    """
    Create info block for mixture model.
    """
    # Build new parameter list
    combined_pars = []

    # Flatten nested mixture models into a single list of parts.
    all_parts = copy(parts)
    is_flat = False
    while not is_flat:
        is_flat = True
        for part in all_parts:
            if part.composition and part.composition[0] == 'mixture' and \
                len(part.composition[1]) > 1:
                all_parts += part.composition[1]
                all_parts.remove(part)
                is_flat = False

    # When creating a mixture model that is a sum of product models (i.e.,
    # (1*2)+(3*4)) the parameters for models 1 & 2 will be prefixed with
    # A & B respectively, but so will the parameters for models 3 & 4. We
    # need to rename models 3 & 4 so that they are prefixed with C & D to
    # avoid overlap of parameter names.
    used_prefixes = []
    for part in parts:
        i = 0
        if part.composition and part.composition[0] == 'mixture':
            npars_list = [info.parameters.npars for info in part.composition[1]]
            for npars in npars_list:
                # List of params of one of the constituent models of part
                submodel_pars = part.parameters.kernel_parameters[i:i+npars]
                # Prefix of the constituent model
                prefix = submodel_pars[0].name[0]
                if prefix not in used_prefixes: # Haven't seen this prefix so far
                    used_prefixes.append(prefix)
                    i += npars
                    continue
                while prefix in used_prefixes:
                    # This prefix has already been used, so change it to the
                    # next letter that hasn't been used
                    prefix = chr(ord(prefix) + 1)
                used_prefixes.append(prefix)
                prefix += "_"
                # Update the parameters of this constituent model to use the
                # new prefix
                for par in submodel_pars:
                    par.id = prefix + par.id[2:]
                    par.name = prefix + par.name[2:]
                    if par.length_control is not None:
                        par.length_control = prefix + par.length_control[2:]
                i += npars

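    # For example, for (1*2)+(3*4) the loop above leaves the first product's
    # parameters prefixed with A_ and B_ and renames the second product's
    # parameters from A_/B_ to C_/D_, so the two products no longer collide.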
    for part in parts:
        # Parameter prefix per model, A_, B_, ...
        # Note that prefix must also be applied to id and length_control
        # to support vector parameters
        prefix = ''
        if not part.composition:
            # Model isn't a composition model, so its parameters don't have
            # a prefix. Add the next available prefix
            prefix = chr(ord('A')+len(used_prefixes))
            used_prefixes.append(prefix)
            prefix += '_'

        if operation == '+':
            # If model is a sum model, each constituent model gets its own scale parameter
            scale_prefix = prefix
            if prefix == '' and getattr(part, "operation", '') == '*':
                # `part` is a composition product model. Find the prefixes of
                # its parameters to form a new prefix for the scale.
                # For example, a model with A*B*C will have ABC_scale.
                sub_prefixes = []
                for param in part.parameters.kernel_parameters:
                    # Prefix of constituent model
                    sub_prefix = param.id.split('_')[0]
                    if sub_prefix not in sub_prefixes:
                        sub_prefixes.append(sub_prefix)
                # Concatenate sub_prefixes to form prefix for the scale
                scale_prefix = ''.join(sub_prefixes) + '_'
            scale = Parameter(scale_prefix + 'scale', default=1.0,
                              description="model intensity for " + part.name)
            combined_pars.append(scale)
        for p in part.parameters.kernel_parameters:
            p = copy(p)
            p.name = prefix + p.name
            p.id = prefix + p.id
            if p.length_control is not None:
                p.length_control = prefix + p.length_control
            combined_pars.append(p)
    parameters = ParameterTable(combined_pars)
    parameters.max_pd = sum(part.parameters.max_pd for part in parts)

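    # random() draws a parameter set by delegating to each part's random()
    # and prefixing the returned names with A_, B_, ... by part position.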
    def random():
        combined_pars = {}
        for k, part in enumerate(parts):
            prefix = chr(ord('A')+k) + '_'
            pars = part.random()
            combined_pars.update((prefix+name, v) for name, v in pars.items())
        return combined_pars

    model_info = ModelInfo()
    model_info.id = operation.join(part.id for part in parts)
    model_info.operation = operation
    model_info.name = '(' + operation.join(part.name for part in parts) + ')'
    model_info.filename = None
    model_info.title = 'Mixture model with ' + model_info.name
    model_info.description = model_info.title
    model_info.docs = model_info.title
    model_info.category = "custom"
    model_info.parameters = parameters
    model_info.random = random
    #model_info.single = any(part['single'] for part in parts)
    model_info.structure_factor = False
    model_info.variant_info = None
    #model_info.tests = []
    #model_info.source = []
    # Iq, Iqxy, form_volume, ER, VR and sesans
    # Remember the component info blocks so we can build the model
    model_info.composition = ('mixture', parts)
    return model_info


class MixtureModel(KernelModel):
    def __init__(self, model_info, parts):
        # type: (ModelInfo, List[KernelModel]) -> None
        self.info = model_info
        self.parts = parts
        self.dtype = parts[0].dtype

    def make_kernel(self, q_vectors):
        # type: (List[np.ndarray]) -> MixtureKernel
        # Note: may be sending the q_vectors to the underlying kernels n times
        # even though they are only needed once.  It would mess up modularity
        # quite a bit to handle this optimally, especially since there are
        # many cases where separate q vectors are needed (e.g., form in python
        # and structure in opencl; or both in opencl, but one in single
        # precision and the other in double precision).
        kernels = [part.make_kernel(q_vectors) for part in self.parts]
        return MixtureKernel(self.info, kernels)

    def release(self):
        # type: () -> None
        """
        Free resources associated with the model.
        """
        for part in self.parts:
            part.release()


class MixtureKernel(Kernel):
    def __init__(self, model_info, kernels):
        # type: (ModelInfo, List[Kernel]) -> None
        self.dim = kernels[0].dim
        self.info = model_info
        self.kernels = kernels
        self.dtype = self.kernels[0].dtype
        self.operation = model_info.operation
        self.results = []  # type: List[np.ndarray]

    def __call__(self, call_details, values, cutoff, magnetic):
        # type: (CallDetails, np.ndarray, float, bool) -> np.ndarray
        scale, background = values[0:2]
        total = 0.0
        # remember the parts for plotting later
        self.results = []  # type: List[np.ndarray]
        parts = MixtureParts(self.info, self.kernels, call_details, values)
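        # Combine the part results.  For '+' each part kernel has already
        # applied its own per-part scale (see MixtureParts below), so the
        # return value is scale*(A_scale*I_A + B_scale*I_B + ...) + background;
        # for '*' the part scales are fixed at 1, giving
        # scale*(I_A*I_B*...) + background.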
        for kernel, kernel_details, kernel_values in parts:
            #print("calling kernel", kernel.info.name)
            result = kernel(kernel_details, kernel_values, cutoff, magnetic)
            result = np.array(result).astype(kernel.dtype)
            # print(kernel.info.name, result)
            if self.operation == '+':
                total += result
            elif self.operation == '*':
                if np.isscalar(total):
                    # total is still the initial 0.0 scalar; start the
                    # product from the first part's result.
                    total = result
                else:
                    total *= result
            self.results.append(result)

        return scale*total + background

    def release(self):
        # type: () -> None
        for k in self.kernels:
            k.release()


class MixtureParts(object):
    def __init__(self, model_info, kernels, call_details, values):
        # type: (ModelInfo, List[Kernel], CallDetails, np.ndarray) -> None
        self.model_info = model_info
        self.parts = model_info.composition[1]
        self.kernels = kernels
        self.call_details = call_details
        self.values = values
        self.spin_index = model_info.parameters.npars + 2
        #call_details.show(values)

    def __iter__(self):
        # type: () -> "MixtureParts"
        self.part_num = 0
        self.par_index = 2
        self.mag_index = self.spin_index + 3
        return self

    def __next__(self):
        # type: () -> Tuple[Kernel, CallDetails, np.ndarray]
        if self.part_num >= len(self.parts):
            raise StopIteration()
        info = self.parts[self.part_num]
        kernel = self.kernels[self.part_num]
        call_details = self._part_details(info, self.par_index)
        values = self._part_values(info, self.par_index, self.mag_index)
        values = values.astype(kernel.dtype)
        #call_details.show(values)

        self.part_num += 1
        self.par_index += info.parameters.npars
        if self.model_info.operation == '+':
            self.par_index += 1 # Account for each constituent model's scale param
        self.mag_index += 3 * len(info.parameters.magnetism_index)

        return kernel, call_details, values

    # CRUFT: py2 support
    next = __next__

    def _part_details(self, info, par_index):
        # type: (ModelInfo, int) -> CallDetails
        full = self.call_details
        # par_index is index into values array of the current parameter,
        # which includes the initial scale and background parameters.
        # We want the index into the weight length/offset for each parameter.
        # Exclude the initial scale and background, so subtract two. If we're
        # building an addition model, each component has its own scale factor
        # which we need to skip when constructing the details for the kernel, so
        # add one, giving a net subtract one.
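        # For example, in A+B where A has 3 parameters, part A is called
        # with par_index=2 (A_scale in the values vector) and uses weight
        # slice(1, 4), skipping A_scale in the kernel parameter table;
        # part B is then called with par_index=2+1+3=6 and uses
        # slice(5, 5+npars_B).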
        diff = 1 if self.model_info.operation == '+' else 2
        index = slice(par_index - diff, par_index - diff + info.parameters.npars)
        length = full.length[index]
        offset = full.offset[index]
        # The complete weight vector is being sent to each part so that
        # offsets don't need to be adjusted.
        part = make_details(info, length, offset, full.num_weights)
        return part

    def _part_values(self, info, par_index, mag_index):
        # type: (ModelInfo, int, int) -> np.ndarray
        # Set each constituent model's scale to 1 if this is a multiplication model
        scale = self.values[par_index] if self.model_info.operation == '+' else 1.0
        diff = 1 if self.model_info.operation == '+' else 0 # Skip scale if addition model
        pars = self.values[par_index + diff:par_index + info.parameters.npars + diff]
        nmagnetic = len(info.parameters.magnetism_index)
        if nmagnetic:
            spin_state = self.values[self.spin_index:self.spin_index + 3]
            mag_pars = self.values[mag_index:mag_index + 3 * nmagnetic]
        else:
            spin_state = []
            mag_pars = []
        nvalues = self.model_info.parameters.nvalues
        nweights = self.call_details.num_weights
        weights = self.values[nvalues:nvalues+2*nweights]
        zero = self.values.dtype.type(0.)
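        # Assemble the value vector in the layout the part kernel expects:
        # [scale, background=0], the part parameters, spin state, magnetic
        # parameters, then the full weight vector.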
        values = [[scale, zero], pars, spin_state, mag_pars, weights]
        # Pad value array to a 32 value boundary
        spacer = (32 - sum(len(v) for v in values)%32)%32
        values.append([zero]*spacer)
        values = np.hstack(values).astype(self.kernels[0].dtype)
        return values