Changes in [ba7302a:2573fa1] in sasmodels


Files: 2 added, 4 edited

  • example/oriented_usans.py (r74b0495 → r1cd24b4)

     # Spherical particle data, not ellipsoids
    -sans, usans = load_data('latex_smeared.xml', index='all')
    +sans, usans = load_data('../../sasview/sasview/test/1d_data/latex_smeared.xml')
     usans.qmin, usans.qmax = np.min(usans.x), np.max(usans.x)
     usans.mask = (usans.x < 0.0)
  • example/simul_fit.py (r74b0495 → r1a4d4c0)

    +#!/usr/bin/env python
    +# -*- coding: utf-8 -*-
    +
    +# To Sasview/documents/scripts
    +
     from bumps.names import *
     from sasmodels.core import load_model
     …
     from sasmodels.data import load_data, plot_data

    -# latex data, same sample usans and sans
    -# particles radius ~2300, uniform dispersity
    -datasets = load_data('latex_smeared.xml', index='all')
    -#[print(data) for data in datasets]

    -# A single sphere model to share between the datasets.  We will use
    -# FreeVariables below to set the parameters that are independent between
    -# the datasets.
    -kernel = load_model('sphere')
    -pars = dict(scale=0.01, background=0.0, sld=5.0, sld_solvent=0.0, radius=1500.,
    -            #radius_pd=0.1, radius_pd_n=35,
    -            )
    +""" IMPORT THE DATA USED """
    +datafiles = ['latex_smeared_out_0.txt', 'latex_smeared_out_1.txt']
    +datasets = [load_data(el) for el in datafiles]
    +
    +for data in datasets:
    +    data.qmin = 0.0
    +    data.qmax = 10.0
    +
    +#sphere model
    +kernel = load_model('sphere', dtype="single")
    +pars = dict(scale=0.01, background=0.0, sld=1.0, sld_solvent=6.0, radius=1500.)
     model = Model(kernel, **pars)
    +model.radius.range(0, inf)
    +#model.background.range(-inf, inf)
    +#model.scale.range(0, inf)
    +model.sld.range(-inf, inf)
    +model.sld_solvent.range(-inf, inf)

    -# radius and polydispersity (if any) are shared
    -model.radius.range(0, inf)
    -#model.radius_pd.range(0, 1)
    -
    -# Contrast and dilution are the same for both measurements, but are not
    -# separable with a single measurement (i.e., I(q) ~ F(q) contrast^2 Vf),
    -# so fit one of scale, sld or solvent sld.  With absolute scaling from
    -# data reduction, can use the same parameter for both datasets.
    -model.scale.range(0, inf)
    -#model.sld.range(-inf, inf)
    -#model.sld_solvent.range(-inf, inf)
    -
    -# Background is different for sans and usans so set it as a free variable
    -# in the model.
     free = FreeVariables(
    -    names=[data.run[0] for data in datasets],
    +    names=[data.filename for data in datasets],
         background=model.background,
    +    scale=model.scale,
         )
     free.background.range(-inf, inf)
    +free.scale.range(0, inf)

    -# Note: can access the parameters for the individual models using
    -# free.background[0] and free.background[1], setting constraints or
    -# ranges as appropriate.
    -
    -# For more complex systems where different datasets require independent models,
    -# separate models can be defined, with parameters tied together using
    -# constraint expressions.  For example, the following could be used to fit
    -# data set 1 to spheres and data set 2 to cylinders of the same volume:
    -#    model1 = Model(load_model('sphere'))
    -#    model2 = Model(load_model('cylinder'))
    -#    model1.sld = model2.sld
    -#    model1.sld_solvent = model2.sld_solvent
    -#    model1.scale = model2.scale
    -#    # set cylinders and spheres to the same volume
    -#    model1.radius = (3/4*model2.radius**2*model2.length)**(1/3)
    -#    model1.background.range(0, 2)
    -#    model2.background.range(0, 2)
    -
    -# Setup the experiments, sharing the same model across all datasets.
    -M = [Experiment(data=data, model=model, name=data.run[0]) for data in datasets]
    +M = [Experiment(data=data, model=model) for data in datasets]

     problem = FitProblem(M, freevars=free)
    +
    +print(problem._parameters)
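    The rewritten simul_fit.py gives each dataset its own background and scale through FreeVariables while the sphere radius and SLDs stay shared. As the comments dropped from the old version noted, the per-dataset parameters remain individually addressable; a minimal sketch, assuming the script above has already run so that free, problem and inf are in scope:

        # Parameters created by FreeVariables are indexed in the same order as
        # `datasets`, so ranges or starting values can be set per dataset.
        free.background[0].range(-1e-3, 1e-3)   # e.g. tight background for the first file
        free.background[1].range(-inf, inf)     # leave the second file unconstrained
        free.scale[0].value = 0.01              # starting value for the first dataset

        # The script's final print(problem._parameters) then lists all fitted
        # parameters, including the per-dataset copies created above.

    Scripts like this are normally driven from the bumps command line (for example, bumps example/simul_fit.py --preview), which builds the fit from the problem object defined at the bottom of the file.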
  • sasmodels/bumps_model.py (r74b0495 → r3330bb4)

         """
         _cache = None # type: Dict[str, np.ndarray]
    -    def __init__(self, data, model, cutoff=1e-5, name=None):
    +    def __init__(self, data, model, cutoff=1e-5):
             # type: (Data, Model, float) -> None
             # remember inputs so we can inspect from outside
    -        self.name = data.filename if name is None else name
             self.model = model
             self.cutoff = cutoff
    …
             """
             data, theory, resid = self._data, self.theory(), self.residuals()
    -        # TODO: hack to display oriented usans 2-D pattern
    -        Iq_calc = self.Iq_calc if isinstance(self.Iq_calc, tuple) else None
    -        plot_theory(data, theory, resid, view, Iq_calc=Iq_calc)
    +        plot_theory(data, theory, resid, view, Iq_calc=self.Iq_calc)

         def simulate_data(self, noise=None):
  • sasmodels/data.py (r09141ff → rced5bd2)

         Data = Union["Data1D", "Data2D", "SesansData"]

    -def load_data(filename, index=0):
    +def load_data(filename):
         # type: (str) -> Data
         """
    …
             filename, indexstr = filename[:-1].split('[')
             index = int(indexstr)
    +    else:
    +        index = None
         datasets = loader.load(filename)
    -    if not datasets:  # None or []
    +    if datasets is None:
             raise IOError("Data %r could not be loaded" % filename)
         if not isinstance(datasets, list):
             datasets = [datasets]
    -    for data in datasets:
    -        if hasattr(data, 'x'):
    -            data.qmin, data.qmax = data.x.min(), data.x.max()
    -            data.mask = (np.isnan(data.y) if data.y is not None
    -                        else np.zeros_like(data.x, dtype='bool'))
    -        elif hasattr(data, 'qx_data'):
    -            data.mask = ~data.mask
    -    return datasets[index] if index != 'all' else datasets
    +    if index is None and len(datasets) > 1:
    +        raise ValueError("Need to specify filename[index] for multipart data")
    +    data = datasets[index if index is not None else 0]
    +    if hasattr(data, 'x'):
    +        data.qmin, data.qmax = data.x.min(), data.x.max()
    +        data.mask = (np.isnan(data.y) if data.y is not None
    +                     else np.zeros_like(data.x, dtype='bool'))
    +    elif hasattr(data, 'qx_data'):
    +        data.mask = ~data.mask
    +    return data
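    With this change load_data always returns a single dataset: a part of a multipart file is selected with a bracket suffix on the filename, and omitting the index for a file that holds more than one part is now an error rather than silently returning the first part. A minimal sketch of the new calling convention, using file names from the examples above (which index holds the SANS versus USANS part is an assumption):

        from sasmodels.data import load_data

        # Single-part file: the dataset is returned directly.
        data = load_data('latex_smeared_out_0.txt')

        # Multipart file: select one part with the filename[index] suffix.
        sans = load_data('latex_smeared.xml[0]')    # assumed: part 0 is the SANS curve
        usans = load_data('latex_smeared.xml[1]')   # assumed: part 1 is the USANS curve

        # load_data('latex_smeared.xml') on this file now raises
        # ValueError("Need to specify filename[index] for multipart data")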