Changeset 8fa3fb8 in sasview for src/sas/sascalc
- Timestamp:
- Mar 1, 2017 9:46:54 AM
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- a97aebd
- Parents:
- cb1e9a5 (diff), 775e0b7 (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Location:
- src/sas/sascalc
- Files:
- 13 edited
src/sas/sascalc/data_util/qsmearing.py
rf8aa738 → r775e0b7

 import sys

+import numpy as np  # type: ignore
+from numpy import pi, exp  # type: ignore
+
 from sasmodels.resolution import Slit1D, Pinhole1D
+from sasmodels.sesans import SesansTransform
 from sasmodels.resolution2d import Pinhole2D
+from .nxsunit import Converter

 def smear_selection(data, model = None):
…
     # Sanity check. If we are not dealing with a SAS Data1D
     # object, just return None
+    # This checks for 2D data (does not throw exception because fail is common)
     if data.__class__.__name__ not in ['Data1D', 'Theory1D']:
         if data == None:
             return None
         elif data.dqx_data == None or data.dqy_data == None:
             return None
-        return Pinhole2D(data)
+        return PySmear2D(data)
+    # This checks for 1D data with smearing info in the data itself
+    # (again, fail is likely; no exceptions)
     if not hasattr(data, "dx") and not hasattr(data, "dxl")\
          and not hasattr(data, "dxw"):
…
     # Look for resolution smearing data
+    # This is the code that checks for SESANS data; it looks for the file loader
+    # TODO: change other sanity checks to check for file loader instead of data structure?
+    _found_sesans = False
+    #if data.dx is not None and data.meta_data['loader']=='SESANS':
+    if data.dx is not None and data.isSesans:
+        #if data.dx[0] > 0.0:
+        if numpy.size(data.dx[data.dx <= 0]) == 0:
+            _found_sesans = True
+        # if data.dx[0] <= 0.0:
+        if numpy.size(data.dx[data.dx <= 0]) > 0:
+            raise ValueError('one or more of your dx values are negative, please check the data file!')
+
+    if _found_sesans == True:
+        # Pre-compute the Hankel matrix (H)
+        qmax, qunits = data.sample.zacceptance
+        SElength = Converter(data._xunit)(data.x, "A")
+        zaccept = Converter(qunits)(qmax, "1/A"),
+        Rmax = 10000000
+        hankel = SesansTransform(data.x, SElength, zaccept, Rmax)
+        # Then return the actual transform, as if it were a smearing function
+        return PySmear(hankel, model, offset=0)
+
     _found_resolution = False
     if data.dx is not None and len(data.dx) == len(data.x):
…
     Wrapper for pure python sasmodels resolution functions.
     """
-    def __init__(self, resolution, model):
+    def __init__(self, resolution, model, offset=None):
         self.model = model
         self.resolution = resolution
-        self.offset = numpy.searchsorted(self.resolution.q_calc, self.resolution.q[0])
+        if offset is None:
+            offset = numpy.searchsorted(self.resolution.q_calc, self.resolution.q[0])
+        self.offset = offset

     def apply(self, iq_in, first_bin=0, last_bin=None):
…
         width = data.dx if data.dx is not None else 0
     return PySmear(Pinhole1D(q, width), model)
+
+
+class PySmear2D(object):
+    """
+    Q smearing class for SAS 2d pinhole data
+    """
+
+    def __init__(self, data=None, model=None):
+        self.data = data
+        self.model = model
+        self.accuracy = 'Low'
+        self.limit = 3.0
+        self.index = None
+        self.coords = 'polar'
+        self.smearer = True
+
+    def set_accuracy(self, accuracy='Low'):
+        """
+        Set accuracy.
+
+        :param accuracy: string
+        """
+        self.accuracy = accuracy
+
+    def set_smearer(self, smearer=True):
+        """
+        Set whether or not smearer will be used
+
+        :param smearer: smear object
+        """
+        self.smearer = smearer
+
+    def set_data(self, data=None):
+        """
+        Set data.
+
+        :param data: DataLoader.Data_info type
+        """
+        self.data = data
+
+    def set_model(self, model=None):
+        """
+        Set model.
+
+        :param model: sas.models instance
+        """
+        self.model = model
+
+    def set_index(self, index=None):
+        """
+        Set index.
+
+        :param index: 1d arrays
+        """
+        self.index = index
+
+    def get_value(self):
+        """
+        Over sampling of r_nbins times phi_nbins, calculate Gaussian weights,
+        then find smeared intensity
+        """
+        if self.smearer:
+            res = Pinhole2D(data=self.data, index=self.index,
+                            nsigma=3.0, accuracy=self.accuracy,
+                            coords=self.coords)
+            val = self.model.evalDistribution(res.q_calc)
+            return res.apply(val)
+        else:
+            index = self.index if self.index is not None else slice(None)
+            qx_data = self.data.qx_data[index]
+            qy_data = self.data.qy_data[index]
+            q_calc = [qx_data, qy_data]
+            val = self.model.evalDistribution(q_calc)
+            return val
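After this change, smear_selection has three outcomes: a PySmear wrapping a SesansTransform (a Hankel transform) for SESANS data, a PySmear2D for 2D data, and the existing pinhole/slit smearers otherwise. Passing offset=0 for the SESANS case skips the searchsorted alignment of q against q_calc that the pinhole and slit resolutions need. A minimal sketch of the dispatch logic, using a hypothetical FakeData1D stand-in so it runs without sasmodels:

```python
import numpy as np

class FakeData1D(object):
    """Hypothetical stand-in for sascalc's Data1D: only the fields
    the selection cascade inspects."""
    def __init__(self, x, dx=None, isSesans=False):
        self.x = np.asarray(x, dtype=float)
        self.dx = None if dx is None else np.asarray(dx, dtype=float)
        self.isSesans = isSesans

def pick_smearer(data):
    """Mirror of the smear_selection cascade above, returning labels
    instead of PySmear objects."""
    if data.dx is not None and data.isSesans:
        if np.size(data.dx[data.dx <= 0]) > 0:
            raise ValueError("one or more dx values are negative")
        # The real code builds SesansTransform(...) and PySmear(..., offset=0).
        return "sesans-hankel"
    if data.dx is not None and len(data.dx) == len(data.x):
        return "pinhole"   # PySmear(Pinhole1D(q, width), model)
    return None            # no resolution information found

print(pick_smearer(FakeData1D([1., 2., 3.], dx=[.1, .1, .1], isSesans=True)))
# -> sesans-hankel
```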
src/sas/sascalc/dataloader/data_info.py
r1b1a1c1 → r2ffe241

 import numpy
 import math
-
-class plottable_sesans1D(object):
-    """
-    SESANS is a place holder for 1D SESANS plottables.
-
-    #TODO: This was directly copied from the plottables_1D. Modified Somewhat.
-    #Class has been updated.
-    """
-    # The presence of these should be mutually
-    # exclusive with the presence of Qdev (dx)
-    x = None
-    y = None
-    lam = None
-    dx = None
-    dy = None
-    dlam = None
-    ## Slit smearing length
-    dxl = None
-    ## Slit smearing width
-    dxw = None
-
-    # Units
-    _xaxis = ''
-    _xunit = ''
-    _yaxis = ''
-    _yunit = ''
-
-    def __init__(self, x, y, lam, dx=None, dy=None, dlam=None):
-        # print "SESANS plottable working"
-        self.x = numpy.asarray(x)
-        self.y = numpy.asarray(y)
-        self.lam = numpy.asarray(lam)
-        if dx is not None:
-            self.dx = numpy.asarray(dx)
-        if dy is not None:
-            self.dy = numpy.asarray(dy)
-        if dlam is not None:
-            self.dlam = numpy.asarray(dlam)
-
-    def xaxis(self, label, unit):
-        """
-        set the x axis label and unit
-        """
-        self._xaxis = label
-        self._xunit = unit
-
-    def yaxis(self, label, unit):
-        """
-        set the y axis label and unit
-        """
-        self._yaxis = label
-        self._yunit = unit
-

 class plottable_1D(object):
…
     ## Slit smearing width
     dxw = None
+    ## SESANS specific params (wavelengths for spin echo length calculation)
+    lam = None
+    dlam = None

     # Units
…
     _yunit = ''

-    def __init__(self, x, y, dx=None, dy=None, dxl=None, dxw=None):
+    def __init__(self, x, y, dx=None, dy=None, dxl=None, dxw=None, lam=None, dlam=None):
         self.x = numpy.asarray(x)
         self.y = numpy.asarray(y)
…
         if dxw is not None:
             self.dxw = numpy.asarray(dxw)
+        if lam is not None:
+            self.lam = numpy.asarray(lam)
+        if dlam is not None:
+            self.dlam = numpy.asarray(dlam)

     def xaxis(self, label, unit):
…
     ## Details
     details = None
+    ## SESANS zacceptance
+    zacceptance = None

     def __init__(self):
…
     ## Loading errors
     errors = None
+    ## SESANS data check
+    isSesans = None

     def __init__(self):
…
         ## Loading errors
         self.errors = []
+        ## SESANS data check
+        self.isSesans = False

     def append_empty_process(self):
…
         _str += "Title: %s\n" % self.title
         _str += "Run: %s\n" % str(self.run)
+        _str += "SESANS: %s\n" % str(self.isSesans)
         _str += "Instrument: %s\n" % str(self.instrument)
         _str += "%s\n" % str(self.sample)
…
         return self._perform_union(other)

-class SESANSData1D(plottable_sesans1D, DataInfo):
-    """
-    SESANS 1D data class
-    """
-    x_unit = 'nm'
-    y_unit = 'pol'
-
-    def __init__(self, x=None, y=None, lam=None, dx=None, dy=None, dlam=None):
+class Data1D(plottable_1D, DataInfo):
+    """
+    1D data class
+    """
+    def __init__(self, x=None, y=None, dx=None, dy=None, lam=None, dlam=None, isSesans=None):
         DataInfo.__init__(self)
-        plottable_sesans1D.__init__(self, x, y, lam, dx, dy, dlam)
+        plottable_1D.__init__(self, x, y, dx, dy, None, None, lam, dlam)
+        self.isSesans = isSesans
+        try:
+            if self.isSesans:  # the data is SESANS
+                self.x_unit = 'A'
+                self.y_unit = 'pol'
+            elif not self.isSesans:  # the data is SANS
+                self.x_unit = '1/A'
+                self.y_unit = '1/cm'
+        except:  # the data is not recognized/supported, and the user is notified
+            raise(TypeError, 'data not recognized, check documentation for supported 1D data formats')

     def __str__(self):
…
         return _str

-    def clone_without_data(self, length=0, clone=None):
-        """
-        Clone the current object, without copying the data (which
-        will be filled out by a subsequent operation).
-        The data arrays will be initialized to zero.
-
-        :param length: length of the data array to be initialized
-        :param clone: if provided, the data will be copied to clone
-        """
-        from copy import deepcopy
-        if clone is None or not issubclass(clone.__class__, Data1D):
-            x = numpy.zeros(length)
-            dx = numpy.zeros(length)
-            y = numpy.zeros(length)
-            dy = numpy.zeros(length)
-            clone = Data1D(x, y, dx=dx, dy=dy)
-
-        clone.title = self.title
-        clone.run = self.run
-        clone.filename = self.filename
-        clone.instrument = self.instrument
-        clone.notes = deepcopy(self.notes)
-        clone.process = deepcopy(self.process)
-        clone.detector = deepcopy(self.detector)
-        clone.sample = deepcopy(self.sample)
-        clone.source = deepcopy(self.source)
-        clone.collimation = deepcopy(self.collimation)
-        clone.trans_spectrum = deepcopy(self.trans_spectrum)
-        clone.meta_data = deepcopy(self.meta_data)
-        clone.errors = deepcopy(self.errors)
-
-        return clone
-
-class Data1D(plottable_1D, DataInfo):
-    """
-    1D data class
-    """
-    x_unit = '1/A'
-    y_unit = '1/cm'
-
-    def __init__(self, x, y, dx=None, dy=None):
-        DataInfo.__init__(self)
-        plottable_1D.__init__(self, x, y, dx, dy)
-
-    def __str__(self):
-        """
-        Nice printout
-        """
-        _str = "%s\n" % DataInfo.__str__(self)
-        _str += "Data:\n"
-        _str += " Type: %s\n" % self.__class__.__name__
-        _str += " X-axis: %s\t[%s]\n" % (self._xaxis, self._xunit)
-        _str += " Y-axis: %s\t[%s]\n" % (self._yaxis, self._yunit)
-        _str += " Length: %g\n" % len(self.x)
-        return _str
-
     def is_slit_smeared(self):
…
         y = numpy.zeros(length)
         dy = numpy.zeros(length)
-        clone = Data1D(x, y, dx=dx, dy=dy)
+        lam = numpy.zeros(length)
+        dlam = numpy.zeros(length)
+        clone = Data1D(x, y, lam=lam, dx=dx, dy=dy, dlam=dlam)

         clone.title = self.title
…
     ## Vector of Q-values at the center of each bin in y
     y_bins = None
+    ## No 2D SESANS data as of yet. Always set it to False
+    isSesans = False

     def __init__(self, data=None, err_data=None, qx_data=None,
                  qy_data=None, q_data=None, mask=None,
                  dqx_data=None, dqy_data=None):
-        self.y_bins = []
-        self.x_bins = []
         DataInfo.__init__(self)
         plottable_2D.__init__(self, data, err_data, qx_data,
                               qy_data, q_data, mask, dqx_data, dqy_data)
+        self.y_bins = []
+        self.x_bins = []
+
         if len(self.detector) > 0:
             raise RuntimeError, "Data2D: Detector bank already filled at init"
…
     final_dataset.xmin = data.xmin
     final_dataset.ymin = data.ymin
+    final_dataset.isSesans = datainfo.isSesans
     final_dataset.title = datainfo.title
     final_dataset.run = datainfo.run
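With plottable_sesans1D and SESANSData1D removed, a single Data1D now carries the optional lam/dlam columns and an isSesans flag that selects the axis units at construction time. A small sketch of just that unit-selection rule (a standalone function, not the real class):

```python
def data1d_units(is_sesans):
    # Mirrors the unit selection in the merged Data1D.__init__ above:
    # SESANS data is spin-echo length vs. polarisation, while SANS data
    # is Q vs. absolute intensity.
    if is_sesans:
        return 'A', 'pol'
    return '1/A', '1/cm'

for flag in (True, False):
    x_unit, y_unit = data1d_units(flag)
    print("isSesans=%s -> x [%s], y [%s]" % (flag, x_unit, y_unit))
```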
src/sas/sascalc/dataloader/manipulations.py
rb699768 → rb2b36932

         :return: Data1D object
     """
-    if len(data2D.detector) != 1:
+    if len(data2D.detector) > 1:
         msg = "_Slab._avg: invalid number of "
         msg += " detectors: %g" % len(data2D.detector)
…
         error on number of counts, number of entries summed
     """
-    if len(data2D.detector) != 1:
+    if len(data2D.detector) > 1:
         msg = "Circular averaging: invalid number "
         msg += "of detectors: %g" % len(data2D.detector)
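Relaxing the guard from != 1 to > 1 means a Data2D with an empty detector list (for example, data synthesized in memory or loaded without instrument metadata) is no longer rejected; only a genuinely ambiguous multi-detector bank still raises. A toy illustration of the two behaviours:

```python
def rejects_old(detectors):
    # Pre-change guard: anything but exactly one detector was rejected,
    # including usable data that simply had no detector metadata.
    return len(detectors) != 1

def rejects_new(detectors):
    # Post-change guard: only multiple detector banks are ambiguous.
    return len(detectors) > 1

for banks in ([], ["det0"], ["det0", "det1"]):
    print("%d detector(s): old rejects=%s, new rejects=%s"
          % (len(banks), rejects_old(banks), rejects_new(banks)))
```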
src/sas/sascalc/dataloader/readers/ascii_reader.py
r7d94915 → rd2471870

             input_f.close()
         if not is_data:
-            return None
+            msg = "ascii_reader: x has no data"
+            raise RuntimeError, msg
         # Sanity check
         if has_error_dy == True and not len(ty) == len(tdy):
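Because the reader now raises RuntimeError instead of silently returning None when no usable x data is found, callers that probe a file with several readers should treat that exception as "not my format". A hedged sketch (read(path) matches the sascalc reader interface; the helper itself is invented for illustration):

```python
def try_reader(reader, path):
    """Attempt one reader; convert its RuntimeError into a skip."""
    try:
        return reader.read(path)
    except RuntimeError as exc:
        # e.g. "ascii_reader: x has no data"
        print("skipping %s: %s" % (path, exc))
        return None
```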
src/sas/sascalc/dataloader/readers/cansas_constants.py
r250fec92 → rad4632c

                  "variable" : None,
                  "children" : {"Idata" : SASDATA_IDATA,
+                               "Sesans": {"storeas": "content"},
+                               "zacceptance": {"storeas": "float"},
                                "<any>" : ANY
                               }
src/sas/sascalc/dataloader/readers/cansas_reader.py
r0639476 → rc221349

 import inspect
 # For saving individual sections of data
-from sas.sascalc.dataloader.data_info import Data1D, DataInfo, plottable_1D
-from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, Detector, Process, Aperture
-from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable as combine_data
+from sas.sascalc.dataloader.data_info import Data1D, Data2D, DataInfo, \
+    plottable_1D, plottable_2D
+from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, \
+    Detector, Process, Aperture
+from sas.sascalc.dataloader.data_info import \
+    combine_data_info_with_plottable as combine_data
 import sas.sascalc.dataloader.readers.xml_reader as xml_reader
 from sas.sascalc.dataloader.readers.xml_reader import XMLreader
…
     The CanSAS reader requires PyXML 0.8.4 or later.
     """
-    ## CanSAS version - defaults to version 1.0
+    # CanSAS version - defaults to version 1.0
     cansas_version = "1.0"
     base_ns = "{cansas1d/1.0}"
…
     invalid = True
     frm = ""
-    ## Log messages and errors
+    # Log messages and errors
     logging = None
     errors = set()
-    ## Namespace hierarchy for current xml_file object
+    # Namespace hierarchy for current xml_file object
     names = None
     ns_list = None
-    ## Temporary storage location for loading multiple data sets in a single file
+    # Temporary storage location for loading multiple data sets in a single file
     current_datainfo = None
     current_dataset = None
     current_data1d = None
     data = None
-    ## List of data1D objects to be sent back to SasView
+    # List of data1D objects to be sent back to SasView
     output = None
-    ## Wildcards
+    # Wildcards
     type = ["XML files (*.xml)|*.xml", "SasView Save Files (*.svs)|*.svs"]
-    ## List of allowed extensions
+    # List of allowed extensions
     ext = ['.xml', '.XML', '.svs', '.SVS']
-    ## Flag to bypass extension check
+    # Flag to bypass extension check
     allow_all = True
…
                 self.parent_class = tagname_original
                 if tagname == 'SASdata':
-                    self._initialize_new_data_set()
-                ## Recursion step to access data within the group
+                    self._initialize_new_data_set(node)
+                    if isinstance(self.current_dataset, plottable_2D):
+                        x_bins = attr.get("x_bins", "")
+                        y_bins = attr.get("y_bins", "")
+                        if x_bins is not "" and y_bins is not "":
+                            self.current_dataset.shape = (x_bins, y_bins)
+                        else:
+                            self.current_dataset.shape = ()
+                # Recursion step to access data within the group
                 self._parse_entry(node, True)
                 if tagname == "SASsample":
…
                     self.add_intermediate()
             else:
-                data_point, unit = self._get_node_value(node, tagname)
-
-                ## If this is a dataset, store the data appropriately
+                if isinstance(self.current_dataset, plottable_2D):
+                    data_point = node.text
+                    unit = attr.get('unit', '')
+                else:
+                    data_point, unit = self._get_node_value(node, tagname)
+
+                # If this is a dataset, store the data appropriately
                 if tagname == 'Run':
                     self.current_datainfo.run_name[data_point] = name
…
                     self.current_datainfo.notes.append(data_point)

-                ## I and Q Data
-                elif tagname == 'I':
-                    self.current_dataset.yaxis("Intensity", unit)
+                # I and Q - 1D data
+                elif tagname == 'I' and isinstance(self.current_dataset, plottable_1D):
+                    unit_list = unit.split("|")
+                    if len(unit_list) > 1:
+                        self.current_dataset.yaxis(unit_list[0].strip(),
+                                                   unit_list[1].strip())
+                    else:
+                        self.current_dataset.yaxis("Intensity", unit)
                     self.current_dataset.y = np.append(self.current_dataset.y, data_point)
-                elif tagname == 'Idev':
+                elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_1D):
                     self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)
                 elif tagname == 'Q':
-                    self.current_dataset.xaxis("Q", unit)
+                    unit_list = unit.split("|")
+                    if len(unit_list) > 1:
+                        self.current_dataset.xaxis(unit_list[0].strip(),
+                                                   unit_list[1].strip())
+                    else:
+                        self.current_dataset.xaxis("Q", unit)
                     self.current_dataset.x = np.append(self.current_dataset.x, data_point)
                 elif tagname == 'Qdev':
…
                 elif tagname == 'Shadowfactor':
                     pass
-
-                ## Sample Information
+                elif tagname == 'Sesans':
+                    self.current_datainfo.isSesans = bool(data_point)
+                elif tagname == 'zacceptance':
+                    self.current_datainfo.sample.zacceptance = (data_point, unit)
+
+                # I and Qx, Qy - 2D data
+                elif tagname == 'I' and isinstance(self.current_dataset, plottable_2D):
+                    self.current_dataset.yaxis("Intensity", unit)
+                    self.current_dataset.data = np.fromstring(data_point, dtype=float, sep=",")
+                elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_2D):
+                    self.current_dataset.err_data = np.fromstring(data_point, dtype=float, sep=",")
+                elif tagname == 'Qx':
+                    self.current_dataset.xaxis("Qx", unit)
+                    self.current_dataset.qx_data = np.fromstring(data_point, dtype=float, sep=",")
+                elif tagname == 'Qy':
+                    self.current_dataset.yaxis("Qy", unit)
+                    self.current_dataset.qy_data = np.fromstring(data_point, dtype=float, sep=",")
+                elif tagname == 'Qxdev':
+                    self.current_dataset.xaxis("Qxdev", unit)
+                    self.current_dataset.dqx_data = np.fromstring(data_point, dtype=float, sep=",")
+                elif tagname == 'Qydev':
+                    self.current_dataset.yaxis("Qydev", unit)
+                    self.current_dataset.dqy_data = np.fromstring(data_point, dtype=float, sep=",")
+                elif tagname == 'Mask':
+                    inter = [item == "1" for item in data_point.split(",")]
+                    self.current_dataset.mask = np.asarray(inter, dtype=bool)
+
+                # Sample Information
                 elif tagname == 'ID' and self.parent_class == 'SASsample':
                     self.current_datainfo.sample.ID = data_point
…
                     self.current_datainfo.sample.orientation_unit = unit

-                ## Instrumental Information
+                # Instrumental Information
                 elif tagname == 'name' and self.parent_class == 'SASinstrument':
                     self.current_datainfo.instrument = data_point
-                ## Detector Information
+                # Detector Information
                 elif tagname == 'name' and self.parent_class == 'SASdetector':
                     self.detector.name = data_point
…
                     self.detector.orientation.z = data_point
                     self.detector.orientation_unit = unit
-                ## Collimation and Aperture
+                # Collimation and Aperture
                 elif tagname == 'length' and self.parent_class == 'SAScollimation':
                     self.collimation.length = data_point
…
                     self.collimation.size_unit = unit

-                ## Process Information
+                # Process Information
                 elif tagname == 'name' and self.parent_class == 'SASprocess':
                     self.process.name = data_point
…
                     self.process.term.append(dic)

-                ## Transmission Spectrum
+                # Transmission Spectrum
                 elif tagname == 'T' and self.parent_class == 'Tdata':
                     self.transspectrum.transmission = np.append(self.transspectrum.transmission, data_point)
…
                     self.transspectrum.wavelength_unit = unit

-                ## Source Information
+                # Source Information
                 elif tagname == 'wavelength' and (self.parent_class == 'SASsource' or self.parent_class == 'SASData'):
                     self.current_datainfo.source.wavelength = data_point
…
                     self.current_datainfo.source.beam_shape = data_point

-                ## Everything else goes in meta_data
+                # Everything else goes in meta_data
                 else:
                     new_key = self._create_unique_key(self.current_datainfo.meta_data, tagname)
…
             self.add_data_set()
             empty = None
-            if self.output[0].dx is not None:
-                self.output[0].dxl = np.empty(0)
-                self.output[0].dxw = np.empty(0)
-            else:
-                self.output[0].dx = np.empty(0)
             return self.output[0], empty
…
         self.current_datainfo = DataInfo()

-    def _initialize_new_data_set(self, parent_list=None):
+    def _initialize_new_data_set(self, node=None):
         """
         A private class method to generate a new 1D data object.
         Outside methods should call add_data_set() to be sure any existing data is stored properly.

-        :param parent_list: List of names of parent elements
-        """
-
-        if parent_list is None:
-            parent_list = []
+        :param node: XML node to determine if 1D or 2D data
+        """
         x = np.array(0)
         y = np.array(0)
+        for child in node:
+            if child.tag.replace(self.base_ns, "") == "Idata":
+                for i_child in child:
+                    if i_child.tag.replace(self.base_ns, "") == "Qx":
+                        self.current_dataset = plottable_2D()
+                        return
         self.current_dataset = plottable_1D(x, y)
…
         """

-        ## Append errors to dataset and reset class errors
+        # Append errors to dataset and reset class errors
         self.current_datainfo.errors = set()
         for error in self.errors:
…
         self.errors.clear()

-        ## Combine all plottables with datainfo and append each to output
-        ## Type cast data arrays to float64 and find min/max as appropriate
+        # Combine all plottables with datainfo and append each to output
+        # Type cast data arrays to float64 and find min/max as appropriate
         for dataset in self.data:
-            if dataset.x is not None:
-                dataset.x = np.delete(dataset.x, [0])
-                dataset.x = dataset.x.astype(np.float64)
-                dataset.xmin = np.min(dataset.x)
-                dataset.xmax = np.max(dataset.x)
-            if dataset.y is not None:
-                dataset.y = np.delete(dataset.y, [0])
-                dataset.y = dataset.y.astype(np.float64)
-                dataset.ymin = np.min(dataset.y)
-                dataset.ymax = np.max(dataset.y)
-            if dataset.dx is not None:
-                dataset.dx = np.delete(dataset.dx, [0])
-                dataset.dx = dataset.dx.astype(np.float64)
-            if dataset.dxl is not None:
-                dataset.dxl = np.delete(dataset.dxl, [0])
-                dataset.dxl = dataset.dxl.astype(np.float64)
-            if dataset.dxw is not None:
-                dataset.dxw = np.delete(dataset.dxw, [0])
-                dataset.dxw = dataset.dxw.astype(np.float64)
-            if dataset.dy is not None:
-                dataset.dy = np.delete(dataset.dy, [0])
-                dataset.dy = dataset.dy.astype(np.float64)
-            np.trim_zeros(dataset.x)
-            np.trim_zeros(dataset.y)
-            np.trim_zeros(dataset.dy)
+            if isinstance(dataset, plottable_1D):
+                if dataset.x is not None:
+                    dataset.x = np.delete(dataset.x, [0])
+                    dataset.x = dataset.x.astype(np.float64)
+                    dataset.xmin = np.min(dataset.x)
+                    dataset.xmax = np.max(dataset.x)
+                if dataset.y is not None:
+                    dataset.y = np.delete(dataset.y, [0])
+                    dataset.y = dataset.y.astype(np.float64)
+                    dataset.ymin = np.min(dataset.y)
+                    dataset.ymax = np.max(dataset.y)
+                if dataset.dx is not None:
+                    dataset.dx = np.delete(dataset.dx, [0])
+                    dataset.dx = dataset.dx.astype(np.float64)
+                if dataset.dxl is not None:
+                    dataset.dxl = np.delete(dataset.dxl, [0])
+                    dataset.dxl = dataset.dxl.astype(np.float64)
+                if dataset.dxw is not None:
+                    dataset.dxw = np.delete(dataset.dxw, [0])
+                    dataset.dxw = dataset.dxw.astype(np.float64)
+                if dataset.dy is not None:
+                    dataset.dy = np.delete(dataset.dy, [0])
+                    dataset.dy = dataset.dy.astype(np.float64)
+                np.trim_zeros(dataset.x)
+                np.trim_zeros(dataset.y)
+                np.trim_zeros(dataset.dy)
+            elif isinstance(dataset, plottable_2D):
+                dataset.data = dataset.data.astype(np.float64)
+                dataset.qx_data = dataset.qx_data.astype(np.float64)
+                dataset.xmin = np.min(dataset.qx_data)
+                dataset.xmax = np.max(dataset.qx_data)
+                dataset.qy_data = dataset.qy_data.astype(np.float64)
+                dataset.ymin = np.min(dataset.qy_data)
+                dataset.ymax = np.max(dataset.qy_data)
+                dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data
+                                         + dataset.qy_data * dataset.qy_data)
+                if dataset.err_data is not None:
+                    dataset.err_data = dataset.err_data.astype(np.float64)
+                if dataset.dqx_data is not None:
+                    dataset.dqx_data = dataset.dqx_data.astype(np.float64)
+                if dataset.dqy_data is not None:
+                    dataset.dqy_data = dataset.dqy_data.astype(np.float64)
+                if dataset.mask is not None:
+                    dataset.mask = dataset.mask.astype(dtype=bool)
+
+                if len(dataset.shape) == 2:
+                    n_rows, n_cols = dataset.shape
+                    dataset.y_bins = dataset.qy_data[0::int(n_cols)]
+                    dataset.x_bins = dataset.qx_data[:int(n_cols)]
+                    dataset.data = dataset.data.flatten()
+                else:
+                    dataset.y_bins = []
+                    dataset.x_bins = []
+                    dataset.data = dataset.data.flatten()
+
             final_dataset = combine_data(dataset, self.current_datainfo)
             self.output.append(final_dataset)
…
                 and local_unit.lower() != "none":
             if HAS_CONVERTER == True:
-                ## Check local units - bad units raise KeyError
+                # Check local units - bad units raise KeyError
                 data_conv_q = Converter(local_unit)
                 value_unit = default_unit
…
         A method to check all resolution data sets are the same size as I and Q
         """
-        dql_exists = False
-        dqw_exists = False
-        dq_exists = False
-        di_exists = False
-        if self.current_dataset.dxl is not None:
-            dql_exists = True
-        if self.current_dataset.dxw is not None:
-            dqw_exists = True
-        if self.current_dataset.dx is not None:
-            dq_exists = True
-        if self.current_dataset.dy is not None:
-            di_exists = True
-        if dqw_exists and not dql_exists:
-            array_size = self.current_dataset.dxw.size - 1
-            self.current_dataset.dxl = np.append(self.current_dataset.dxl, np.zeros([array_size]))
-        elif dql_exists and not dqw_exists:
-            array_size = self.current_dataset.dxl.size - 1
-            self.current_dataset.dxw = np.append(self.current_dataset.dxw, np.zeros([array_size]))
-        elif not dql_exists and not dqw_exists and not dq_exists:
-            array_size = self.current_dataset.x.size - 1
-            self.current_dataset.dx = np.append(self.current_dataset.dx, np.zeros([array_size]))
-        if not di_exists:
-            array_size = self.current_dataset.y.size - 1
-            self.current_dataset.dy = np.append(self.current_dataset.dy, np.zeros([array_size]))
+        if isinstance(self.current_dataset, plottable_1D):
+            dql_exists = False
+            dqw_exists = False
+            dq_exists = False
+            di_exists = False
+            if self.current_dataset.dxl is not None:
+                dql_exists = True
+            if self.current_dataset.dxw is not None:
+                dqw_exists = True
+            if self.current_dataset.dx is not None:
+                dq_exists = True
+            if self.current_dataset.dy is not None:
+                di_exists = True
+            if dqw_exists and not dql_exists:
+                array_size = self.current_dataset.dxw.size - 1
+                self.current_dataset.dxl = np.append(self.current_dataset.dxl,
+                                                     np.zeros([array_size]))
+            elif dql_exists and not dqw_exists:
+                array_size = self.current_dataset.dxl.size - 1
+                self.current_dataset.dxw = np.append(self.current_dataset.dxw,
+                                                     np.zeros([array_size]))
+            elif not dql_exists and not dqw_exists and not dq_exists:
+                array_size = self.current_dataset.x.size - 1
+                self.current_dataset.dx = np.append(self.current_dataset.dx,
+                                                    np.zeros([array_size]))
+            if not di_exists:
+                array_size = self.current_dataset.y.size - 1
+                self.current_dataset.dy = np.append(self.current_dataset.dy,
+                                                    np.zeros([array_size]))
+        elif isinstance(self.current_dataset, plottable_2D):
+            dqx_exists = False
+            dqy_exists = False
+            di_exists = False
+            mask_exists = False
+            if self.current_dataset.dqx_data is not None:
+                dqx_exists = True
+            if self.current_dataset.dqy_data is not None:
+                dqy_exists = True
+            if self.current_dataset.err_data is not None:
+                di_exists = True
+            if self.current_dataset.mask is not None:
+                mask_exists = True
+            if not dqy_exists:
+                array_size = self.current_dataset.qy_data.size - 1
+                self.current_dataset.dqy_data = np.append(
+                    self.current_dataset.dqy_data, np.zeros([array_size]))
+            if not dqx_exists:
+                array_size = self.current_dataset.qx_data.size - 1
+                self.current_dataset.dqx_data = np.append(
+                    self.current_dataset.dqx_data, np.zeros([array_size]))
+            if not di_exists:
+                array_size = self.current_dataset.data.size - 1
+                self.current_dataset.err_data = np.append(
+                    self.current_dataset.err_data, np.zeros([array_size]))
+            if not mask_exists:
+                array_size = self.current_dataset.data.size - 1
+                self.current_dataset.mask = np.append(
+                    self.current_dataset.mask,
+                    np.ones([array_size], dtype=bool))

     ####### All methods below are for writing CanSAS XML files #######

     def write(self, filename, datainfo):
…
         :param datainfo: Data1D object
         """
-        if not issubclass(datainfo.__class__, Data1D):
-            raise RuntimeError, "The cansas writer expects a Data1D instance"
+        is_2d = False
+        if issubclass(datainfo.__class__, Data2D):
+            is_2d = True

         # Get PIs and create root element
…
         self._write_run_names(datainfo, entry_node)
         # Add Data info to SASEntry
-        self._write_data(datainfo, entry_node)
+        if is_2d:
+            self._write_data_2d(datainfo, entry_node)
+        else:
+            self._write_data(datainfo, entry_node)
         # Transmission Spectrum Info
         self._write_trans_spectrum(datainfo, entry_node)
…
     def _write_data(self, datainfo, entry_node):
         """
-        Writes the I and Q data to the XML file
+        Writes 1D I and Q data to the XML file

         :param datainfo: The Data1D object the information is coming from
…
             node.append(point)
             self.write_node(point, "Q", datainfo.x[i],
-                            {'unit': datainfo.x_unit})
+                            {'unit': datainfo._xaxis + " | " + datainfo._xunit})
             if len(datainfo.y) >= i:
                 self.write_node(point, "I", datainfo.y[i],
-                                {'unit': datainfo.y_unit})
+                                {'unit': datainfo._yaxis + " | " + datainfo._yunit})
             if datainfo.dy is not None and len(datainfo.dy) > i:
                 self.write_node(point, "Idev", datainfo.dy[i],
-                                {'unit': datainfo.y_unit})
+                                {'unit': datainfo._yaxis + " | " + datainfo._yunit})
             if datainfo.dx is not None and len(datainfo.dx) > i:
                 self.write_node(point, "Qdev", datainfo.dx[i],
-                                {'unit': datainfo.x_unit})
+                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
             if datainfo.dxw is not None and len(datainfo.dxw) > i:
                 self.write_node(point, "dQw", datainfo.dxw[i],
-                                {'unit': datainfo.x_unit})
+                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
             if datainfo.dxl is not None and len(datainfo.dxl) > i:
                 self.write_node(point, "dQl", datainfo.dxl[i],
-                                {'unit': datainfo.x_unit})
+                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+        if datainfo.isSesans:
+            sesans = self.create_element("Sesans")
+            sesans.text = str(datainfo.isSesans)
+            node.append(sesans)
+            self.write_node(node, "zacceptance", datainfo.sample.zacceptance[0],
+                            {'unit': datainfo.sample.zacceptance[1]})
+
+
+    def _write_data_2d(self, datainfo, entry_node):
+        """
+        Writes 2D data to the XML file
+
+        :param datainfo: The Data2D object the information is coming from
+        :param entry_node: lxml node ElementTree object to be appended to
+        """
+        attr = {}
+        if datainfo.data.shape:
+            attr["x_bins"] = str(len(datainfo.x_bins))
+            attr["y_bins"] = str(len(datainfo.y_bins))
+        node = self.create_element("SASdata", attr)
+        self.append(node, entry_node)
+
+        point = self.create_element("Idata")
+        node.append(point)
+        qx = ','.join([str(datainfo.qx_data[i]) for i in xrange(len(datainfo.qx_data))])
+        qy = ','.join([str(datainfo.qy_data[i]) for i in xrange(len(datainfo.qy_data))])
+        intensity = ','.join([str(datainfo.data[i]) for i in xrange(len(datainfo.data))])
+
+        self.write_node(point, "Qx", qx,
+                        {'unit': datainfo._xunit})
+        self.write_node(point, "Qy", qy,
+                        {'unit': datainfo._yunit})
+        self.write_node(point, "I", intensity,
+                        {'unit': datainfo._zunit})
+        if datainfo.err_data is not None:
+            err = ','.join([str(datainfo.err_data[i]) for i in
+                            xrange(len(datainfo.err_data))])
+            self.write_node(point, "Idev", err,
+                            {'unit': datainfo._zunit})
+        if datainfo.dqy_data is not None:
+            dqy = ','.join([str(datainfo.dqy_data[i]) for i in
+                            xrange(len(datainfo.dqy_data))])
+            self.write_node(point, "Qydev", dqy,
+                            {'unit': datainfo._yunit})
+        if datainfo.dqx_data is not None:
+            dqx = ','.join([str(datainfo.dqx_data[i]) for i in
+                            xrange(len(datainfo.dqx_data))])
+            self.write_node(point, "Qxdev", dqx,
+                            {'unit': datainfo._xunit})
+        if datainfo.mask is not None:
+            mask = ','.join(
+                ["1" if datainfo.mask[i] else "0"
+                 for i in xrange(len(datainfo.mask))])
+            self.write_node(point, "Mask", mask)

     def _write_trans_spectrum(self, datainfo, entry_node):
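In the 2D branch, each whole array travels as a single comma-separated string per element (Qx, Qy, I, Idev, ...) inside one <Idata> node; the reader recovers the arrays with np.fromstring and rebuilds x_bins/y_bins from the flat data using the x_bins/y_bins attributes as the shape. A self-contained round-trip sketch (np.fromstring matches this Python 2-era code; np.frombuffer or np.fromiter would be the modern route):

```python
import numpy as np

# A 2x3 detector image, flattened row-major as the writer emits it.
qx = np.array([0.01, 0.02, 0.03, 0.01, 0.02, 0.03])
qy = np.array([0.1, 0.1, 0.1, 0.2, 0.2, 0.2])

# _write_data_2d: arrays become comma-separated text in one <Idata> node.
qx_text = ','.join(str(v) for v in qx)
qy_text = ','.join(str(v) for v in qy)

# _parse_entry: text back to float64 arrays.
qx_back = np.fromstring(qx_text, dtype=float, sep=",")
qy_back = np.fromstring(qy_text, dtype=float, sep=",")

# add_data_set: bins recovered from the flat arrays, shape = (2, 3).
n_rows, n_cols = 2, 3
x_bins = qx_back[:n_cols]      # -> [0.01, 0.02, 0.03]
y_bins = qy_back[0::n_cols]    # -> [0.1, 0.2]
assert np.allclose(x_bins, [0.01, 0.02, 0.03])
assert np.allclose(y_bins, [0.1, 0.2])
```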
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
r5e906207 → rbbd0f37

             else:
                 self.current_dataset.x = data_set.flatten()
+                continue
+            elif key == u'Qdev':
+                self.current_dataset.dx = data_set.flatten()
                 continue
             elif key == u'Qy':
src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_0.xsd
r250fec92 → raf08e55

 <complexType name="IdataType">
+    <xsd:choice>
     <sequence>
         <element name="Q" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
…
         <xsd:any minOccurs="0" maxOccurs="unbounded" processContents="lax" namespace="##other" />
     </sequence>
+    <sequence>
+        <element name="Qx" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+        <element name="Qy" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+        <element name="I" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+        <element name="Idev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+        <element name="Qydev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+        <element name="Qxdev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+        <element name="Mask" minOccurs="0" maxOccurs="1" type="string" default="0" />
+    </sequence>
+    </xsd:choice>
 </complexType>
…
     <attribute name="name" type="string" use="optional" default="" />
     <attribute name="timestamp" type="dateTime" use="optional" />
+    <attribute name="x_bins" type="string" use="optional" />
+    <attribute name="y_bins" type="string" use="optional" />
 </complexType>
src/sas/sascalc/dataloader/readers/schema/cansas1d_invalid_v1_1.xsd
r250fec92 → raf08e55

 <complexType name="IdataType">
+    <xsd:choice>
     <sequence>
         <element name="Q" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
…
         <xsd:any minOccurs="0" maxOccurs="unbounded" processContents="lax" namespace="##other" />
     </sequence>
+    <sequence>
+        <element name="Qx" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+        <element name="Qy" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+        <element name="I" minOccurs="1" maxOccurs="1" type="tns:floatUnitType" />
+        <element name="Idev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+        <element name="Qydev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+        <element name="Qxdev" minOccurs="0" maxOccurs="1" type="tns:floatUnitType" default="0" />
+        <element name="Mask" minOccurs="0" maxOccurs="1" type="string" default="0" />
+    </sequence>
+    </xsd:choice>
 </complexType>
…
     <attribute name="name" type="string" use="optional" default="" />
     <attribute name="timestamp" type="dateTime" use="optional" />
+    <attribute name="x_bins" type="string" use="optional" />
+    <attribute name="y_bins" type="string" use="optional" />
 </complexType>
src/sas/sascalc/dataloader/readers/sesans_reader.py
r1c0e3b0 → r7caf3e5

 import numpy
 import os
-from sas.sascalc.dataloader.data_info import SESANSData1D
+from sas.sascalc.dataloader.data_info import Data1D

 # Check whether we have a converter available
…
                 raise RuntimeError, "sesans_reader: cannot open %s" % path
             buff = input_f.read()
-            # print buff
             lines = buff.splitlines()
-            # print lines
-            #Jae could not find python universal line spliter:
-            #keep the below for now
-            # some ascii data has \r line separator,
-            # try it when the data is on only one long line
-            # if len(lines) < 2 :
-            #    lines = buff.split('\r')
-
             x = numpy.zeros(0)
             y = numpy.zeros(0)
…
             tdlam = numpy.zeros(0)
             tdx = numpy.zeros(0)
-            # print "all good"
-            output = SESANSData1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam)
-            # print output
+            output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True)
             self.filename = output.filename = basename

-            ## Initialize counters for data lines and header lines.
-            # is_data = False  # Has more than 5 lines
-            # # More than "5" lines of data is considered as actual
-            # # data unless that is the only data
-            # mum_data_lines = 5
-            # # To count # of current data candidate lines
-            # i = -1
-            # # To count total # of previous data candidate lines
-            # i1 = -1
-            # # To count # of header lines
-            # j = -1
-            # # Helps to count # of header lines
-            # j1 = -1
-            # #minimum required number of columns of data; ( <= 4).
-            # lentoks = 2
             paramnames=[]
             paramvals=[]
…
             Pvals=[]
             dPvals=[]
-            # print x
-            # print zvals
+
             for line in lines:
                 # Initial try for CSV (split on ,)
…
                     if len(toks)>5:
                         zvals.append(toks[0])
-                        dzvals.append(toks[1])
-                        lamvals.append(toks[2])
-                        dlamvals.append(toks[3])
-                        Pvals.append(toks[4])
-                        dPvals.append(toks[5])
+                        dzvals.append(toks[3])
+                        lamvals.append(toks[4])
+                        dlamvals.append(toks[5])
+                        Pvals.append(toks[1])
+                        dPvals.append(toks[2])
                     else:
                         continue
…
             default_z_unit = "A"
             data_conv_P = None
-            default_p_unit = " "
+            default_p_unit = " "  # Adjust unit for axis (L^-3)
             lam_unit = lam_header[1].replace("[","").replace("]","")
+            if lam_unit == 'AA':
+                lam_unit = 'A'
             varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
             valrange=range(1, len(zvals))
…
             output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit)
             output.y = y
+            output.y_unit = r'\AA^{-2} cm^{-1}'  # output y_unit added
             output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit)
             output.dy = dy
             output.lam, output.lam_unit = self._unit_conversion(lam, lam_unit, default_z_unit)
             output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit)
+
+            output.xaxis(r"\rm{z}", output.x_unit)
+            output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit)  # Adjust label to ln P/(lam^2 t), remove lam column refs

-            output.xaxis("\rm{z}", output.x_unit)
-            output.yaxis("\\rm{P/P0}", output.y_unit)
             # Store loading process information
             output.meta_data['loader'] = self.type_name
-            output.sample.thickness = float(paramvals[6])
+            #output.sample.thickness = float(paramvals[6])
             output.sample.name = paramvals[1]
             output.sample.ID = paramvals[0]
             zaccept_unit_split = paramnames[7].split("[")
             zaccept_unit = zaccept_unit_split[1].replace("]","")
-            if zaccept_unit.strip() == '\AA^-1':
+            if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1':
                 zaccept_unit = "1/A"
             output.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
-            output.vars=varheader
+            output.vars = varheader

             if len(output.x) < 1:
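The index shuffle in the middle hunk is the substantive fix: the SES ASCII table carries its columns as z, P, dP, dz, lambda, dlambda, not z, dz, lambda, dlambda, P, dP as the old code assumed. A small sketch of the corrected mapping (the data row is invented for illustration):

```python
# Corrected column layout of a SES ASCII data row:
#   0: z (spin-echo length)   1: P   2: dP   3: dz   4: lambda   5: dlambda
line = "100.0  0.95  0.01  2.0  2.11  0.11"
toks = line.split()
z, P, dP = toks[0], toks[1], toks[2]
dz, lam, dlam = toks[3], toks[4], toks[5]
print("z=%s P=%s dP=%s dz=%s lam=%s dlam=%s" % (z, P, dP, dz, lam, dlam))
```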
src/sas/sascalc/file_converter/c_ext/bsl_loader.c
rdc8a553 → r2ab9c432

 #include <Python.h>
+//#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
 #include <numpy/arrayobject.h>
 #include <stdio.h>
…
 static PyObject *CLoader_init(CLoader *self, PyObject *args, PyObject *kwds) {
     const char *filename;
-    const int n_frames;
-    const int n_pixels;
-    const int n_rasters;
-    const int swap_bytes;
+    int n_frames;
+    int n_pixels;
+    int n_rasters;
+    int swap_bytes;

     if (self != NULL) {
src/sas/sascalc/fit/AbstractFitEngine.py
rfc18690 → ra9f579c

     a way to get residuals from data.
     """
-    def __init__(self, x, y, dx=None, dy=None, smearer=None, data=None):
+    def __init__(self, x, y, dx=None, dy=None, smearer=None, data=None, lam=None, dlam=None):
         """
         :param smearer: is an object of class QSmearer or SlitSmearer
…
         """
-        Data1D.__init__(self, x=x, y=y, dx=dx, dy=dy)
+        Data1D.__init__(self, x=x, y=y, dx=dx, dy=dy, lam=lam, dlam=dlam)
         self.num_points = len(x)
         self.sas_data = data
…
         if self.smearer != None:
             fn.set_index(self.idx)
-            # Get necessary data from self.data and set the data for smearing
-            fn.get_data()
-
             gn = fn.get_value()
         else:
src/sas/sascalc/fit/BumpsFitting.py
rb699768 → r345e7e4

 import os
 from datetime import timedelta, datetime
+import traceback

 import numpy
…
         R.success = result['success']
         if R.success:
-            R.stderr = numpy.hstack((result['stderr'][fitted_index],
-                                     numpy.NaN*numpy.ones(len(fitness.computed_pars))))
+            if result['stderr'] is None:
+                R.stderr = numpy.NaN*numpy.ones(len(param_list))
+            else:
+                R.stderr = numpy.hstack((result['stderr'][fitted_index],
+                                         numpy.NaN*numpy.ones(len(fitness.computed_pars))))
             R.pvec = numpy.hstack((result['value'][fitted_index],
                                    [p.value for p in fitness.computed_pars]))
…
             R.uncertainty_state = result['uncertainty']
         all_results.append(R)
+    all_results[0].mesg = result['errors']

     if q is not None:
…
     try:
         best, fbest = fitdriver.fit()
-    except:
-        import traceback; traceback.print_exc()
-        raise
+        errors = []
+    except Exception as exc:
+        best, fbest = None, numpy.NaN
+        errors = [str(exc), traceback.traceback.format_exc()]
     finally:
         mapper.stop_mapper(fitdriver.mapper)
…
     success = best is not None
+    try:
+        stderr = fitdriver.stderr() if success else None
+    except Exception as exc:
+        errors.append(str(exc))
+        errors.append(traceback.format_exc())
+        stderr = None
     return {
         'value': best if success else None,
-        'stderr': fitdriver.stderr() if success else None,
+        'stderr': stderr,
         'success': success,
         'convergence': convergence,
         'uncertainty': getattr(fitdriver.fitter, 'state', None),
+        'errors': '\n'.join(errors),
     }
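The pattern introduced here is to keep exceptions from escaping the fit runner: failures are converted to strings and returned in an 'errors' entry of the result dict, and the stderr computation gets the same guard. A condensed, self-contained sketch of that pattern (run_fit and its callable argument are hypothetical stand-ins for run_bumps and fitdriver.fit):

```python
import traceback

def run_fit(fit):
    """Capture failures as strings instead of raising out of a worker.
    `fit` is a zero-argument callable standing in for fitdriver.fit()."""
    errors = []
    try:
        best = fit()
    except Exception as exc:
        best = None
        errors = [str(exc), traceback.format_exc()]
    success = best is not None
    return {
        'value': best if success else None,
        'success': success,
        'errors': '\n'.join(errors),
    }

print(run_fit(lambda: 42)['success'])                  # True
print(run_fit(lambda: 1 / 0)['errors'].splitlines()[0])  # the ZeroDivisionError message
```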