Changeset 574adc7 in sasview
- Timestamp: Sep 22, 2017 4:01:32 PM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 34d7b35
- Parents: 9706d88
- Files: 39 edited
src/sas/sascalc/calculator/BaseComponent.py
r9a5097c r574adc7 143 143 qdist[1].__class__.__name__ != 'ndarray': 144 144 msg = "evalDistribution expects a list of 2 ndarrays" 145 raise RuntimeError , msg145 raise RuntimeError(msg) 146 146 147 147 # Extract qx and qy for code clarity … … 167 167 mesg = "evalDistribution is expecting an ndarray of scalar q-values" 168 168 mesg += " or a list [qx,qy] where qx,qy are 2D ndarrays." 169 raise RuntimeError , mesg169 raise RuntimeError(mesg) 170 170 171 171 … … 228 228 return 229 229 230 raise ValueError , "Model does not contain parameter %s" % name230 raise ValueError("Model does not contain parameter %s" % name) 231 231 232 232 def getParam(self, name): … … 250 250 return self.params[item] 251 251 252 raise ValueError , "Model does not contain parameter %s" % name252 raise ValueError("Model does not contain parameter %s" % name) 253 253 254 254 def getParamList(self): … … 294 294 add 295 295 """ 296 raise ValueError , "Model operation are no longer supported"296 raise ValueError("Model operation are no longer supported") 297 297 def __sub__(self, other): 298 298 """ 299 299 sub 300 300 """ 301 raise ValueError , "Model operation are no longer supported"301 raise ValueError("Model operation are no longer supported") 302 302 def __mul__(self, other): 303 303 """ 304 304 mul 305 305 """ 306 raise ValueError , "Model operation are no longer supported"306 raise ValueError("Model operation are no longer supported") 307 307 def __div__(self, other): 308 308 """ 309 309 div 310 310 """ 311 raise ValueError , "Model operation are no longer supported"311 raise ValueError("Model operation are no longer supported") 312 312 313 313 -
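The change that repeats through this file (and most of the files below) is the raise syntax: the Python 2-only statement form `raise SomeError, msg` becomes the call form, which both interpreters parse. A minimal sketch of the ported pattern; the standalone helper is illustrative, in the source the check sits inside evalDistribution:

```python
import numpy as np

def check_qdist(qdist):
    # Python 2 only (a SyntaxError under Python 3):
    #     raise RuntimeError, "evalDistribution expects a list of 2 ndarrays"
    # Portable call form used throughout this changeset:
    if not (isinstance(qdist, list) and len(qdist) == 2
            and all(isinstance(q, np.ndarray) for q in qdist)):
        msg = "evalDistribution expects a list of 2 ndarrays"
        raise RuntimeError(msg)
```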
src/sas/sascalc/calculator/instrument.py
r9a5097c r574adc7 222 222 """ 223 223 # check if the wavelength is in range 224 if min(band) < self.min or\ 225 max(band) > self.max: 226 raise 224 if min(band) < self.min or max(band) > self.max: 225 raise ValueError("band out of range") 227 226 self.band = band 228 227 … … 239 238 """ 240 239 # check if the wavelength is in range 241 if wavelength < min(self.band) or\ 242 wavelength > max(self.band): 243 raise 240 if wavelength < min(self.band) or wavelength > max(self.band): 241 raise ValueError("wavelength out of range") 244 242 self.wavelength = wavelength 245 243 validate(wavelength) … … 324 322 plt.show() 325 323 except: 326 raise RuntimeError , "Can't import matplotlib required to plot..."324 raise RuntimeError("Can't import matplotlib required to plot...") 327 325 328 326 -
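instrument.py also replaces two bare `raise` statements with explicit exceptions. Outside an `except` block a bare `raise` only produces "No active exception to re-raise", so the new code both ports cleanly and says what went wrong. A hedged sketch of the band check; the class name and attributes here are illustrative, not the real TOF class:

```python
class WavelengthBand(object):
    """Illustrative stand-in for the wavelength-band range check."""
    def __init__(self, low=0.0, high=20.0):
        self.min, self.max = low, high
        self.band = (low, high)

    def set_band(self, band):
        # Old code ended in a bare `raise`; the changeset names the failure.
        if min(band) < self.min or max(band) > self.max:
            raise ValueError("band out of range")
        self.band = band
```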
src/sas/sascalc/calculator/resolution_calculator.py
r7432acb r574adc7 4 4 instrumental parameters. 5 5 """ 6 from instrument import Sample 7 from instrument import Detector 8 from instrument import TOF as Neutron 9 from instrument import Aperture 10 # import math stuffs 11 from math import pi 12 from math import sqrt 6 import sys 7 from math import pi, sqrt 13 8 import math 9 import logging 10 14 11 import numpy as np 15 import sys 16 import logging 12 13 from .instrument import Sample 14 from .instrument import Detector 15 from .instrument import TOF as Neutron 16 from .instrument import Aperture 17 17 18 18 logger = logging.getLogger(__name__) … … 208 208 if wavelength == 0: 209 209 msg = "Can't compute the resolution: the wavelength is zero..." 210 raise RuntimeError , msg210 raise RuntimeError(msg) 211 211 return self.intensity 212 212 … … 379 379 if qx_min < self.qx_min: 380 380 self.qx_min = qx_min 381 #raise ValueError , msg381 #raise ValueError(msg) 382 382 if qx_max > self.qx_max: 383 383 self.qx_max = qx_max 384 #raise ValueError , msg384 #raise ValueError(msg) 385 385 if qy_min < self.qy_min: 386 386 self.qy_min = qy_min 387 #raise ValueError , msg387 #raise ValueError(msg) 388 388 if qy_max > self.qy_max: 389 389 self.qy_max = qy_max 390 #raise ValueError , msg390 #raise ValueError(msg) 391 391 if not full_cal: 392 392 return None … … 503 503 # otherwise 504 504 else: 505 raise ValueError , " Improper input..."505 raise ValueError(" Improper input...") 506 506 # get them squared 507 507 sigma = x_comp * x_comp … … 706 706 #self.set_wavelength(wavelength) 707 707 else: 708 raise 708 raise TypeError("invalid wavlength---should be list or float") 709 709 710 710 def set_wave_spread(self, wavelength_spread): … … 717 717 self.wave.set_wave_spread_list([wavelength_spread]) 718 718 else: 719 raise 719 raise TypeError("invalid wavelength spread---should be list or float") 720 720 721 721 def set_wavelength(self, wavelength): … … 766 766 """ 767 767 if len(size) < 1 or len(size) > 2: 768 raise RuntimeError , "The length of the size must be one or two."768 raise RuntimeError("The length of the size must be one or two.") 769 769 self.aperture.set_source_size(size) 770 770 … … 783 783 """ 784 784 if len(size) < 1 or len(size) > 2: 785 raise RuntimeError , "The length of the size must be one or two."785 raise RuntimeError("The length of the size must be one or two.") 786 786 self.aperture.set_sample_size(size) 787 787 … … 806 806 """ 807 807 if len(distance) < 1 or len(distance) > 2: 808 raise RuntimeError , "The length of the size must be one or two."808 raise RuntimeError("The length of the size must be one or two.") 809 809 self.aperture.set_sample_distance(distance) 810 810 … … 816 816 """ 817 817 if len(distance) < 1 or len(distance) > 2: 818 raise RuntimeError , "The length of the size must be one or two."818 raise RuntimeError("The length of the size must be one or two.") 819 819 self.sample.set_distance(distance) 820 820 … … 826 826 """ 827 827 if len(distance) < 1 or len(distance) > 2: 828 raise RuntimeError , "The length of the size must be one or two."828 raise RuntimeError("The length of the size must be one or two.") 829 829 self.detector.set_distance(distance) 830 830 … … 998 998 pix_y_size = detector_pix_size[1] 999 999 else: 1000 raise ValueError , " Input value format error..."1000 raise ValueError(" Input value format error...") 1001 1001 # Sample to detector distance = sample slit to detector 1002 1002 # minus sample offset -
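Besides the raise fixes, resolution_calculator.py regroups its imports (standard library, third-party, then package-local) and converts Python 2 implicit relative imports to the explicit dotted form, the only form Python 3 accepts. The sketch below mirrors the diff and only works when imported as part of the sas.sascalc.calculator package:

```python
# Python 2 implicit relative import (ImportError on Python 3):
#     from instrument import Sample
import sys                  # standard library first
from math import pi, sqrt
import math
import logging

import numpy as np          # third-party next

from .instrument import Sample           # explicit relative imports last
from .instrument import Detector
from .instrument import TOF as Neutron
from .instrument import Aperture
```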
src/sas/sascalc/calculator/sas_gen.py
rf2ea95a r574adc7 34 34 factor = MFACTOR_MT 35 35 else: 36 raise ValueError , "Invalid valueunit"36 raise ValueError("Invalid valueunit") 37 37 sld_m = factor * mag 38 38 return sld_m … … 100 100 """ 101 101 if self.data_vol is None: 102 raise 102 raise TypeError("data_vol is missing") 103 103 self.data_vol = volume 104 104 … … 174 174 if len(x[1]) > 0: 175 175 msg = "Not a 1D." 176 raise ValueError , msg176 raise ValueError(msg) 177 177 i_out = np.zeros_like(x[0]) 178 178 # 1D I is found at y =0 in the 2D pattern … … 181 181 else: 182 182 msg = "Q must be given as list of qx's and qy's" 183 raise ValueError , msg183 raise ValueError(msg) 184 184 185 185 def runXY(self, x=0.0): … … 196 196 else: 197 197 msg = "Q must be given as list of qx's and qy's" 198 raise ValueError , msg198 raise ValueError(msg) 199 199 200 200 def evalDistribution(self, qdist): … … 214 214 mesg = "evalDistribution is expecting an ndarray of " 215 215 mesg += "a list [qx,qy] where qx,qy are arrays." 216 raise RuntimeError , mesg216 raise RuntimeError(mesg) 217 217 218 218 class OMF2SLD(object): … … 313 313 :Params length: data length 314 314 """ 315 msg = "Error: Inconsistent data length." 316 if len(self.pos_x) != length: 317 raise ValueError, msg 318 if len(self.pos_y) != length: 319 raise ValueError, msg 320 if len(self.pos_z) != length: 321 raise ValueError, msg 322 if len(self.mx) != length: 323 raise ValueError, msg 324 if len(self.my) != length: 325 raise ValueError, msg 326 if len(self.mz) != length: 327 raise ValueError, msg 315 parts = (self.pos_x, self.pos_y, self.pos_z, self.mx, self.my, self.mz) 316 if any(len(v) != length for v in parts): 317 raise ValueError("Error: Inconsistent data length.") 328 318 329 319 def remove_null_points(self, remove=False, recenter=False): … … 415 405 msg = "Error: \n" 416 406 msg += "We accept only m as meshunit" 417 raise ValueError , msg407 raise ValueError(msg) 418 408 if s_line[0].lower().count("xbase") > 0: 419 409 xbase = s_line[1].lstrip() … … 485 475 msg = "%s is not supported: \n" % path 486 476 msg += "We accept only Text format OMF file." 487 raise RuntimeError , msg477 raise RuntimeError(msg) 488 478 489 479 class PDBReader(object): … … 605 595 return output 606 596 except: 607 raise RuntimeError , "%s is not a sld file" % path597 raise RuntimeError("%s is not a sld file" % path) 608 598 609 599 def write(self, path, data): … … 697 687 return output 698 688 except: 699 raise RuntimeError , "%s is not a sld file" % path689 raise RuntimeError("%s is not a sld file" % path) 700 690 701 691 def write(self, path, data): … … 706 696 """ 707 697 if path is None: 708 raise ValueError , "Missing the file path."698 raise ValueError("Missing the file path.") 709 699 if data is None: 710 raise ValueError , "Missing the data to save."700 raise ValueError("Missing the data to save.") 711 701 x_val = data.pos_x 712 702 y_val = data.pos_y -
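One non-mechanical change in sas_gen.py: six copies of the same length check in OMF2SLD collapse into a single `any()` over the position and moment arrays. A self-contained sketch of the consolidated check; it is written as a free function here purely for illustration, in the source it is a method:

```python
def check_data_length(length, *arrays):
    # Before: six separate `if len(...) != length: raise ValueError, msg` blocks.
    # After: one pass over all arrays with a generator expression.
    if any(len(v) != length for v in arrays):
        raise ValueError("Error: Inconsistent data length.")

check_data_length(3, [0, 1, 2], [4, 5, 6])    # passes silently
# check_data_length(3, [0, 1], [4, 5, 6])     # raises ValueError
```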
src/sas/sascalc/corfunc/corfunc_calculator.py
a859f99 → 574adc7:
     # Only process data of the class Data1D
     if not issubclass(data.__class__, Data1D):
-        raise ValueError, "Data must be of the type DataLoader.Data1D"
+        raise ValueError("Data must be of the type DataLoader.Data1D")
 …
         err = ("Incorrect transform type supplied, must be 'fourier'",
                " or 'hilbert'")
-        raise ValueError, err
+        raise ValueError(err)

         self._transform_thread.queue()
src/sas/sascalc/data_util/calcthread.py
ra1b8fee r574adc7 6 6 from __future__ import print_function 7 7 8 import thread9 8 import traceback 10 9 import sys 11 10 import logging 11 try: 12 import _thread as thread 13 except ImportError: # CRUFT: python 2 support 14 import thread 12 15 13 16 if sys.platform.count("darwin") > 0: 14 17 import time 15 18 stime = time.time() 16 19 17 20 def clock(): 18 21 return time.time() - stime 19 22 20 23 def sleep(t): 21 24 return time.sleep(t) … … 35 38 CalcThread.__init__, passing it the keyword arguments for 36 39 yieldtime, worktime, update and complete. 37 40 38 41 When defining the compute() method you need to include code which 39 42 allows the GUI to run. They are as follows: :: … … 211 214 self._lock.release() 212 215 self._time_for_update += 1e6 # No more updates 213 216 214 217 self.updatefn(**kwargs) 215 218 sleep(self.yieldtime) -
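calcthread.py stays importable on both interpreters through an import shim: Python 3 renamed the low-level `thread` module to `_thread`, so the new name is tried first and the old one kept as a fallback marked as Python 2 cruft. The shim as it appears in the diff:

```python
try:
    import _thread as thread   # Python 3 name
except ImportError:            # CRUFT: python 2 support
    import thread              # Python 2 name
```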
src/sas/sascalc/data_util/nxsunit.py
r8e9536f r574adc7 13 13 in the NeXus definition files. 14 14 15 Unlike other units packages, this package does not carry the units along with 15 Unlike other units packages, this package does not carry the units along with 16 16 the value but merely provides a conversion function for transforming values. 17 17 … … 68 68 Ack! Allows, e.g., Coulomb and coulomb even though Coulomb is not 69 69 a unit because some NeXus files store it that way! 70 70 71 71 Returns a dictionary of names and scales. 72 72 """ … … 78 78 n=1e-9,p=1e-12,f=1e-15) 79 79 map = {abbr:1} 80 map.update([(P+abbr,scale) for (P,scale) in short_prefix.ite ritems()])80 map.update([(P+abbr,scale) for (P,scale) in short_prefix.items()]) 81 81 for name in [unit,unit.capitalize()]: 82 82 map.update({name:1,name+'s':1}) 83 map.update([(P+name,scale) for (P,scale) in prefix.ite ritems()])84 map.update([(P+'*'+name,scale) for (P,scale) in prefix.ite ritems()])85 map.update([(P+name+'s',scale) for (P,scale) in prefix.ite ritems()])83 map.update([(P+name,scale) for (P,scale) in prefix.items()]) 84 map.update([(P+'*'+name,scale) for (P,scale) in prefix.items()]) 85 map.update([(P+name+'s',scale) for (P,scale) in prefix.items()]) 86 86 return map 87 87 … … 91 91 """ 92 92 map = {} 93 map.update([(name,scale) for name,scale in kw.ite ritems()])94 map.update([(name+'s',scale) for name,scale in kw.ite ritems()])93 map.update([(name,scale) for name,scale in kw.items()]) 94 map.update([(name+'s',scale) for name,scale in kw.items()]) 95 95 return map 96 96 … … 101 101 * WARNING * this will incorrect transform 10^3 to 103. 102 102 """ 103 s.update((k.replace('^',''),v) 104 for k, v in s.items()103 s.update((k.replace('^',''),v) 104 for k, v in list(s.items()) 105 105 if '^' in k) 106 106 … … 130 130 temperature.update(_build_metric_units('Celcius', 'C')) 131 131 temperature.update(_build_metric_units('celcius', 'C')) 132 132 133 133 charge = _build_metric_units('coulomb','C') 134 134 charge.update({'microAmp*hour':0.0036}) 135 135 136 136 sld = { '10^-6 Angstrom^-2': 1e-6, 'Angstrom^-2': 1 } 137 Q = { 'invA': 1, 'invAng': 1, 'invAngstroms': 1, '1/A': 1, 137 Q = { 'invA': 1, 'invAng': 1, 'invAngstroms': 1, '1/A': 1, 138 138 '10^-3 Angstrom^-1': 1e-3, '1/cm': 1e-8, '1/m': 1e-10, 139 139 'nm^-1': 0.1, '1/nm': 0.1, 'n_m^-1': 0.1 } … … 189 189 190 190 def _check(expect,get): 191 if expect != get: raise ValueError, "Expected %s but got %s"%(expect,get) 191 if expect != get: 192 raise ValueError("Expected %s but got %s"%(expect, get)) 192 193 #print expect,"==",get 193 194 … … 202 203 _check(123,Converter('a.u.')(123,units='s')) # arbitrary units always returns the same value 203 204 _check(123,Converter('a.u.')(123,units='')) # arbitrary units always returns the same value 204 try: Converter('help') 205 except KeyError: pass 206 else: raise Exception("unknown unit did not raise an error") 205 try: 206 Converter('help') 207 except KeyError: 208 pass 209 else: 210 raise Exception("unknown unit did not raise an error") 207 211 208 212 # TODO: more tests -
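nxsunit.py swaps every `dict.iteritems()` for `dict.items()`, which exists on both interpreters (a list on Python 2, a view on Python 3; either is fine for the single-pass loops used here). A small sketch of the prefix-expansion pattern with made-up values, not the real unit table:

```python
prefix = dict(milli=1e-3, micro=1e-6, nano=1e-9)   # illustrative prefixes

unit_map = {'meter': 1.0}
# old: unit_map.update([(P + 'meter', scale) for (P, scale) in prefix.iteritems()])
unit_map.update((P + 'meter', scale) for (P, scale) in prefix.items())
```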
src/sas/sascalc/data_util/odict.py
rb699768 r574adc7 39 39 """ 40 40 A class of dictionary that keeps the insertion order of keys. 41 41 42 42 All appropriate methods return keys, items, or values in an ordered way. 43 43 44 44 All normal dictionary methods are available. Update and comparison is 45 45 restricted to other OrderedDict objects. 46 46 47 47 Various sequence methods are available, including the ability to explicitly 48 48 mutate the key ordering. 49 49 50 50 __contains__ tests: 51 51 52 52 >>> d = OrderedDict(((1, 3),)) 53 53 >>> 1 in d … … 55 55 >>> 4 in d 56 56 0 57 57 58 58 __getitem__ tests: 59 59 60 60 >>> OrderedDict(((1, 3), (3, 2), (2, 1)))[2] 61 61 1 … … 63 63 Traceback (most recent call last): 64 64 KeyError: 4 65 65 66 66 __len__ tests: 67 67 68 68 >>> len(OrderedDict()) 69 69 0 70 70 >>> len(OrderedDict(((1, 3), (3, 2), (2, 1)))) 71 71 3 72 72 73 73 get tests: 74 74 75 75 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 76 76 >>> d.get(1) … … 82 82 >>> d 83 83 OrderedDict([(1, 3), (3, 2), (2, 1)]) 84 84 85 85 has_key tests: 86 86 87 87 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 88 88 >>> d.has_key(1) … … 96 96 Create a new ordered dictionary. Cannot init from a normal dict, 97 97 nor from kwargs, since items order is undefined in those cases. 98 98 99 99 If the ``strict`` keyword argument is ``True`` (``False`` is the 100 100 default) then when doing slice assignment - the ``OrderedDict`` you are 101 101 assigning from *must not* contain any keys in the remaining dict. 102 102 103 103 >>> OrderedDict() 104 104 OrderedDict([]) … … 283 283 """ 284 284 Used for __repr__ and __str__ 285 285 286 286 >>> r1 = repr(OrderedDict((('a', 'b'), ('c', 'd'), ('e', 'f')))) 287 287 >>> r1 … … 321 321 >>> d 322 322 OrderedDict([(0, 1), (1, 2), (5, 6), (7, 8), (3, 4)]) 323 323 324 324 >>> a = OrderedDict(((0, 1), (1, 2), (2, 3)), strict=True) 325 325 >>> a[3] = 4 … … 345 345 >>> a 346 346 OrderedDict([(3, 4), (2, 3), (1, 2), (0, 1)]) 347 347 348 348 >>> d = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)]) 349 349 >>> d[:1] = 3 350 350 Traceback (most recent call last): 351 351 TypeError: slice assignment requires an OrderedDict 352 352 353 353 >>> d = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)]) 354 354 >>> d[:1] = OrderedDict([(9, 8)]) … … 444 444 """ 445 445 Implemented so that access to ``sequence`` raises a warning. 446 446 447 447 >>> d = OrderedDict() 448 448 >>> d.sequence … … 463 463 """ 464 464 To allow deepcopy to work with OrderedDict. 465 465 466 466 >>> from copy import deepcopy 467 467 >>> a = OrderedDict([(1, 1), (2, 2), (3, 3)]) … … 490 490 def items(self): 491 491 """ 492 ``items`` returns a list of tuples representing all the 492 ``items`` returns a list of tuples representing all the 493 493 ``(key, value)`` pairs in the dictionary. 494 494 495 495 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 496 496 >>> d.items() … … 505 505 """ 506 506 Return a list of keys in the ``OrderedDict``. 507 507 508 508 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 509 509 >>> d.keys() … … 515 515 """ 516 516 Return a list of all the values in the OrderedDict. 517 517 518 518 Optionally you can pass in a list of values, which will replace the 519 519 current list. The value list must be the same len as the OrderedDict. 
520 520 521 521 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 522 522 >>> d.values() … … 596 596 """ 597 597 No dict.pop in Python 2.2, gotta reimplement it 598 598 599 599 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 600 600 >>> d.pop(3) … … 612 612 """ 613 613 if len(args) > 1: 614 raise TypeError ,('pop expected at most 2 arguments, got %s' %614 raise TypeError('pop expected at most 2 arguments, got %s' % 615 615 (len(args) + 1)) 616 616 if key in self: … … 628 628 Delete and return an item specified by index, not a random one as in 629 629 dict. The index is -1 by default (the last item). 630 630 631 631 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 632 632 >>> d.popitem() … … 674 674 """ 675 675 Update from another OrderedDict or sequence of (key, value) pairs 676 676 677 677 >>> d = OrderedDict(((1, 0), (0, 1))) 678 678 >>> d.update(OrderedDict(((1, 3), (3, 2), (2, 1)))) … … 706 706 """ 707 707 Rename the key for a given value, without modifying sequence order. 708 708 709 709 For the case where new_key already exists this raise an exception, 710 710 since if new_key exists, it is ambiguous as to what happens to the 711 711 associated values, and the position of new_key in the sequence. 712 712 713 713 >>> od = OrderedDict() 714 714 >>> od['a'] = 1 … … 732 732 raise ValueError("New key already exists: %r" % new_key) 733 733 # rename sequence entry 734 value = self[old_key] 734 value = self[old_key] 735 735 old_idx = self._sequence.index(old_key) 736 736 self._sequence[old_idx] = new_key … … 742 742 """ 743 743 This method allows you to set the items in the dict. 744 744 745 745 It takes a list of tuples - of the same sort returned by the ``items`` 746 746 method. 747 747 748 748 >>> d = OrderedDict() 749 749 >>> d.setitems(((3, 1), (2, 3), (1, 2))) … … 760 760 replace the current set. This must contain the same set of keys, but 761 761 need not be in the same order. 762 762 763 763 If you pass in new keys that don't match, a ``KeyError`` will be 764 764 raised. 765 765 766 766 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 767 767 >>> d.keys() … … 791 791 You can pass in a list of values, which will replace the 792 792 current list. The value list must be the same len as the OrderedDict. 793 793 794 794 (Or a ``ValueError`` is raised.) 795 795 796 796 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 797 797 >>> d.setvalues((1, 2, 3)) … … 813 813 """ 814 814 Return the position of the specified key in the OrderedDict. 815 815 816 816 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 817 817 >>> d.index(3) … … 826 826 """ 827 827 Takes ``index``, ``key``, and ``value`` as arguments. 828 828 829 829 Sets ``key`` to ``value``, so that ``key`` is at position ``index`` in 830 830 the OrderedDict. 831 831 832 832 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 833 833 >>> d.insert(0, 4, 0) … … 850 850 """ 851 851 Reverse the order of the OrderedDict. 852 852 853 853 >>> d = OrderedDict(((1, 3), (3, 2), (2, 1))) 854 854 >>> d.reverse() … … 861 861 """ 862 862 Sort the key order in the OrderedDict. 863 863 864 864 This method takes the same arguments as the ``list.sort`` method on 865 865 your version of Python. 866 866 867 867 >>> d = OrderedDict(((4, 1), (2, 2), (3, 3), (1, 4))) 868 868 >>> d.sort() … … 876 876 """ 877 877 Custom object for accessing the keys of an OrderedDict. 878 878 879 879 Can be called like the normal ``OrderedDict.keys`` method, but also 880 880 supports indexing and sequence methods. 
… … 897 897 You cannot assign to keys, but you can do slice assignment to re-order 898 898 them. 899 899 900 900 You can only do slice assignment if the new set of keys is a reordering 901 901 of the original set. … … 967 967 """ 968 968 Custom object for accessing the items of an OrderedDict. 969 969 970 970 Can be called like the normal ``OrderedDict.items`` method, but also 971 971 supports indexing and sequence methods. … … 1077 1077 """ 1078 1078 Custom object for accessing the values of an OrderedDict. 1079 1079 1080 1080 Can be called like the normal ``OrderedDict.values`` method, but also 1081 1081 supports indexing and sequence methods. … … 1099 1099 """ 1100 1100 Set the value at position i to value. 1101 1101 1102 1102 You can only do slice assignment to values if you supply a sequence of 1103 1103 equal length to the slice you are replacing. … … 1168 1168 Experimental version of OrderedDict that has a custom object for ``keys``, 1169 1169 ``values``, and ``items``. 1170 1170 1171 1171 These are callable sequence objects that work as methods, or can be 1172 1172 manipulated directly as sequences. 1173 1173 1174 1174 Test for ``keys``, ``items`` and ``values``. 1175 1175 1176 1176 >>> d = SequenceOrderedDict(((1, 2), (2, 3), (3, 4))) 1177 1177 >>> d … … 1293 1293 >>> d 1294 1294 SequenceOrderedDict([(1, 1), (2, 2), (3, 3)]) 1295 1295 1296 1296 >>> d = SequenceOrderedDict(((1, 2), (2, 3), (3, 4))) 1297 1297 >>> d -
src/sas/sascalc/data_util/uncertainty.py
r9a5097c r574adc7 2 2 Uncertainty propagation class for arithmetic, log and exp. 3 3 4 Based on scalars or numpy vectors, this class allows you to store and 4 Based on scalars or numpy vectors, this class allows you to store and 5 5 manipulate values+uncertainties, with propagation of gaussian error for 6 6 addition, subtraction, multiplication, division, power, exp and log. 7 7 8 8 Storage properties are determined by the numbers used to set the value 9 and uncertainty. Be sure to use floating point uncertainty vectors 9 and uncertainty. Be sure to use floating point uncertainty vectors 10 10 for inplace operations since numpy does not do automatic type conversion. 11 11 Normal operations can use mixed integer and floating point. In place … … 18 18 19 19 import numpy as np 20 import err1d 21 from formatnum import format_uncertainty 20 21 from .import err1d 22 from .formatnum import format_uncertainty 22 23 23 24 __all__ = ['Uncertainty'] … … 28 29 # Make standard deviation available 29 30 def _getdx(self): return np.sqrt(self.variance) 30 def _setdx(self,dx): 31 def _setdx(self,dx): 31 32 # Direct operation 32 33 # variance = dx**2 … … 38 39 # Constructor 39 40 def __init__(self, x, variance=None): 40 self.x, self.variance = x, variance 41 41 self.x, self.variance = x, variance 42 42 43 # Numpy array slicing operations 43 def __len__(self): 44 def __len__(self): 44 45 return len(self.x) 45 def __getitem__(self,key): 46 def __getitem__(self,key): 46 47 return Uncertainty(self.x[key],self.variance[key]) 47 48 def __setitem__(self,key,value): … … 137 138 def __idiv__(self, other): return self.__itruediv__(other) 138 139 139 140 140 141 # Unary ops 141 142 def __neg__(self): … … 151 152 return format_uncertainty(self.x,np.sqrt(self.variance)) 152 153 else: 153 return [format_uncertainty(v,dv) 154 return [format_uncertainty(v,dv) 154 155 for v,dv in zip(self.x,np.sqrt(self.variance))] 155 156 def __repr__(self): … … 219 220 z = a/4 220 221 assert z.x == 5./4 and z.variance == 3./4**2 221 222 222 223 # Reverse scalar operations 223 224 z = 4+a … … 229 230 z = 4/a 230 231 assert z.x == 4./5 and abs(z.variance - 3./5**4 * 4**2) < 1e-15 231 232 232 233 # Power operations 233 234 z = a**2 … … 250 251 assert z.x == 5./4 and abs(z.variance - (3./5**2 + 2./4**2)*(5./4)**2) < 1e-15 251 252 252 # ===== Inplace operations ===== 253 # ===== Inplace operations ===== 253 254 # Scalar operations 254 255 y = a+0; y += 4 … … 308 309 assert (z.x == 5./4).all() 309 310 assert (abs(z.variance - (3./5**2 + 2./4**2)*(5./4)**2) < 1e-15).all() 310 311 311 312 # printing; note that sqrt(3) ~ 1.7 312 313 assert str(Uncertainty(5,3)) == "5.0(17)" -
src/sas/sascalc/dataloader/__init__.py
b699768 → 574adc7:
-from data_info import *
-from manipulations import *
-from readers import *
+from .data_info import *
+from .manipulations import *
+from .readers import *
src/sas/sascalc/dataloader/data_info.py
r17e257b5 r574adc7 716 716 self.y_unit = '1/cm' 717 717 except: # the data is not recognized/supported, and the user is notified 718 raise (TypeError,'data not recognized, check documentation for supported 1D data formats')718 raise TypeError('data not recognized, check documentation for supported 1D data formats') 719 719 720 720 def __str__(self): … … 796 796 len(self.y) != len(other.y): 797 797 msg = "Unable to perform operation: data length are not equal" 798 raise ValueError , msg798 raise ValueError(msg) 799 799 # Here we could also extrapolate between data points 800 800 TOLERANCE = 0.01 … … 802 802 if math.fabs((self.x[i] - other.x[i])/self.x[i]) > TOLERANCE: 803 803 msg = "Incompatible data sets: x-values do not match" 804 raise ValueError , msg804 raise ValueError(msg) 805 805 806 806 # Check that the other data set has errors, otherwise … … 876 876 if not isinstance(other, Data1D): 877 877 msg = "Unable to perform operation: different types of data set" 878 raise ValueError , msg878 raise ValueError(msg) 879 879 return True 880 880 … … 948 948 949 949 if len(self.detector) > 0: 950 raise RuntimeError , "Data2D: Detector bank already filled at init"950 raise RuntimeError("Data2D: Detector bank already filled at init") 951 951 952 952 def __str__(self): … … 1020 1020 len(self.qy_data) != len(other.qy_data): 1021 1021 msg = "Unable to perform operation: data length are not equal" 1022 raise ValueError , msg1022 raise ValueError(msg) 1023 1023 for ind in range(len(self.data)): 1024 1024 if math.fabs((self.qx_data[ind] - other.qx_data[ind])/self.qx_data[ind]) > TOLERANCE: 1025 1025 msg = "Incompatible data sets: qx-values do not match: %s %s" % (self.qx_data[ind], other.qx_data[ind]) 1026 raise ValueError , msg1026 raise ValueError(msg) 1027 1027 if math.fabs((self.qy_data[ind] - other.qy_data[ind])/self.qy_data[ind]) > TOLERANCE: 1028 1028 msg = "Incompatible data sets: qy-values do not match: %s %s" % (self.qy_data[ind], other.qy_data[ind]) 1029 raise ValueError , msg1029 raise ValueError(msg) 1030 1030 1031 1031 # Check that the scales match … … 1108 1108 if not isinstance(other, Data2D): 1109 1109 msg = "Unable to perform operation: different types of data set" 1110 raise ValueError , msg1110 raise ValueError(msg) 1111 1111 return True 1112 1112 -
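data_info.py also fixes a subtle raise bug: `raise (TypeError, '...')` raises a tuple, so Python 2 re-raises only the tuple's first element and silently drops the message, while Python 3 rejects the tuple outright ("exceptions must derive from BaseException"). Constructing the exception directly works everywhere; a minimal sketch:

```python
def reject_unrecognized_data():
    # old: raise (TypeError, 'data not recognized, ...')  # message lost / invalid
    raise TypeError('data not recognized, check documentation for '
                    'supported 1D data formats')
```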
src/sas/sascalc/dataloader/file_reader_base_class.py
rae69c690 r574adc7 8 8 import re 9 9 import logging 10 from abc import abstractmethod 11 10 12 import numpy as np 11 from abc import abstractmethod 12 from loader_exceptions import NoKnownLoaderException, FileContentsException,\ 13 from .loader_exceptions import NoKnownLoaderException, FileContentsException,\ 13 14 DataReaderException, DefaultReaderException 14 from data_info import Data1D, Data2D, DataInfo, plottable_1D, plottable_2D,\15 from .data_info import Data1D, Data2D, DataInfo, plottable_1D, plottable_2D,\ 15 16 combine_data_info_with_plottable 16 17 -
src/sas/sascalc/dataloader/loader.py
rdcb91cf r574adc7 26 26 import time 27 27 from zipfile import ZipFile 28 28 29 from sas.sascalc.data_util.registry import ExtensionRegistry 30 29 31 # Default readers are defined in the readers sub-module 30 import readers31 from loader_exceptions import NoKnownLoaderException, FileContentsException,\32 from . import readers 33 from .loader_exceptions import NoKnownLoaderException, FileContentsException,\ 32 34 DefaultReaderException 33 from readers import ascii_reader34 from readers import cansas_reader35 from readers import cansas_reader_HDF535 from .readers import ascii_reader 36 from .readers import cansas_reader 37 from .readers import cansas_reader_HDF5 36 38 37 39 logger = logging.getLogger(__name__) … … 341 343 # Raise an error if there are no matching extensions 342 344 if len(writers) == 0: 343 raise ValueError , "Unknown file type for " + path345 raise ValueError("Unknown file type for " + path) 344 346 # All done 345 347 return writers … … 360 362 try: 361 363 return fn(path, data) 362 except :364 except Exception: 363 365 pass # give other loaders a chance to succeed 364 366 # If we get here it is because all loaders failed -
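loader.py narrows the bare `except:` in its fall-through loop to `except Exception:`, so KeyboardInterrupt and SystemExit are no longer swallowed while each candidate still gets its chance. An illustrative sketch of that loop, not the actual Loader API:

```python
def save_with_first_working_writer(writers, path, data):
    """Try each writer in turn; report failure only after all have failed."""
    for fn in writers:
        try:
            return fn(path, data)
        except Exception:   # a bare `except:` would also trap KeyboardInterrupt
            pass            # give other writers a chance to succeed
    raise ValueError("Unknown file type for " + path)
```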
src/sas/sascalc/dataloader/manipulations.py
324e0bf → 574adc7:
 #from data_info import plottable_2D
-from data_info import Data1D
+from .data_info import Data1D
src/sas/sascalc/dataloader/readers/__init__.py
7a5d066 → 574adc7:
 # Method to associate extensions to default readers
-from associations import read_associations
+from .associations import read_associations
src/sas/sascalc/dataloader/readers/abs_reader.py
rad92c5a r574adc7 31 31 # List of allowed extensions 32 32 ext = ['.abs'] 33 33 34 34 def get_file_contents(self): 35 """ 35 """ 36 36 Get the contents of the file 37 37 38 38 :raise RuntimeError: when the file can't be opened 39 39 :raise ValueError: when the length of the data vectors are inconsistent -
src/sas/sascalc/dataloader/readers/ascii_reader.py
rf994e8b1 r574adc7 130 130 # Reset # of lines of data candidates 131 131 candidate_lines = 0 132 132 133 133 if not is_data: 134 134 self.set_all_to_none() -
src/sas/sascalc/dataloader/readers/associations.py
rce8c7bd r574adc7 40 40 """ 41 41 # For each FileType entry, get the associated reader and extension 42 for ext, reader in settings.ite ritems():42 for ext, reader in settings.items(): 43 43 if reader is not None and ext is not None: 44 44 # Associate the extension with a particular reader … … 47 47 # and remove the extra line below. 48 48 try: 49 exec "import %s" % reader50 exec "loader.associate_file_type('%s', %s)" % (ext.lower(),51 reader)52 exec "loader.associate_file_type('%s', %s)" % (ext.upper(),53 reader)49 exec("from . import %s" % reader) 50 exec("loader.associate_file_type('%s', %s)" 51 % (ext.lower(), reader)) 52 exec("loader.associate_file_type('%s', %s)" 53 % (ext.upper(), reader)) 54 54 except: 55 55 msg = "read_associations: skipping association" -
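associations.py replaces the Python 2 `exec` statement with the `exec()` function and makes the dynamically built import relative. Only the function form parses under Python 3; a self-contained illustration of statement versus function with a generic module, not the reader table:

```python
namespace = {}
# Python 2 statement form (a SyntaxError under Python 3):
#     exec "import math" in namespace
# Portable function form used by the changeset:
exec("import math", namespace)
exec("root2 = math.sqrt(2)", namespace)
print(namespace["root2"])   # 1.4142135623730951
```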
src/sas/sascalc/dataloader/readers/cansas_reader.py
rae69c690 r574adc7 1 1 import logging 2 import numpy as np3 2 import os 4 3 import sys 5 4 import datetime 6 5 import inspect 7 # For saving individual sections of data 8 from sas.sascalc.dataloader.data_info import Data1D, Data2D, DataInfo, \ 9 plottable_1D, plottable_2D 10 from sas.sascalc.dataloader.data_info import Collimation, TransmissionSpectrum, \ 11 Detector, Process, Aperture 12 from sas.sascalc.dataloader.data_info import \ 13 combine_data_info_with_plottable as combine_data 14 import sas.sascalc.dataloader.readers.xml_reader as xml_reader 15 from sas.sascalc.dataloader.readers.xml_reader import XMLreader 16 from sas.sascalc.dataloader.readers.cansas_constants import CansasConstants, CurrentLevel 17 from sas.sascalc.dataloader.loader_exceptions import FileContentsException, \ 18 DefaultReaderException, DataReaderException 6 7 import numpy as np 19 8 20 9 # The following 2 imports *ARE* used. Do not remove either. … … 23 12 24 13 from lxml import etree 14 15 from sas.sascalc.data_util.nxsunit import Converter 16 17 # For saving individual sections of data 18 from ..data_info import Data1D, Data2D, DataInfo, plottable_1D, plottable_2D, \ 19 Collimation, TransmissionSpectrum, Detector, Process, Aperture, \ 20 combine_data_info_with_plottable as combine_data 21 from ..loader_exceptions import FileContentsException, DefaultReaderException, \ 22 DataReaderException 23 from . import xml_reader 24 from .xml_reader import XMLreader 25 from .cansas_constants import CansasConstants, CurrentLevel 25 26 26 27 logger = logging.getLogger(__name__) … … 34 35 "as much of the data as possible.\n\n" 35 36 HAS_CONVERTER = True 36 try:37 from sas.sascalc.data_util.nxsunit import Converter38 except ImportError:39 HAS_CONVERTER = False40 37 41 38 CONSTANTS = CansasConstants() … … 163 160 raise fc_exc 164 161 except Exception as e: # Convert all other exceptions to FileContentsExceptions 165 raise FileContentsException(e.message) 162 raise 163 raise FileContentsException(str(e)) 166 164 167 165 … … 632 630 else: 633 631 save_in = "current_datainfo" 634 exec "default_unit = self.{0}.{1}".format(save_in, unitname) 635 if local_unit and default_unit and local_unit.lower() != default_unit.lower() \ 636 and local_unit.lower() != "none": 637 if HAS_CONVERTER == True: 632 exec("default_unit = self.{0}.{1}".format(save_in, unitname)) 633 if (local_unit and default_unit 634 and local_unit.lower() != default_unit.lower() 635 and local_unit.lower() != "none"): 636 if HAS_CONVERTER: 638 637 # Check local units - bad units raise KeyError 639 638 data_conv_q = Converter(local_unit) … … 654 653 err_msg += "expecting [{0}]".format(default_unit) 655 654 value_unit = local_unit 656 except :655 except Exception: 657 656 err_msg = "CanSAS reader: unknown error converting " 658 657 err_msg += "\"{0}\" unit [{1}]" … … 908 907 point = self.create_element("Idata") 909 908 node.append(point) 910 qx = ','.join( [str(datainfo.qx_data[i]) for i in xrange(len(datainfo.qx_data))])911 qy = ','.join( [str(datainfo.qy_data[i]) for i in xrange(len(datainfo.qy_data))])912 intensity = ','.join( [str(datainfo.data[i]) for i in xrange(len(datainfo.data))])909 qx = ','.join(str(v) for v in datainfo.qx_data) 910 qy = ','.join(str(v) for v in datainfo.qy_data) 911 intensity = ','.join(str(v) for v in datainfo.data) 913 912 914 913 self.write_node(point, "Qx", qx, … … 919 918 {'unit': datainfo._zunit}) 920 919 if datainfo.err_data is not None: 921 err = ','.join([str(datainfo.err_data[i]) for i in 922 xrange(len(datainfo.err_data))]) 920 err = 
','.join(str(v) for v in datainfo.err_data) 923 921 self.write_node(point, "Idev", err, 924 922 {'unit': datainfo._zunit}) 925 923 if datainfo.dqy_data is not None: 926 dqy = ','.join([str(datainfo.dqy_data[i]) for i in 927 xrange(len(datainfo.dqy_data))]) 924 dqy = ','.join(str(v) for v in datainfo.dqy_data) 928 925 self.write_node(point, "Qydev", dqy, 929 926 {'unit': datainfo._yunit}) 930 927 if datainfo.dqx_data is not None: 931 dqx = ','.join([str(datainfo.dqx_data[i]) for i in 932 xrange(len(datainfo.dqx_data))]) 928 dqx = ','.join(str(v) for v in datainfo.dqx_data) 933 929 self.write_node(point, "Qxdev", dqx, 934 930 {'unit': datainfo._xunit}) 935 931 if datainfo.mask is not None: 936 mask = ','.join( 937 ["1" if datainfo.mask[i] else "0" 938 for i in xrange(len(datainfo.mask))]) 932 mask = ','.join("1" if v else "0" for v in datainfo.mask) 939 933 self.write_node(point, "Mask", mask) 940 934 … … 1280 1274 try: 1281 1275 value = float(entry.text) 1282 except :1276 except ValueError: 1283 1277 value = None 1284 1278 … … 1289 1283 if units is not None: 1290 1284 toks = variable.split('.') 1291 local_unit = None 1292 exec "local_unit = storage.%s_unit" % toks[0] 1285 exec("local_unit = storage.%s_unit" % toks[0]) 1293 1286 if local_unit is not None and units.lower() != local_unit.lower(): 1294 1287 if HAS_CONVERTER == True: 1295 1288 try: 1296 1289 conv = Converter(units) 1297 exec "storage.%s = %g" % \1298 (variable, conv(value, units=local_unit))1299 except :1290 exec("storage.%s = %g" % 1291 - (variable, conv(value, units=local_unit))) 1292 except Exception: 1300 1293 _, exc_value, _ = sys.exc_info() 1301 1294 err_mess = "CanSAS reader: could not convert" … … 1306 1299 logger.info(err_mess) 1307 1300 else: 1308 raise ValueError , err_mess1301 raise ValueError(err_mess) 1309 1302 else: 1310 1303 err_mess = "CanSAS reader: unrecognized %s unit [%s];"\ … … 1315 1308 logger.info(err_mess) 1316 1309 else: 1317 raise ValueError , err_mess1310 raise ValueError(err_mess) 1318 1311 else: 1319 exec "storage.%s = value" % variable1312 exec("storage.%s = value" % variable) 1320 1313 else: 1321 exec "storage.%s = value" % variable1314 exec("storage.%s = value" % variable) 1322 1315 1323 1316 # DO NOT REMOVE - used in saving and loading panel states. … … 1339 1332 entry = get_content(location, node) 1340 1333 if entry is not None and entry.text is not None: 1341 exec "storage.%s = entry.text.strip()" % variable1334 exec("storage.%s = entry.text.strip()" % variable) 1342 1335 1343 1336 # DO NOT REMOVE Called by outside packages: -
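The 2-D writer in cansas_reader.py drops both `xrange` (removed in Python 3) and the index-based list comprehensions: a generator expression over the values feeds str.join directly without building an intermediate list. A runnable sketch with made-up q values:

```python
import numpy as np

qx_data = np.array([0.01, 0.02, 0.03])
mask = np.array([True, False, True])

# old: ','.join([str(qx_data[i]) for i in xrange(len(qx_data))])
qx = ','.join(str(v) for v in qx_data)            # '0.01,0.02,0.03'
msk = ','.join("1" if v else "0" for v in mask)   # '1,0,1'
```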
src/sas/sascalc/dataloader/readers/danse_reader.py
ra78a02f r574adc7 14 14 import math 15 15 import os 16 import logging 17 16 18 import numpy as np 17 import logging 18 from sas.sascalc.dataloader.data_info import plottable_2D, DataInfo, Detector19 from sas.sascalc.dataloader.manipulations import reader2D_converter20 from sas.sascalc.dataloader.file_reader_base_class import FileReader21 from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DataReaderException19 20 from ..data_info import plottable_2D, DataInfo, Detector 21 from ..manipulations import reader2D_converter 22 from ..file_reader_base_class import FileReader 23 from ..loader_exceptions import FileContentsException, DataReaderException 22 24 23 25 logger = logging.getLogger(__name__) -
src/sas/sascalc/dataloader/readers/sesans_reader.py
rbe43448 r574adc7 6 6 Jurrian Bakker 7 7 """ 8 import os 9 8 10 import numpy as np 9 import os 10 from sas.sascalc.dataloader.file_reader_base_class import FileReader11 from sas.sascalc.dataloader.data_info import plottable_1D, DataInfo12 from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DataReaderException11 12 from ..file_reader_base_class import FileReader 13 from ..data_info import plottable_1D, DataInfo 14 from ..loader_exceptions import FileContentsException, DataReaderException 13 15 14 16 # Check whether we have a converter available -
src/sas/sascalc/dataloader/readers/tiff_reader.py
r959eb01 r574adc7 2 2 #This software was developed by the University of Tennessee as part of the 3 3 #Distributed Data Analysis of Neutron Scattering Experiments (DANSE) 4 #project funded by the US National Science Foundation. 4 #project funded by the US National Science Foundation. 5 5 #See the license text in license.txt 6 6 #copyright 2008, University of Tennessee … … 31 31 ## Extension 32 32 ext = ['.tif', '.tiff'] 33 33 34 34 def read(self, filename=None): 35 35 """ 36 36 Open and read the data in a file 37 37 38 38 :param file: path of the file 39 39 """ … … 44 44 except: 45 45 msg = "tiff_reader: could not load file. Missing Image module." 46 raise RuntimeError , msg47 46 raise RuntimeError(msg) 47 48 48 # Instantiate data object 49 49 output = Data2D() 50 50 output.filename = os.path.basename(filename) 51 51 52 52 # Read in the image 53 53 try: 54 54 im = Image.open(filename) 55 55 except: 56 raise RuntimeError , "cannot open %s"%(filename)56 raise RuntimeError("cannot open %s"%(filename)) 57 57 data = im.getdata() 58 58 … … 61 61 output.err_data = np.zeros([im.size[0], im.size[1]]) 62 62 output.mask = np.ones([im.size[0], im.size[1]], dtype=bool) 63 63 64 64 # Initialize 65 65 x_vals = [] … … 69 69 for i_x in range(im.size[0]): 70 70 x_vals.append(i_x) 71 71 72 72 itot = 0 73 73 for i_y in range(im.size[1]): … … 80 80 logger.error("tiff_reader: had to skip a non-float point") 81 81 continue 82 82 83 83 # Get bin number 84 84 if math.fmod(itot, im.size[0]) == 0: … … 87 87 else: 88 88 i_x += 1 89 89 90 90 output.data[im.size[1] - 1 - i_y][i_x] = value 91 91 92 92 itot += 1 93 93 94 94 output.xbins = im.size[0] 95 95 output.ybins = im.size[1] … … 102 102 output.ymin = 0 103 103 output.ymax = im.size[0] - 1 104 104 105 105 # Store loading process information 106 106 output.meta_data['loader'] = self.type_name -
src/sas/sascalc/dataloader/readers/xml_reader.py
rcd57c7d4 r574adc7 16 16 17 17 import logging 18 18 19 from lxml import etree 19 20 from lxml.builder import E 21 20 22 from sas.sascalc.dataloader.file_reader_base_class import FileReader 21 23 … … 151 153 Converts an etree element into a string 152 154 """ 153 return etree.tostring(elem, pretty_print=pretty_print, \155 return etree.tostring(elem, pretty_print=pretty_print, 154 156 encoding=encoding) 155 157 -
src/sas/sascalc/file_converter/cansas_writer.py
r7432acb r574adc7 32 32 valid_class = all([issubclass(data.__class__, Data1D) for data in frame_data]) 33 33 if not valid_class: 34 raise RuntimeError ,("The cansas writer expects an array of "34 raise RuntimeError("The cansas writer expects an array of " 35 35 "Data1D instances") 36 36 -
src/sas/sascalc/file_converter/nxcansas_writer.py
r5e906207 r574adc7 166 166 'wavelength_max': 'wavelength_max', 167 167 'wavelength_spread': 'incident_wavelength_spread' } 168 for sasname, nxname in wavelength_keys.ite ritems():168 for sasname, nxname in wavelength_keys.items(): 169 169 value = getattr(data_info.source, sasname) 170 170 units = getattr(data_info.source, sasname + '_unit') -
src/sas/sascalc/fit/AbstractFitEngine.py
r50fcb09 r574adc7 251 251 msg = "FitData1D: invalid error array " 252 252 msg += "%d <> %d" % (np.shape(self.dy), np.size(fx)) 253 raise RuntimeError , msg253 raise RuntimeError(msg) 254 254 return (self.y[self.idx] - fx[self.idx]) / self.dy[self.idx], fx[self.idx] 255 255 -
src/sas/sascalc/fit/Loader.py
ra1b8fee r574adc7 18 18 self.dy = dy 19 19 self.filename = None 20 20 21 21 def set_filename(self, path=None): 22 22 """ 23 Store path into a variable.If the user doesn't give 23 Store path into a variable.If the user doesn't give 24 24 a path as a parameter a pop-up 25 25 window appears to select the file. 26 26 27 27 :param path: the path given by the user 28 28 29 29 """ 30 30 self.filename = path 31 31 32 32 def get_filename(self): 33 33 """ return the file's path""" 34 34 return self.filename 35 35 36 36 def set_values(self): 37 37 """ Store the values loaded from file in local variables""" … … 42 42 self.x = [] 43 43 self.y = [] 44 self.dx = [] 44 self.dx = [] 45 45 self.dy = [] 46 46 for line in lines: … … 50 50 y = float(toks[1]) 51 51 dy = float(toks[2]) 52 52 53 53 self.x.append(x) 54 54 self.y.append(y) … … 59 59 # Sanity check 60 60 if not len(self.x) == len(self.dx): 61 raise ValueError , "x and dx have different length"61 raise ValueError("x and dx have different length") 62 62 if not len(self.y) == len(self.dy): 63 raise ValueError , "y and dy have different length"64 65 63 raise ValueError("y and dy have different length") 64 65 66 66 def get_values(self): 67 67 """ Return x, y, dx, dy""" 68 68 return self.x, self.y, self.dx, self.dy 69 69 70 70 def load_data(self, data): 71 71 """ Return plottable""" … … 77 77 #Load its View class 78 78 #plottable.reset_view() 79 80 81 if __name__ == "__main__": 79 80 81 if __name__ == "__main__": 82 82 load = Load() 83 83 load.set_filename("testdata_line.txt") 84 print(load.get_filename()) 84 print(load.get_filename()) 85 85 load.set_values() 86 86 print(load.get_values()) 87 88 87 -
src/sas/sascalc/fit/MultiplicationModel.py
r7432acb r574adc7 109 109 """ 110 110 ##set dispersion only from p_model 111 for name , value in self.p_model.dispersion.ite ritems():111 for name , value in self.p_model.dispersion.items(): 112 112 self.dispersion[name] = value 113 113 … … 135 135 """ 136 136 137 for name , value in self.p_model.params.ite ritems():137 for name , value in self.p_model.params.items(): 138 138 if not name in self.params.keys() and name not in self.excluded_params: 139 139 self.params[name] = value 140 140 141 for name , value in self.s_model.params.ite ritems():141 for name , value in self.s_model.params.items(): 142 142 #Remove the radius_effective from the (P*S) model parameters. 143 143 if not name in self.params.keys() and name not in self.excluded_params: … … 155 155 this model's details 156 156 """ 157 for name, detail in self.p_model.details.ite ritems():157 for name, detail in self.p_model.details.items(): 158 158 if name not in self.excluded_params: 159 159 self.details[name] = detail 160 160 161 for name , detail in self.s_model.details.ite ritems():161 for name , detail in self.s_model.details.items(): 162 162 if not name in self.details.keys() or name not in self.exluded_params: 163 163 self.details[name] = detail … … 245 245 return 246 246 247 raise ValueError , "Model does not contain parameter %s" % name247 raise ValueError("Model does not contain parameter %s" % name) 248 248 249 249 -
src/sas/sascalc/fit/expression.py
ra1b8fee r574adc7 59 59 occur multiple times. The return value is a set with the elements in 60 60 no particular order. 61 61 62 62 This is the first step in computing a dependency graph. 63 63 """ … … 81 81 offset = end 82 82 pieces.append(expr[offset:]) 83 83 84 84 # Join the pieces and return them 85 85 return "".join(pieces) … … 88 88 """ 89 89 Returns a list of pair-wise dependencies from the parameter expressions. 90 90 91 91 For example, if p3 = p1+p2, then find_dependencies([p1,p2,p3]) will 92 92 return [(p3,p1),(p3,p2)]. For base expressions without dependencies, … … 110 110 """ 111 111 Find the parameter substitution we need so that expressions can 112 be evaluated without having to traverse a chain of 112 be evaluated without having to traverse a chain of 113 113 model.layer.parameter.value 114 114 """ … … 122 122 return definition, substitution 123 123 124 def no_constraints(): 124 def no_constraints(): 125 125 """ 126 126 This parameter set has no constraints between the parameters. … … 163 163 164 164 Parameter names are assumed to contain only _.a-zA-Z0-9#[] 165 165 166 166 Both names are provided for inverse functions, e.g., acos and arccos. 167 167 168 168 Should try running the function to identify syntax errors before 169 169 running it in a fit. 170 170 171 171 Use help(fn) to see the code generated for the returned function fn. 172 172 dis.dis(fn) will show the corresponding python vm instructions. … … 239 239 if independent == emptyset: 240 240 cycleset = ", ".join(str(s) for s in left) 241 raise ValueError ,"Cyclic dependencies amongst %s"%cycleset241 raise ValueError("Cyclic dependencies amongst %s"%cycleset) 242 242 243 243 # The possibly resolvable items are those that depend on the independents … … 267 267 n.sort() 268 268 items = list(items); items.sort() 269 raise Exception,"%s expect %s to contain %s for %s"%(msg,n,items,pairs)269 raise ValueError("%s expect %s to contain %s for %s"%(msg,n,items,pairs)) 270 270 for lo,hi in pairs: 271 271 if lo in n and hi in n and n.index(lo) >= n.index(hi): 272 raise Exception,"%s expect %s before %s in %s for %s"%(msg,lo,hi,n,pairs)272 raise ValueError("%s expect %s before %s in %s for %s"%(msg,lo,hi,n,pairs)) 273 273 274 274 def test_deps(): … … 288 288 # Cycle test 289 289 pairs = [(1,4),(4,3),(4,5),(5,1)] 290 try: n = order_dependencies(pairs) 291 except ValueError: pass 292 else: raise Exception,"test3 expect ValueError exception for %s"%(pairs,) 290 try: 291 n = order_dependencies(pairs) 292 except ValueError: 293 pass 294 else: 295 raise ValueError("test3 expect ValueError exception for %s"%(pairs,)) 293 296 294 297 # large test for gross speed check … … 308 311 import inspect, dis 309 312 import math 310 313 311 314 symtab = {'a.b.x':1, 'a.c':2, 'a.b':3, 'b.x':4} 312 315 expr = 'a.b.x + sin(4*pi*a.c) + a.b.x/a.b' 313 316 314 317 # Check symbol lookup 315 318 assert _symbols(expr, symtab) == set([1,2,3]) … … 357 360 expected = 2*math.pi*math.sin(5/.1875) + 6 358 361 assert p2.value == expected,"Value was %s, not %s"%(p2.value,expected) 359 362 360 363 # Check empty dependency set doesn't crash 361 364 fn = compile_constraints(*world(p1,p3)) … … 381 384 fn() 382 385 assert p5.value == 2.07,"Value for %s was %s"%(p5.expression,p5.value) 383 386 384 387 385 388 # Verify that we capture invalid expressions 386 for expr in ['G4.cage', 'M0.cage', 'M1.G1 + *2', 389 for expr in ['G4.cage', 'M0.cage', 'M1.G1 + *2', 387 390 'piddle', 388 391 '5; import sys; print "p0wned"', -
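The self-test block in expression.py is reflowed into the standard try/except/else idiom for asserting that a call raises: the `else` branch runs only when no exception escaped, which is the failure case. A generic sketch of the idiom; the helper name is made up:

```python
def expect_value_error(fn, *args):
    """Fail loudly unless fn(*args) raises ValueError (illustrative helper)."""
    try:
        fn(*args)
    except ValueError:
        pass   # expected, e.g. for cyclic dependencies in order_dependencies
    else:
        raise Exception("expected ValueError exception for %r" % (args,))

# e.g. expect_value_error(order_dependencies, [(1, 4), (4, 3), (4, 5), (5, 1)])
```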
src/sas/sascalc/fit/pagestate.py
r277257f r574adc7 313 313 314 314 if len(self.disp_obj_dict) > 0: 315 for k, v in self.disp_obj_dict.ite ritems():315 for k, v in self.disp_obj_dict.items(): 316 316 obj.disp_obj_dict[k] = v 317 317 if len(self.disp_cb_dict) > 0: 318 for k, v in self.disp_cb_dict.ite ritems():318 for k, v in self.disp_cb_dict.items(): 319 319 obj.disp_cb_dict[k] = v 320 320 if len(self.values) > 0: 321 for k, v in self.values.ite ritems():321 for k, v in self.values.items(): 322 322 obj.values[k] = v 323 323 if len(self.weights) > 0: 324 for k, v in self.weights.ite ritems():324 for k, v in self.weights.items(): 325 325 obj.weights[k] = v 326 326 obj.enable_smearer = copy.deepcopy(self.enable_smearer) … … 347 347 obj.version = copy.deepcopy(self.version) 348 348 349 for name, state in self.saved_states.ite ritems():349 for name, state in self.saved_states.items(): 350 350 copy_name = copy.deepcopy(name) 351 351 copy_state = state.clone() … … 430 430 """ 431 431 p_map = [] 432 for name, info in params.ite ritems():432 for name, info in params.items(): 433 433 if ".fittable" in name or ".std" in name or ".upper" in name or \ 434 434 ".lower" in name or ".units" in name: … … 475 475 formfactor, str_params = convert.convert_model( 476 476 self.formfactorcombobox, str_pars, False, self.version) 477 for key, value in str_params.ite ritems():477 for key, value in str_params.items(): 478 478 params[key] = value 479 479 … … 835 835 element = newdoc.createElement(item[0]) 836 836 value_list = getattr(self, item[1]) 837 for key, value in value_list.ite ritems():837 for key, value in value_list.items(): 838 838 sub_element = newdoc.createElement(key) 839 839 sub_element.setAttribute('name', str(key)) … … 848 848 element = newdoc.createElement(tagname) 849 849 value_list = getattr(self, varname) 850 for key, value in value_list.ite ritems():850 for key, value in value_list.items(): 851 851 sub_element = newdoc.createElement(key) 852 852 sub_element.setAttribute('name', str(key)) … … 950 950 msg = "PageState no longer supports non-CanSAS" 951 951 msg += " format for fitting files" 952 raise RuntimeError , msg952 raise RuntimeError(msg) 953 953 954 954 if node.get('version'): … … 1241 1241 else: 1242 1242 self.call_back(format=ext) 1243 raise RuntimeError , "%s is not a file" % path1243 raise RuntimeError("%s is not a file" % path) 1244 1244 1245 1245 # Return output consistent with the loader's api -
src/sas/sascalc/fit/pluginmodel.py
r5213d22 r574adc7 35 35 return self.function(x_val)*self.function(y_val) 36 36 elif x.__class__.__name__ == 'tuple': 37 raise ValueError , "Tuples are not allowed as input to BaseComponent models"37 raise ValueError("Tuples are not allowed as input to BaseComponent models") 38 38 else: 39 39 return self.function(x) … … 52 52 return self.function(x[0])*self.function(x[1]) 53 53 elif x.__class__.__name__ == 'tuple': 54 raise ValueError , "Tuples are not allowed as input to BaseComponent models"54 raise ValueError("Tuples are not allowed as input to BaseComponent models") 55 55 else: 56 56 return self.function(x) -
src/sas/sascalc/invariant/invariant.py
rb1f20d1 r574adc7 424 424 if not issubclass(data.__class__, LoaderData1D): 425 425 #Process only data that inherited from DataLoader.Data_info.Data1D 426 raise ValueError , "Data must be of type DataLoader.Data1D"426 raise ValueError("Data must be of type DataLoader.Data1D") 427 427 #from copy import deepcopy 428 428 new_data = (self._scale * data) - self._background … … 484 484 msg = "Length x and y must be equal" 485 485 msg += " and greater than 1; got x=%s, y=%s" % (len(data.x), len(data.y)) 486 raise ValueError , msg486 raise ValueError(msg) 487 487 else: 488 488 # Take care of smeared data … … 507 507 #iterate between for element different 508 508 #from the first and the last 509 for i in xrange(1, n - 1):509 for i in range(1, n - 1): 510 510 dxi = (data.x[i + 1] - data.x[i - 1]) / 2 511 511 total += gx[i] * data.y[i] * dxi … … 533 533 msg = "Length of data.x and data.y must be equal" 534 534 msg += " and greater than 1; got x=%s, y=%s" % (len(data.x), len(data.y)) 535 raise ValueError , msg535 raise ValueError(msg) 536 536 else: 537 537 #Create error for data without dy error … … 560 560 #iterate between for element different 561 561 #from the first and the last 562 for i in xrange(1, n - 1):562 for i in range(1, n - 1): 563 563 dxi = (data.x[i + 1] - data.x[i - 1]) / 2 564 564 total += (gx[i] * dy[i] * dxi) ** 2 … … 742 742 range = range.lower() 743 743 if range not in ['high', 'low']: 744 raise ValueError , "Extrapolation range should be 'high' or 'low'"744 raise ValueError("Extrapolation range should be 'high' or 'low'") 745 745 function = function.lower() 746 746 if function not in ['power_law', 'guinier']: 747 747 msg = "Extrapolation function should be 'guinier' or 'power_law'" 748 raise ValueError , msg748 raise ValueError(msg) 749 749 750 750 if range == 'high': 751 751 if function != 'power_law': 752 752 msg = "Extrapolation only allows a power law at high Q" 753 raise ValueError , msg753 raise ValueError(msg) 754 754 self._high_extrapolation_npts = npts 755 755 self._high_extrapolation_power = power … … 852 852 """ 853 853 if contrast <= 0: 854 raise ValueError , "The contrast parameter must be greater than zero"854 raise ValueError("The contrast parameter must be greater than zero") 855 855 856 856 # Make sure Q star is up to date … … 859 859 if self._qstar <= 0: 860 860 msg = "Invalid invariant: Invariant Q* must be greater than zero" 861 raise RuntimeError , msg861 raise RuntimeError(msg) 862 862 863 863 # Compute intermediate constant … … 869 869 if discrim < 0: 870 870 msg = "Could not compute the volume fraction: negative discriminant" 871 raise RuntimeError , msg871 raise RuntimeError(msg) 872 872 elif discrim == 0: 873 873 return 1 / 2 … … 881 881 return volume2 882 882 msg = "Could not compute the volume fraction: inconsistent results" 883 raise RuntimeError , msg883 raise RuntimeError(msg) 884 884 885 885 def get_qstar_with_error(self, extrapolation=None): -
src/sas/sascalc/pr/fit/AbstractFitEngine.py
r50fcb09 r574adc7 251 251 msg = "FitData1D: invalid error array " 252 252 msg += "%d <> %d" % (np.shape(self.dy), np.size(fx)) 253 raise RuntimeError , msg253 raise RuntimeError(msg) 254 254 return (self.y[self.idx] - fx[self.idx]) / self.dy[self.idx], fx[self.idx] 255 255 -
src/sas/sascalc/pr/fit/Loader.py
ra1b8fee r574adc7 18 18 self.dy = dy 19 19 self.filename = None 20 20 21 21 def set_filename(self, path=None): 22 22 """ 23 Store path into a variable.If the user doesn't give 23 Store path into a variable.If the user doesn't give 24 24 a path as a parameter a pop-up 25 25 window appears to select the file. 26 26 27 27 :param path: the path given by the user 28 28 29 29 """ 30 30 self.filename = path 31 31 32 32 def get_filename(self): 33 33 """ return the file's path""" 34 34 return self.filename 35 35 36 36 def set_values(self): 37 37 """ Store the values loaded from file in local variables""" … … 42 42 self.x = [] 43 43 self.y = [] 44 self.dx = [] 44 self.dx = [] 45 45 self.dy = [] 46 46 for line in lines: … … 50 50 y = float(toks[1]) 51 51 dy = float(toks[2]) 52 52 53 53 self.x.append(x) 54 54 self.y.append(y) … … 59 59 # Sanity check 60 60 if not len(self.x) == len(self.dx): 61 raise ValueError , "x and dx have different length"61 raise ValueError("x and dx have different length") 62 62 if not len(self.y) == len(self.dy): 63 raise ValueError , "y and dy have different length"64 65 63 raise ValueError("y and dy have different length") 64 65 66 66 def get_values(self): 67 67 """ Return x, y, dx, dy""" 68 68 return self.x, self.y, self.dx, self.dy 69 69 70 70 def load_data(self, data): 71 71 """ Return plottable""" … … 77 77 #Load its View class 78 78 #plottable.reset_view() 79 80 81 if __name__ == "__main__": 79 80 81 if __name__ == "__main__": 82 82 load = Load() 83 83 load.set_filename("testdata_line.txt") 84 print(load.get_filename()) 84 print(load.get_filename()) 85 85 load.set_values() 86 86 print(load.get_values()) 87 88 87 -
src/sas/sascalc/pr/fit/expression.py
ra1b8fee r574adc7 59 59 occur multiple times. The return value is a set with the elements in 60 60 no particular order. 61 61 62 62 This is the first step in computing a dependency graph. 63 63 """ … … 81 81 offset = end 82 82 pieces.append(expr[offset:]) 83 83 84 84 # Join the pieces and return them 85 85 return "".join(pieces) … … 88 88 """ 89 89 Returns a list of pair-wise dependencies from the parameter expressions. 90 90 91 91 For example, if p3 = p1+p2, then find_dependencies([p1,p2,p3]) will 92 92 return [(p3,p1),(p3,p2)]. For base expressions without dependencies, … … 110 110 """ 111 111 Find the parameter substitution we need so that expressions can 112 be evaluated without having to traverse a chain of 112 be evaluated without having to traverse a chain of 113 113 model.layer.parameter.value 114 114 """ … … 122 122 return definition, substitution 123 123 124 def no_constraints(): 124 def no_constraints(): 125 125 """ 126 126 This parameter set has no constraints between the parameters. … … 163 163 164 164 Parameter names are assumed to contain only _.a-zA-Z0-9#[] 165 165 166 166 Both names are provided for inverse functions, e.g., acos and arccos. 167 167 168 168 Should try running the function to identify syntax errors before 169 169 running it in a fit. 170 170 171 171 Use help(fn) to see the code generated for the returned function fn. 172 172 dis.dis(fn) will show the corresponding python vm instructions. … … 239 239 if independent == emptyset: 240 240 cycleset = ", ".join(str(s) for s in left) 241 raise ValueError ,"Cyclic dependencies amongst %s"%cycleset241 raise ValueError("Cyclic dependencies amongst %s"%cycleset) 242 242 243 243 # The possibly resolvable items are those that depend on the independents … … 267 267 n.sort() 268 268 items = list(items); items.sort() 269 raise Exception,"%s expect %s to contain %s for %s"%(msg,n,items,pairs)269 raise ValueError("%s expect %s to contain %s for %s"%(msg,n,items,pairs)) 270 270 for lo,hi in pairs: 271 271 if lo in n and hi in n and n.index(lo) >= n.index(hi): 272 raise Exception,"%s expect %s before %s in %s for %s"%(msg,lo,hi,n,pairs)272 raise ValueError("%s expect %s before %s in %s for %s"%(msg,lo,hi,n,pairs)) 273 273 274 274 def test_deps(): … … 288 288 # Cycle test 289 289 pairs = [(1,4),(4,3),(4,5),(5,1)] 290 try: n = order_dependencies(pairs) 291 except ValueError: pass 292 else: raise Exception,"test3 expect ValueError exception for %s"%(pairs,) 290 try: 291 n = order_dependencies(pairs) 292 except ValueError: 293 pass 294 else: 295 raise Exception("test3 expect ValueError exception for %s"%(pairs,)) 293 296 294 297 # large test for gross speed check … … 308 311 import inspect, dis 309 312 import math 310 313 311 314 symtab = {'a.b.x':1, 'a.c':2, 'a.b':3, 'b.x':4} 312 315 expr = 'a.b.x + sin(4*pi*a.c) + a.b.x/a.b' 313 316 314 317 # Check symbol lookup 315 318 assert _symbols(expr, symtab) == set([1,2,3]) … … 357 360 expected = 2*math.pi*math.sin(5/.1875) + 6 358 361 assert p2.value == expected,"Value was %s, not %s"%(p2.value,expected) 359 362 360 363 # Check empty dependency set doesn't crash 361 364 fn = compile_constraints(*world(p1,p3)) … … 381 384 fn() 382 385 assert p5.value == 2.07,"Value for %s was %s"%(p5.expression,p5.value) 383 386 384 387 385 388 # Verify that we capture invalid expressions 386 for expr in ['G4.cage', 'M0.cage', 'M1.G1 + *2', 389 for expr in ['G4.cage', 'M0.cage', 'M1.G1 + *2', 387 390 'piddle', 388 391 '5; import sys; print "p0wned"', -
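In expression.py the substantive edits are that the two-argument raises in the ordering checks become calls (now ValueError), and the one-line try/except/else in the cycle test is expanded into an ordinary block. A small self-contained sketch of that expanded test pattern; order_dependencies below is a stand-in that always reports a cycle, not the real implementation:

    def order_dependencies(pairs):
        """Stand-in: pretend every input contains a dependency cycle."""
        raise ValueError("Cyclic dependencies amongst %s" % (pairs,))

    pairs = [(1, 4), (4, 3), (4, 5), (5, 1)]
    try:
        n = order_dependencies(pairs)
    except ValueError:
        pass                                  # cycle detected, as the test expects
    else:
        raise Exception("expected ValueError for %s" % (pairs,))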
src/sas/sascalc/pr/invertor.py
rcb62bd5 r574adc7 148 148 msg = "Invertor: one of your q-values is zero. " 149 149 msg += "Delete that entry before proceeding" 150 raise ValueError , msg150 raise ValueError(msg) 151 151 return self.set_x(value) 152 152 elif name == 'y': … … 159 159 msg = "Invertor: d_max must be greater than zero." 160 160 msg += "Correct that entry before proceeding" 161 raise ValueError , msg161 raise ValueError(msg) 162 162 return self.set_dmax(value) 163 163 elif name == 'q_min': … … 181 181 return self.set_est_bck(0) 182 182 else: 183 raise ValueError , "Invertor: est_bck can only be True or False"183 raise ValueError("Invertor: est_bck can only be True or False") 184 184 185 185 return Cinvertor.__setattr__(self, name, value) … … 331 331 if self.is_valid() <= 0: 332 332 msg = "Invertor.invert: Data array are of different length" 333 raise RuntimeError , msg333 raise RuntimeError(msg) 334 334 335 335 p = np.ones(nfunc) … … 364 364 if self.is_valid() <= 0: 365 365 msg = "Invertor.invert: Data arrays are of different length" 366 raise RuntimeError , msg366 raise RuntimeError(msg) 367 367 368 368 p = np.ones(nfunc) … … 448 448 if self.is_valid() < 0: 449 449 msg = "Invertor: invalid data; incompatible data lengths." 450 raise RuntimeError , msg450 raise RuntimeError(msg) 451 451 452 452 self.nfunc = nfunc … … 473 473 self._get_matrix(nfunc, nq, a, b) 474 474 except: 475 raise RuntimeError , "Invertor: could not invert I(Q)\n %s" % sys.exc_value475 raise RuntimeError("Invertor: could not invert I(Q)\n %s" % sys.exc_value) 476 476 477 477 # Perform the inversion (least square fit) … … 756 756 except: 757 757 msg = "Invertor.from_file: corrupted file\n%s" % sys.exc_value 758 raise RuntimeError , msg758 raise RuntimeError(msg) 759 759 else: 760 760 msg = "Invertor.from_file: '%s' is not a file" % str(path) 761 raise RuntimeError , msg761 raise RuntimeError(msg) -
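One detail worth flagging in the invertor.py hunks: the raise statements are now calls, but the messages still interpolate sys.exc_value, which exists only on Python 2 (Python 3 dropped it in favour of sys.exc_info() or `except ... as exc`). A hedged sketch of a fully portable form; the wrapper and its message are illustrative, not the module's code:

    import sys

    def invert_safely(do_invert):
        """Hypothetical wrapper: report the underlying error without sys.exc_value."""
        try:
            return do_invert()
        except Exception as exc:              # valid on Python 2.6+ and all Python 3
            msg = "could not invert I(Q)\n %s" % exc
            # Equivalent when only a bare `except:` is available:
            #     msg = "could not invert I(Q)\n %s" % sys.exc_info()[1]
            raise RuntimeError(msg)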
test/sasdataloader/test/utest_abs_reader.py
rae69c690 r574adc7 224 224 _found2 = True 225 225 226 if _found1 == False or _found2 == False: 227 raise RuntimeError, "Could not find all data %s %s" % (_found1, _found2) 226 if not _found1 or not _found2: 227 raise RuntimeError("Could not find all data %s %s" 228 % (_found1, _found2)) 228 229 229 230 # Detector … … 269 270 _found_term1 = True 270 271 271 if _found_term1 == False or _found_term2 == False: 272 raise RuntimeError, "Could not find all process terms %s %s" % (_found_term1, _found_term2) 272 if not _found_term1 or not _found_term2: 273 raise RuntimeError("Could not find all process terms %s %s" 274 % (_found_term1, _found_term2)) 273 275 274 276 def test_writer(self): -
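The utest_abs_reader.py edits pair the raise fix with replacing explicit `== False` comparisons by `not`, which is what PEP 8 recommends for boolean checks. A tiny sketch of the pattern, with made-up flag names:

    def assert_all_found(found_sample, found_detector):
        """Hypothetical check mirroring the test pattern above."""
        # Before:  if found_sample == False or found_detector == False:
        if not found_sample or not found_detector:
            raise RuntimeError("Could not find all data %s %s"
                               % (found_sample, found_detector))

    assert_all_found(True, True)              # passes silently; (True, False) would raise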
test/sasdataloader/test/utest_cansas.py
r17e257b5 r574adc7 2 2 Unit tests for the new recursive cansas reader 3 3 """ 4 import os 5 import sys 6 import StringIO 7 import unittest 8 import logging 9 import warnings 10 11 from lxml import etree 12 from lxml.etree import XMLSyntaxError 13 from xml.dom import minidom 14 4 15 import sas.sascalc.dataloader.readers.cansas_reader as cansas 5 16 from sas.sascalc.dataloader.loader import Loader … … 8 19 from sas.sascalc.dataloader.readers.cansas_reader import Reader 9 20 from sas.sascalc.dataloader.readers.cansas_constants import CansasConstants 10 11 import os12 import sys13 import urllib214 import StringIO15 import pylint as pylint16 import unittest17 import numpy as np18 import logging19 import warnings20 21 from lxml import etree22 from lxml.etree import XMLSyntaxError23 from xml.dom import minidom24 21 25 22 logger = logging.getLogger(__name__) … … 309 306 310 307 if __name__ == '__main__': 311 unittest.main() 308 unittest.main() -
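The utest_cansas.py hunk is mostly import hygiene: unused imports are dropped and the rest are regrouped standard library first, then third-party, then project modules, matching the PEP 8 ordering. A generic sketch of that grouping; the project import is a placeholder, not a real sasview module path:

    # Standard library
    import os
    import sys
    import logging
    import unittest

    # Third-party
    from lxml import etree

    # Project / local (placeholder shown for illustration)
    # from mypackage.readers import some_reader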
test/utest_sasview.py
rbe51cf6 r574adc7 62 62 proc = subprocess.Popen(code, shell=True, stdout=subprocess.PIPE, stderr = subprocess.STDOUT) 63 63 std_out, std_err = proc.communicate() 64 std_out, std_err = std_out.decode(), (std_err.decode() if std_err else None) 64 65 #print(">>>>>> standard out", file_path, "\n", std_out, "\n>>>>>>>>> end stdout", file_path) 65 66 #sys.exit() … … 109 110 if run_tests(dirs=dirs, all=all)>0: 110 111 sys.exit(1) 111
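The test-runner change above decodes the bytes returned by Popen.communicate() so the rest of the script can treat the output as text on Python 3. A self-contained sketch of the same idea; the echoed command is only an example, and on Python 3.7+ passing text=True to Popen (or universal_newlines=True on older versions) is an alternative that avoids the manual decode:

    import subprocess

    # communicate() returns bytes unless a text mode is requested.
    proc = subprocess.Popen("echo hello", shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    std_out, std_err = proc.communicate()
    std_out = std_out.decode()                        # bytes -> str
    std_err = std_err.decode() if std_err else None   # stderr was folded into stdout
    print("hello" in std_out)                         # True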