Changeset 9e6aeaf in sasview for src/sas/sascalc
- Timestamp: Sep 25, 2017 5:35:29 PM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 3cb3a51
- Parents: 9efdb29 (diff), 0315b63 (diff)
- Location: src/sas/sascalc

Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
- Files: 43 edited
src/sas/sascalc/dataloader/data_info.py
(diff: r574adc7 → r9e6aeaf)
         final_dataset.yaxis(data._yaxis, data._yunit)
         final_dataset.zaxis(data._zaxis, data._zunit)
-        final_dataset.x_bins = data.x_bins
-        final_dataset.y_bins = data.y_bins
+        if len(data.data.shape) == 2:
+            n_rows, n_cols = data.data.shape
+            final_dataset.y_bins = data.qy_data[0::int(n_cols)]
+            final_dataset.x_bins = data.qx_data[:int(n_cols)]
     else:
         return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \
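The new branch recovers the axis vectors directly from the flattened detector grid: with row-major flattening, the first n_cols entries of qx give the x axis and every n_cols-th entry of qy gives the y axis. A standalone numpy sketch of that indexing (all names hypothetical):

    import numpy as np

    # Build a small flattened detector grid like the readers produce.
    n_rows, n_cols = 3, 4
    qx_axis = np.linspace(-0.1, 0.1, n_cols)
    qy_axis = np.linspace(-0.05, 0.05, n_rows)
    qx_grid, qy_grid = np.meshgrid(qx_axis, qy_axis)    # shape (n_rows, n_cols)
    qx_data, qy_data = qx_grid.flatten(), qy_grid.flatten()

    x_bins = qx_data[:int(n_cols)]     # first row -> unique qx values
    y_bins = qy_data[0::int(n_cols)]   # first column -> unique qy values
    assert np.allclose(x_bins, qx_axis) and np.allclose(y_bins, qy_axis)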
src/sas/sascalc/dataloader/file_reader_base_class.py
(diff: r7b50f14 → r9e6aeaf)
             dataset.x_bins = dataset.qx_data[:int(n_cols)]
             dataset.data = dataset.data.flatten()
+            if len(dataset.data) > 0:
+                dataset.xmin = np.min(dataset.qx_data)
+                dataset.xmax = np.max(dataset.qx_data)
+                dataset.ymin = np.min(dataset.qy_data)
+                dataset.ymax = np.max(dataset.qx_data)
 
     def format_unit(self, unit=None):
…
         self.output = []
 
-    def remove_empty_q_values(self, has_error_dx=False, has_error_dy=False,
-                              has_error_dxl=False, has_error_dxw=False):
+    def data_cleanup(self):
+        """
+        Clean up the data sets and refresh everything
+        :return: None
+        """
+        self.remove_empty_q_values()
+        self.send_to_output()  # Combine datasets with DataInfo
+        self.current_datainfo = DataInfo()  # Reset DataInfo
+
+    def remove_empty_q_values(self):
         """
         Remove any point where Q == 0
         """
-        x = self.current_dataset.x
-        self.current_dataset.x = self.current_dataset.x[x != 0]
-        self.current_dataset.y = self.current_dataset.y[x != 0]
-        if has_error_dy:
-            self.current_dataset.dy = self.current_dataset.dy[x != 0]
-        if has_error_dx:
-            self.current_dataset.dx = self.current_dataset.dx[x != 0]
-        if has_error_dxl:
-            self.current_dataset.dxl = self.current_dataset.dxl[x != 0]
-        if has_error_dxw:
-            self.current_dataset.dxw = self.current_dataset.dxw[x != 0]
+        if isinstance(self.current_dataset, plottable_1D):
+            # Booleans for resolutions
+            has_error_dx = self.current_dataset.dx is not None
+            has_error_dxl = self.current_dataset.dxl is not None
+            has_error_dxw = self.current_dataset.dxw is not None
+            has_error_dy = self.current_dataset.dy is not None
+            # Create arrays of zeros for non-existent resolutions
+            if has_error_dxw and not has_error_dxl:
+                array_size = self.current_dataset.dxw.size - 1
+                self.current_dataset.dxl = np.append(self.current_dataset.dxl,
+                                                     np.zeros([array_size]))
+                has_error_dxl = True
+            elif has_error_dxl and not has_error_dxw:
+                array_size = self.current_dataset.dxl.size - 1
+                self.current_dataset.dxw = np.append(self.current_dataset.dxw,
+                                                     np.zeros([array_size]))
+                has_error_dxw = True
+            elif not has_error_dxl and not has_error_dxw and not has_error_dx:
+                array_size = self.current_dataset.x.size - 1
+                self.current_dataset.dx = np.append(self.current_dataset.dx,
+                                                    np.zeros([array_size]))
+                has_error_dx = True
+            if not has_error_dy:
+                array_size = self.current_dataset.y.size - 1
+                self.current_dataset.dy = np.append(self.current_dataset.dy,
+                                                    np.zeros([array_size]))
+                has_error_dy = True
+
+            # Remove points where q = 0
+            x = self.current_dataset.x
+            self.current_dataset.x = self.current_dataset.x[x != 0]
+            self.current_dataset.y = self.current_dataset.y[x != 0]
+            if has_error_dy:
+                self.current_dataset.dy = self.current_dataset.dy[x != 0]
+            if has_error_dx:
+                self.current_dataset.dx = self.current_dataset.dx[x != 0]
+            if has_error_dxl:
+                self.current_dataset.dxl = self.current_dataset.dxl[x != 0]
+            if has_error_dxw:
+                self.current_dataset.dxw = self.current_dataset.dxw[x != 0]
+        elif isinstance(self.current_dataset, plottable_2D):
+            has_error_dqx = self.current_dataset.dqx_data is not None
+            has_error_dqy = self.current_dataset.dqy_data is not None
+            has_error_dy = self.current_dataset.err_data is not None
+            has_mask = self.current_dataset.mask is not None
+            x = self.current_dataset.qx_data
+            self.current_dataset.data = self.current_dataset.data[x != 0]
+            self.current_dataset.qx_data = self.current_dataset.qx_data[x != 0]
+            self.current_dataset.qy_data = self.current_dataset.qy_data[x != 0]
+            self.current_dataset.q_data = np.sqrt(
+                np.square(self.current_dataset.qx_data) + np.square(
+                    self.current_dataset.qy_data))
+            if has_error_dy:
+                self.current_dataset.err_data = self.current_dataset.err_data[x != 0]
+            if has_error_dqx:
+                self.current_dataset.dqx_data = self.current_dataset.dqx_data[x != 0]
+            if has_error_dqy:
+                self.current_dataset.dqy_data = self.current_dataset.dqy_data[x != 0]
+            if has_mask:
+                self.current_dataset.mask = self.current_dataset.mask[x != 0]
 
     def reset_data_list(self, no_lines=0):
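The q == 0 filter above relies on numpy boolean-mask indexing: a single mask computed from the q array is reused on every companion array so that all of them stay the same length. A minimal self-contained sketch of that pattern (array values hypothetical):

    import numpy as np

    q = np.array([0.0, 0.01, 0.02, 0.03])
    intensity = np.array([5.0, 4.0, 3.0, 2.0])
    d_intensity = np.array([0.5, 0.4, 0.3, 0.2])

    mask = q != 0                      # True where the point is kept
    q, intensity, d_intensity = q[mask], intensity[mask], d_intensity[mask]
    assert len(q) == len(intensity) == len(d_intensity) == 3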
src/sas/sascalc/dataloader/readers/abs_reader.py
(diff: r46cf4c9 → r9e6aeaf)
             # Sample thickness in mm
             try:
-                value = float(line_toks[5])
+                # ABS writer adds 'C' with no space to the end of the
+                # thickness column. Remove it if it is there before
+                # converting the thickness.
+                if line_toks[5][:-1] not in '012345679.':
+                    value = float(line_toks[5][:-1])
+                else:
+                    value = float(line_toks[5])
                 if self.current_datainfo.sample.thickness_unit != 'cm':
                     conv = Converter('cm')
…
                 is_data_started = True
 
-        self.remove_empty_q_values(True, True)
+        self.remove_empty_q_values()
 
         # Sanity check
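The intent of the thickness fix is to drop a non-numeric trailing marker (the ABS writer's 'C') before calling float(). A hypothetical standalone sketch of that idea, written with an explicit last-character test:

    def parse_thickness(token):
        # Strip a trailing non-numeric marker such as the 'C' the ABS
        # writer appends, then convert (helper name hypothetical).
        if token and token[-1] not in '0123456789.':
            token = token[:-1]
        return float(token)

    assert parse_thickness("0.1C") == 0.1
    assert parse_thickness("0.1") == 0.1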
src/sas/sascalc/dataloader/readers/ascii_reader.py
(diff: rf7d720f → r9e6aeaf)
             raise FileContentsException(msg)
 
-        self.remove_empty_q_values(has_error_dx, has_error_dy)
+        self.remove_empty_q_values()
         self.current_dataset.xaxis("\\rm{Q}", 'A^{-1}')
         self.current_dataset.yaxis("\\rm{Intensity}", "cm^{-1}")
src/sas/sascalc/dataloader/readers/cansas_reader.py
(diff: r9efdb29 → r9e6aeaf)
         xml_file = self.f_open.name
         # We don't sure f_open since lxml handles opnening/closing files
-        if not self.f_open.closed:
-            self.f_open.close()
-
-        basename, _ = os.path.splitext(os.path.basename(xml_file))
-
         try:
             # Raises FileContentsException
             self.load_file_and_schema(xml_file, schema_path)
-            self.current_datainfo = DataInfo()
-            # Raises FileContentsException if file doesn't meet CanSAS schema
+            # Parse each SASentry
+            entry_list = self.xmlroot.xpath('/ns:SASroot/ns:SASentry',
+                                            namespaces={
+                                                'ns': self.cansas_defaults.get(
+                                                    "ns")
+                                            })
             self.is_cansas(self.extension)
-            self.invalid = False  # If we reach this point then file must be valid CanSAS
-
-            # Parse each SASentry
-            entry_list = self.xmlroot.xpath('/ns:SASroot/ns:SASentry', namespaces={
-                'ns': self.cansas_defaults.get("ns")
-            })
-            # Look for a SASentry
-            self.names.append("SASentry")
             self.set_processing_instructions()
-
             for entry in entry_list:
-                self.current_datainfo.filename = basename + self.extension
-                self.current_datainfo.meta_data["loader"] = "CanSAS XML 1D"
-                self.current_datainfo.meta_data[PREPROCESS] = self.processing_instructions
                 self._parse_entry(entry)
                 self.data_cleanup()
…
             invalid_xml = self.find_invalid_xml()
             if invalid_xml != "":
+                basename, _ = os.path.splitext(
+                    os.path.basename(self.f_open.name))
                 invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml
                 raise DataReaderException(invalid_xml)  # Handled by base class
…
         except Exception as e:  # Convert all other exceptions to FileContentsExceptions
             raise FileContentsException(str(e))
-
+        finally:
+            if not self.f_open.closed:
+                self.f_open.close()
 
     def load_file_and_schema(self, xml_file, schema_path=""):
…
         if not self._is_call_local() and not recurse:
             self.reset_state()
+        if not recurse:
+            self.current_datainfo = DataInfo()
+            # Raises FileContentsException if file doesn't meet CanSAS schema
+            self.invalid = False
+            # Look for a SASentry
             self.data = []
-            self.current_datainfo = DataInfo()
+            self.parent_class = "SASentry"
             self.names.append("SASentry")
-            self.parent_class = "SASentry"
+            self.current_datainfo.meta_data["loader"] = "CanSAS XML 1D"
+            self.current_datainfo.meta_data[
+                PREPROCESS] = self.processing_instructions
+        if self._is_call_local() and not recurse:
+            basename, _ = os.path.splitext(os.path.basename(self.f_open.name))
+            self.current_datainfo.filename = basename + self.extension
         # Create an empty dataset if no data has been passed to the reader
         if self.current_dataset is None:
-            self.current_dataset = plottable_1D(np.empty(0), np.empty(0),
-                                                np.empty(0), np.empty(0))
+            self._initialize_new_data_set(dom)
         self.base_ns = "{" + CANSAS_NS.get(self.cansas_version).get("ns") + "}"
…
             tagname_original = tagname
             # Skip this iteration when loading in save state information
-            if tagname == "fitting_plug_in" or tagname == "pr_inversion" or tagname == "invariant":
+            if tagname in ["fitting_plug_in", "pr_inversion", "invariant", "corfunc"]:
                 continue
             # Get where to store content
…
                 self._add_intermediate()
             else:
+                # TODO: Clean this up to make it faster (fewer if/elifs)
                 if isinstance(self.current_dataset, plottable_2D):
                     data_point = node.text
…
         self.sort_two_d_data()
         self.reset_data_list()
-        empty = None
-        return self.output[0], empty
-
-    def data_cleanup(self):
-        """
-        Clean up the data sets and refresh everything
-        :return: None
-        """
-        has_error_dx = self.current_dataset.dx is not None
-        has_error_dxl = self.current_dataset.dxl is not None
-        has_error_dxw = self.current_dataset.dxw is not None
-        has_error_dy = self.current_dataset.dy is not None
-        self.remove_empty_q_values(has_error_dx=has_error_dx,
-                                   has_error_dxl=has_error_dxl,
-                                   has_error_dxw=has_error_dxw,
-                                   has_error_dy=has_error_dy)
-        self.send_to_output()  # Combine datasets with DataInfo
-        self.current_datainfo = DataInfo()  # Reset DataInfo
+        return self.output[0], None
 
     def _is_call_local(self):
…
             self.aperture = Aperture()
         elif self.parent_class == 'SASdata':
-            self._check_for_empty_resolution()
             self.data.append(self.current_dataset)
…
         if 'unit' in attr and attr.get('unit') is not None:
             try:
-                local_unit = attr['unit']
+                unit = attr['unit']
+                unit_list = unit.split("|")
+                if len(unit_list) > 1:
+                    self.current_dataset.xaxis(unit_list[0].strip(),
+                                               unit_list[1].strip())
+                    local_unit = unit_list[1]
+                else:
+                    local_unit = unit
                 unitname = self.ns_list.current_level.get("unit", "")
                 if "SASdetector" in self.names:
…
         return node_value, value_unit
 
-    def _check_for_empty_resolution(self):
-        """
-        a method to check all resolution data sets are the same size as I and q
-        """
-        dql_exists = False
-        dqw_exists = False
-        dq_exists = False
-        di_exists = False
-        if self.current_dataset.dxl is not None:
-            dql_exists = True
-        if self.current_dataset.dxw is not None:
-            dqw_exists = True
-        if self.current_dataset.dx is not None:
-            dq_exists = True
-        if self.current_dataset.dy is not None:
-            di_exists = True
-        if dqw_exists and not dql_exists:
-            array_size = self.current_dataset.dxw.size
-            self.current_dataset.dxl = np.zeros(array_size)
-        elif dql_exists and not dqw_exists:
-            array_size = self.current_dataset.dxl.size
-            self.current_dataset.dxw = np.zeros(array_size)
-        elif not dql_exists and not dqw_exists and not dq_exists:
-            array_size = self.current_dataset.x.size
-            self.current_dataset.dx = np.append(self.current_dataset.dx,
-                                                np.zeros([array_size]))
-        if not di_exists:
-            array_size = self.current_dataset.y.size
-            self.current_dataset.dy = np.append(self.current_dataset.dy,
-                                                np.zeros([array_size]))
-
     def _initialize_new_data_set(self, node=None):
         if node is not None:
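One piece of this change moves the file-handle close into a finally: clause so the handle is released on success and on every exception path alike. A minimal self-contained sketch of that pattern (function and file name hypothetical):

    def read_xml(path):
        handle = open(path)
        try:
            return handle.read()       # may raise on bad content
        finally:
            if not handle.closed:
                handle.close()         # runs on success and on error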
src/sas/sascalc/fit/pagestate.py
(diff: r574adc7 → r9e6aeaf)
                                        namespaces=CANSAS_NS)
             for entry in entry_list:
-                try:
-                    sas_entry, _ = self._parse_save_state_entry(entry)
-                except:
-                    raise
                 fitstate = self._parse_state(entry)
-
                 # state could be None when .svs file is loaded
                 # in this case, skip appending to output
                 if fitstate is not None:
+                    try:
+                        sas_entry, _ = self._parse_save_state_entry(
+                            entry)
+                    except:
+                        raise
                     sas_entry.meta_data['fitstate'] = fitstate
                     sas_entry.filename = fitstate.file
src/sas/sascalc/calculator/BaseComponent.py
(diff: r9a5097c → r574adc7)
            qdist[1].__class__.__name__ != 'ndarray':
             msg = "evalDistribution expects a list of 2 ndarrays"
-            raise RuntimeError, msg
+            raise RuntimeError(msg)
 
         # Extract qx and qy for code clarity
…
             mesg = "evalDistribution is expecting an ndarray of scalar q-values"
             mesg += " or a list [qx,qy] where qx,qy are 2D ndarrays."
-            raise RuntimeError, mesg
+            raise RuntimeError(mesg)
…
                 return
 
-        raise ValueError, "Model does not contain parameter %s" % name
+        raise ValueError("Model does not contain parameter %s" % name)
 
     def getParam(self, name):
…
             return self.params[item]
 
-        raise ValueError, "Model does not contain parameter %s" % name
+        raise ValueError("Model does not contain parameter %s" % name)
 
     def getParamList(self):
…
         add
         """
-        raise ValueError, "Model operation are no longer supported"
+        raise ValueError("Model operation are no longer supported")
     def __sub__(self, other):
         """
         sub
         """
-        raise ValueError, "Model operation are no longer supported"
+        raise ValueError("Model operation are no longer supported")
     def __mul__(self, other):
         """
         mul
         """
-        raise ValueError, "Model operation are no longer supported"
+        raise ValueError("Model operation are no longer supported")
     def __div__(self, other):
         """
         div
         """
-        raise ValueError, "Model operation are no longer supported"
+        raise ValueError("Model operation are no longer supported")
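All of these edits are the same mechanical Python 2 to 3 migration: the two-token raise statement is a syntax error under Python 3, while calling the exception class parses under both. A tiny self-contained check:

    msg = "Model does not contain parameter alpha"
    try:
        # `raise ValueError, msg` is Python-2-only; the call form below
        # works on both interpreters.
        raise ValueError(msg)
    except ValueError as exc:
        print(exc)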
src/sas/sascalc/calculator/c_extensions/sld2i_module.cpp
(diff: rb523c0e → r1d014cb)
 #include <stdio.h>
 #include <sld2i.hh>
+
+#if PY_MAJOR_VERSION < 3
+typedef void (*PyCapsule_Destructor)(PyObject *);
+typedef void (*PyCObject_Destructor)(void *);
+#define PyCapsule_New(pointer, name, destructor) (PyCObject_FromVoidPtr(pointer, (PyCObject_Destructor)destructor))
+#define PyCapsule_GetPointer(capsule, name) (PyCObject_AsVoidPtr(capsule))
+#endif
+
 
 // Utilities
…
  * Delete a GenI object
  */
-void del_sld2i(void *ptr){
-	GenI* sld2i = static_cast<GenI *>(ptr);
+void
+del_sld2i(PyObject *obj){
+	GenI* sld2i = static_cast<GenI *>(PyCapsule_GetPointer(obj, "GenI"));
 	delete sld2i;
 	return;
…
 	OUTVECTOR(vol_pix_obj, vol_pix, n_x);
 	GenI* sld2i = new GenI(n_pix,x_val,y_val,z_val,sldn_val,mx_val,my_val,mz_val,vol_pix,inspin,outspin,stheta);
-	return PyCObject_FromVoidPtr(sld2i, del_sld2i);
+	return PyCapsule_New(sld2i, "GenI", del_sld2i);
 }
…
 	// Set the array pointers
-	void *temp = PyCObject_AsVoidPtr(gen_obj);
+	void *temp = PyCapsule_GetPointer(gen_obj, "GenI");
 	GenI* s = static_cast<GenI *>(temp);
…
 	// Set the array pointers
-	void *temp = PyCObject_AsVoidPtr(gen_obj);
+	void *temp = PyCapsule_GetPointer(gen_obj, "GenI");
 	GenI* s = static_cast<GenI *>(temp);
…
 };
 
-
-#ifndef PyMODINIT_FUNC	/* declarations for DLL import/export */
-#define PyMODINIT_FUNC void
+#define MODULE_DOC "Sld2i C Library"
+#define MODULE_NAME "sld2i"
+#define MODULE_INIT2 initsld2i
+#define MODULE_INIT3 PyInit_sld2i
+#define MODULE_METHODS module_methods
+
+/* ==== boilerplate python 2/3 interface bootstrap ==== */
+
+
+#if defined(WIN32) && !defined(__MINGW32__)
+#define DLL_EXPORT __declspec(dllexport)
+#else
+#define DLL_EXPORT
 #endif
-PyMODINIT_FUNC
-initsld2i(void)
-{
-    Py_InitModule3("sld2i", module_methods, "Sld2i module");
-}
+
+#if PY_MAJOR_VERSION >= 3
+
+DLL_EXPORT PyMODINIT_FUNC MODULE_INIT3(void)
+{
+  static struct PyModuleDef moduledef = {
+    PyModuleDef_HEAD_INIT,
+    MODULE_NAME,         /* m_name */
+    MODULE_DOC,          /* m_doc */
+    -1,                  /* m_size */
+    MODULE_METHODS,      /* m_methods */
+    NULL,                /* m_reload */
+    NULL,                /* m_traverse */
+    NULL,                /* m_clear */
+    NULL,                /* m_free */
+  };
+  return PyModule_Create(&moduledef);
+}
+
+#else /* !PY_MAJOR_VERSION >= 3 */
+
+DLL_EXPORT PyMODINIT_FUNC MODULE_INIT2(void)
+{
+  Py_InitModule4(MODULE_NAME,
+                 MODULE_METHODS,
+                 MODULE_DOC,
+                 0,
+                 PYTHON_API_VERSION
+                 );
+}
+
+#endif /* !PY_MAJOR_VERSION >= 3 */
src/sas/sascalc/calculator/instrument.py
(diff: r9a5097c → r574adc7)
         """
         # check if the wavelength is in range
-        if min(band) < self.min or\
-                max(band) > self.max:
-            raise
+        if min(band) < self.min or max(band) > self.max:
+            raise ValueError("band out of range")
         self.band = band
…
         """
         # check if the wavelength is in range
-        if wavelength < min(self.band) or\
-                wavelength > max(self.band):
-            raise
+        if wavelength < min(self.band) or wavelength > max(self.band):
+            raise ValueError("wavelength out of range")
         self.wavelength = wavelength
         validate(wavelength)
…
             plt.show()
         except:
-            raise RuntimeError, "Can't import matplotlib required to plot..."
+            raise RuntimeError("Can't import matplotlib required to plot...")
src/sas/sascalc/calculator/resolution_calculator.py
(diff: r7432acb → r574adc7)
 instrumental parameters.
 """
-from instrument import Sample
-from instrument import Detector
-from instrument import TOF as Neutron
-from instrument import Aperture
-# import math stuffs
-from math import pi
-from math import sqrt
+import sys
+from math import pi, sqrt
 import math
+import logging
+
 import numpy as np
-import sys
-import logging
+
+from .instrument import Sample
+from .instrument import Detector
+from .instrument import TOF as Neutron
+from .instrument import Aperture
 
 logger = logging.getLogger(__name__)
…
         if wavelength == 0:
             msg = "Can't compute the resolution: the wavelength is zero..."
-            raise RuntimeError, msg
+            raise RuntimeError(msg)
         return self.intensity
…
         if qx_min < self.qx_min:
             self.qx_min = qx_min
-            #raise ValueError, msg
+            #raise ValueError(msg)
         if qx_max > self.qx_max:
             self.qx_max = qx_max
-            #raise ValueError, msg
+            #raise ValueError(msg)
         if qy_min < self.qy_min:
             self.qy_min = qy_min
-            #raise ValueError, msg
+            #raise ValueError(msg)
         if qy_max > self.qy_max:
             self.qy_max = qy_max
-            #raise ValueError, msg
+            #raise ValueError(msg)
         if not full_cal:
             return None
…
         # otherwise
         else:
-            raise ValueError, " Improper input..."
+            raise ValueError(" Improper input...")
         # get them squared
         sigma = x_comp * x_comp
…
             #self.set_wavelength(wavelength)
         else:
-            raise
+            raise TypeError("invalid wavlength---should be list or float")
 
     def set_wave_spread(self, wavelength_spread):
…
             self.wave.set_wave_spread_list([wavelength_spread])
         else:
-            raise
+            raise TypeError("invalid wavelength spread---should be list or float")
 
     def set_wavelength(self, wavelength):
…
         """
         if len(size) < 1 or len(size) > 2:
-            raise RuntimeError, "The length of the size must be one or two."
+            raise RuntimeError("The length of the size must be one or two.")
         self.aperture.set_source_size(size)
…
         """
         if len(size) < 1 or len(size) > 2:
-            raise RuntimeError, "The length of the size must be one or two."
+            raise RuntimeError("The length of the size must be one or two.")
         self.aperture.set_sample_size(size)
…
         """
         if len(distance) < 1 or len(distance) > 2:
-            raise RuntimeError, "The length of the size must be one or two."
+            raise RuntimeError("The length of the size must be one or two.")
         self.aperture.set_sample_distance(distance)
…
         """
         if len(distance) < 1 or len(distance) > 2:
-            raise RuntimeError, "The length of the size must be one or two."
+            raise RuntimeError("The length of the size must be one or two.")
         self.sample.set_distance(distance)
…
         """
         if len(distance) < 1 or len(distance) > 2:
-            raise RuntimeError, "The length of the size must be one or two."
+            raise RuntimeError("The length of the size must be one or two.")
         self.detector.set_distance(distance)
…
             pix_y_size = detector_pix_size[1]
         else:
-            raise ValueError, " Input value format error..."
+            raise ValueError(" Input value format error...")
         # Sample to detector distance = sample slit to detector
         # minus sample offset
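The import rewrite here reflects Python 3 dropping implicit relative imports: a bare `import instrument` no longer finds a sibling module, while the explicit `from .instrument import ...` works on both interpreters. A self-contained demonstration that builds a throwaway package (all names hypothetical):

    import importlib
    import os
    import sys
    import tempfile

    root = tempfile.mkdtemp()
    pkg = os.path.join(root, "calcpkg")
    os.mkdir(pkg)
    open(os.path.join(pkg, "__init__.py"), "w").close()
    with open(os.path.join(pkg, "instrument.py"), "w") as f:
        f.write("class Sample:\n    pass\n")
    with open(os.path.join(pkg, "resolution.py"), "w") as f:
        f.write("from .instrument import Sample\n")   # explicit relative import

    sys.path.insert(0, root)
    importlib.invalidate_caches()
    from calcpkg.resolution import Sample   # resolves via the dotted form
    print(Sample)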
src/sas/sascalc/calculator/sas_gen.py
(diff: rf2ea95a → r1d014cb)
 from __future__ import print_function
 
-import sas.sascalc.calculator.core.sld2i as mod
-from sas.sascalc.calculator.BaseComponent import BaseComponent
+import os
+import sys
+import copy
+import logging
+
 from periodictable import formula
 from periodictable import nsf
 import numpy as np
-import os
-import copy
-import sys
-import logging
+
+from .core import sld2i as mod
+from .BaseComponent import BaseComponent
 
 logger = logging.getLogger(__name__)
+
+if sys.version_info[0] < 3:
+    def decode(s):
+        return s
+else:
+    def decode(s):
+        return s.decode() if isinstance(s, bytes) else s
 
 MFACTOR_AM = 2.853E-12
…
         factor = MFACTOR_MT
     else:
-        raise ValueError, "Invalid valueunit"
+        raise ValueError("Invalid valueunit")
     sld_m = factor * mag
     return sld_m
…
         """
         if self.data_vol is None:
-            raise
+            raise TypeError("data_vol is missing")
         self.data_vol = volume
…
             if len(x[1]) > 0:
                 msg = "Not a 1D."
-                raise ValueError, msg
+                raise ValueError(msg)
             i_out = np.zeros_like(x[0])
             # 1D I is found at y =0 in the 2D pattern
…
         else:
             msg = "Q must be given as list of qx's and qy's"
-            raise ValueError, msg
+            raise ValueError(msg)
 
     def runXY(self, x=0.0):
…
         else:
             msg = "Q must be given as list of qx's and qy's"
-            raise ValueError, msg
+            raise ValueError(msg)
 
     def evalDistribution(self, qdist):
…
             mesg = "evalDistribution is expecting an ndarray of "
             mesg += "a list [qx,qy] where qx,qy are arrays."
-            raise RuntimeError, mesg
+            raise RuntimeError(mesg)
 
 class OMF2SLD(object):
…
             z_dir2 *= z_dir2
             mask = (x_dir2 + y_dir2 + z_dir2) <= 1.0
-        except:
+        except Exception:
             logger.error(sys.exc_value)
         self.output = MagSLD(self.pos_x[mask], self.pos_y[mask],
…
         :Params length: data length
         """
-        msg = "Error: Inconsistent data length."
-        if len(self.pos_x) != length:
-            raise ValueError, msg
-        if len(self.pos_y) != length:
-            raise ValueError, msg
-        if len(self.pos_z) != length:
-            raise ValueError, msg
-        if len(self.mx) != length:
-            raise ValueError, msg
-        if len(self.my) != length:
-            raise ValueError, msg
-        if len(self.mz) != length:
-            raise ValueError, msg
+        parts = (self.pos_x, self.pos_y, self.pos_z, self.mx, self.my, self.mz)
+        if any(len(v) != length for v in parts):
+            raise ValueError("Error: Inconsistent data length.")
 
     def remove_null_points(self, remove=False, recenter=False):
…
         try:
             input_f = open(path, 'rb')
-            buff = input_f.read()
+            buff = decode(input_f.read())
             lines = buff.split('\n')
             input_f.close()
…
             valueunit = None
             for line in lines:
-                toks = line.split()
+                line = line.strip()
                 # Read data
-                try:
-                    _mx = float(toks[0])
-                    _my = float(toks[1])
-                    _mz = float(toks[2])
-                    _mx = mag2sld(_mx, valueunit)
-                    _my = mag2sld(_my, valueunit)
-                    _mz = mag2sld(_mz, valueunit)
-                    mx = np.append(mx, _mx)
-                    my = np.append(my, _my)
-                    mz = np.append(mz, _mz)
-                except:
-                    # Skip non-data lines
-                    logger.error(sys.exc_value)
+                if line and not line.startswith('#'):
+                    try:
+                        toks = line.split()
+                        _mx = float(toks[0])
+                        _my = float(toks[1])
+                        _mz = float(toks[2])
+                        _mx = mag2sld(_mx, valueunit)
+                        _my = mag2sld(_my, valueunit)
+                        _mz = mag2sld(_mz, valueunit)
+                        mx = np.append(mx, _mx)
+                        my = np.append(my, _my)
+                        mz = np.append(mz, _mz)
+                    except Exception as exc:
+                        # Skip non-data lines
+                        logger.error(str(exc)+" when processing %r"%line)
                 #Reading Header; Segment count ignored
                 s_line = line.split(":", 1)
…
                     msg = "Error: \n"
                     msg += "We accept only m as meshunit"
-                    raise ValueError, msg
+                    raise ValueError(msg)
                 if s_line[0].lower().count("xbase") > 0:
                     xbase = s_line[1].lstrip()
…
             output.set_m(mx, my, mz)
             return output
-        except:
+        except Exception:
             msg = "%s is not supported: \n" % path
             msg += "We accept only Text format OMF file."
-            raise RuntimeError, msg
+            raise RuntimeError(msg)
 
 class PDBReader(object):
…
         try:
             input_f = open(path, 'rb')
-            buff = input_f.read()
+            buff = decode(input_f.read())
             lines = buff.split('\n')
             input_f.close()
…
                         float(line[12])
                         atom_name = atom_name[1].upper()
-                    except:
+                    except Exception:
                         if len(atom_name) == 4:
                             atom_name = atom_name[0].upper()
…
                         vol = 1.0e+24 * atom.mass / atom.density / NA
                         vol_pix = np.append(vol_pix, vol)
-                    except:
+                    except Exception:
                         print("Error: set the sld of %s to zero"% atom_name)
                         sld_n = np.append(sld_n, 0.0)
…
                     try:
                         int_val = int(val)
-                    except:
+                    except Exception:
                         break
                     if int_val == 0:
…
                         y_lines.append(y_line)
                         z_lines.append(z_line)
-                except:
+                except Exception:
                     logger.error(sys.exc_value)
 
…
             output.sld_unit = '1/A^(2)'
             return output
-        except:
-            raise RuntimeError, "%s is not a sld file" % path
+        except Exception:
+            raise RuntimeError("%s is not a sld file" % path)
 
     def write(self, path, data):
…
             elif ncols == 7:
                 vol_pix = None
-        except:
+        except Exception:
             # For older version of numpy
             input_f = open(path, 'rb')
-            buff = input_f.read()
+            buff = decode(input_f.read())
             lines = buff.split('\n')
             input_f.close()
…
                         _vol_pix = float(toks[7])
                         vol_pix = np.append(vol_pix, _vol_pix)
-                    except:
+                    except Exception:
                         vol_pix = None
-                except:
+                except Exception:
                     # Skip non-data lines
                     logger.error(sys.exc_value)
…
             output.set_pixel_volumes(vol_pix)
             return output
-        except:
-            raise RuntimeError, "%s is not a sld file" % path
+        except Exception:
+            raise RuntimeError("%s is not a sld file" % path)
 
     def write(self, path, data):
…
         """
         if path is None:
-            raise ValueError, "Missing the file path."
+            raise ValueError("Missing the file path.")
         if data is None:
-            raise ValueError, "Missing the data to save."
+            raise ValueError("Missing the data to save.")
         x_val = data.pos_x
         y_val = data.pos_y
…
             self.ynodes = int(ydist) + 1
             self.znodes = int(zdist) + 1
-        except:
+        except Exception:
             self.xnodes = None
             self.ynodes = None
…
             self.set_pixel_volumes(vol)
             self.has_stepsize = True
-        except:
+        except Exception:
             self.xstepsize = None
             self.ystepsize = None
…
     reader = SLDReader()
     oreader = OMFReader()
-    output = reader.read(tfpath)
-    ooutput = oreader.read(ofpath)
+    output = decode(reader.read(tfpath))
+    ooutput = decode(oreader.read(ofpath))
     foutput = OMF2SLD()
     foutput.set_data(ooutput)
…
             break
     oreader = OMFReader()
-    ooutput = oreader.read(ofpath)
+    ooutput = decode(oreader.read(ofpath))
     foutput = OMF2SLD()
     foutput.set_data(ooutput)
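The decode() shim introduced at the top of this file normalizes file contents to text: files opened in 'rb' mode return bytes on Python 3 and str on Python 2. A self-contained sketch using an in-memory buffer in place of a real file:

    import io
    import sys

    if sys.version_info[0] < 3:
        def decode(s):
            return s
    else:
        def decode(s):
            return s.decode() if isinstance(s, bytes) else s

    raw = io.BytesIO(b"# OOMMF: rectangular mesh v1.0\n0.1 0.2 0.3\n")
    buff = decode(raw.read())          # bytes -> str on py3, pass-through on py2
    assert buff.split('\n')[1] == "0.1 0.2 0.3"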
src/sas/sascalc/corfunc/corfunc_calculator.py
(diff: ra859f99 → r574adc7)
         # Only process data of the class Data1D
         if not issubclass(data.__class__, Data1D):
-            raise ValueError, "Data must be of the type DataLoader.Data1D"
+            raise ValueError("Data must be of the type DataLoader.Data1D")
 
         # Prepare the data
…
             err = ("Incorrect transform type supplied, must be 'fourier'",
                    " or 'hilbert'")
-            raise ValueError, err
+            raise ValueError(err)
 
         self._transform_thread.queue()
src/sas/sascalc/data_util/calcthread.py
(diff: ra1b8fee → r574adc7)
 from __future__ import print_function
 
-import thread
 import traceback
 import sys
 import logging
+try:
+    import _thread as thread
+except ImportError:  # CRUFT: python 2 support
+    import thread
 
 if sys.platform.count("darwin") > 0:
     import time
     stime = time.time()
 
     def clock():
         return time.time() - stime
 
     def sleep(t):
         return time.sleep(t)
…
     CalcThread.__init__, passing it the keyword arguments for
     yieldtime, worktime, update and complete.
 
     When defining the compute() method you need to include code which
     allows the GUI to run.  They are as follows: ::
…
         self._lock.release()
         self._time_for_update += 1e6  # No more updates
 
         self.updatefn(**kwargs)
         sleep(self.yieldtime)
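The try/except import above is the standard dance for standard-library modules renamed in Python 3 (`thread` became `_thread`): try the new name first and fall back to the old one. A self-contained check:

    try:
        import _thread as thread       # Python 3 name
    except ImportError:                # CRUFT: Python 2 fallback
        import thread

    lock = thread.allocate_lock()      # same API under either name
    print(lock)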
src/sas/sascalc/data_util/nxsunit.py
(diff: r8e9536f → r574adc7)
 in the NeXus definition files.
 
-Unlike other units packages, this package does not carry the units along with 
+Unlike other units packages, this package does not carry the units along with
 the value but merely provides a conversion function for transforming values.
…
     Ack! Allows, e.g., Coulomb and coulomb even though Coulomb is not
     a unit because some NeXus files store it that way!
 
     Returns a dictionary of names and scales.
     """
…
               n=1e-9,p=1e-12,f=1e-15)
     map = {abbr:1}
-    map.update([(P+abbr,scale) for (P,scale) in short_prefix.iteritems()])
+    map.update([(P+abbr,scale) for (P,scale) in short_prefix.items()])
     for name in [unit,unit.capitalize()]:
         map.update({name:1,name+'s':1})
-        map.update([(P+name,scale) for (P,scale) in prefix.iteritems()])
-        map.update([(P+'*'+name,scale) for (P,scale) in prefix.iteritems()])
-        map.update([(P+name+'s',scale) for (P,scale) in prefix.iteritems()])
+        map.update([(P+name,scale) for (P,scale) in prefix.items()])
+        map.update([(P+'*'+name,scale) for (P,scale) in prefix.items()])
+        map.update([(P+name+'s',scale) for (P,scale) in prefix.items()])
     return map
…
     """
     map = {}
-    map.update([(name,scale) for name,scale in kw.iteritems()])
-    map.update([(name+'s',scale) for name,scale in kw.iteritems()])
+    map.update([(name,scale) for name,scale in kw.items()])
+    map.update([(name+'s',scale) for name,scale in kw.items()])
     return map
…
     * WARNING * this will incorrect transform 10^3 to 103.
     """
-    s.update((k.replace('^',''),v) 
-             for k, v in s.items()
+    s.update((k.replace('^',''),v)
+             for k, v in list(s.items())
              if '^' in k)
…
 temperature.update(_build_metric_units('Celcius', 'C'))
 temperature.update(_build_metric_units('celcius', 'C'))
 
 charge = _build_metric_units('coulomb','C')
 charge.update({'microAmp*hour':0.0036})
 
 sld = { '10^-6 Angstrom^-2': 1e-6, 'Angstrom^-2': 1 }
-Q = { 'invA': 1, 'invAng': 1, 'invAngstroms': 1, '1/A': 1, 
+Q = { 'invA': 1, 'invAng': 1, 'invAngstroms': 1, '1/A': 1,
       '10^-3 Angstrom^-1': 1e-3, '1/cm': 1e-8, '1/m': 1e-10,
       'nm^-1': 0.1, '1/nm': 0.1, 'n_m^-1': 0.1 }
…
 
 def _check(expect,get):
-    if expect != get: raise ValueError, "Expected %s but got %s"%(expect,get)
+    if expect != get:
+        raise ValueError("Expected %s but got %s"%(expect, get))
     #print expect,"==",get
…
     _check(123,Converter('a.u.')(123,units='s'))  # arbitrary units always returns the same value
     _check(123,Converter('a.u.')(123,units=''))  # arbitrary units always returns the same value
-    try: Converter('help')
-    except KeyError: pass
-    else: raise Exception("unknown unit did not raise an error")
+    try:
+        Converter('help')
+    except KeyError:
+        pass
+    else:
+        raise Exception("unknown unit did not raise an error")
 
     # TODO: more tests
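dict.iteritems() disappeared in Python 3; .items() works on both interpreters (a list on py2, a view on py3, either of which iterates the same way). A minimal self-contained sketch of the substitution, with hypothetical data:

    prefix = dict(milli=1e-3, micro=1e-6, nano=1e-9)
    # Same comprehension the units table uses, via .items() instead of .iteritems()
    scaled = {name + 'meter': scale for name, scale in prefix.items()}
    assert scaled['millimeter'] == 1e-3

Note the one subtlety visible above: when the loop mutates the dict it iterates (as _format_unit_structure does with s.update), the py3 view must be materialized first with list(s.items()).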
src/sas/sascalc/data_util/odict.py
(diff: rb699768 → r574adc7; most hunks are trailing-whitespace cleanup on docstring lines)
     """
     A class of dictionary that keeps the insertion order of keys.
 
     All appropriate methods return keys, items, or values in an ordered way.
 
     All normal dictionary methods are available. Update and comparison is
     restricted to other OrderedDict objects.
 
     Various sequence methods are available, including the ability to explicitly
     mutate the key ordering.
 
     __contains__ tests:
 
     >>> d = OrderedDict(((1, 3),))
     >>> 1 in d
     1
     >>> 4 in d
     0
 
     __getitem__ tests:
 
     >>> OrderedDict(((1, 3), (3, 2), (2, 1)))[2]
     1
     >>> OrderedDict(((1, 3), (3, 2), (2, 1)))[4]
     Traceback (most recent call last):
     KeyError: 4
 
     __len__ tests:
 
     >>> len(OrderedDict())
     0
     >>> len(OrderedDict(((1, 3), (3, 2), (2, 1))))
     3
 
     get tests:
 
     >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
     >>> d.get(1)
…
     >>> d
     OrderedDict([(1, 3), (3, 2), (2, 1)])
 
     has_key tests:
 
     >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
     >>> d.has_key(1)
…
         Create a new ordered dictionary. Cannot init from a normal dict,
         nor from kwargs, since items order is undefined in those cases.
 
         If the ``strict`` keyword argument is ``True`` (``False`` is the
         default) then when doing slice assignment - the ``OrderedDict`` you are
         assigning from *must not* contain any keys in the remaining dict.
 
         >>> OrderedDict()
         OrderedDict([])
…
         """
         Used for __repr__ and __str__
 
         >>> r1 = repr(OrderedDict((('a', 'b'), ('c', 'd'), ('e', 'f'))))
         >>> r1
…
         >>> d
         OrderedDict([(0, 1), (1, 2), (5, 6), (7, 8), (3, 4)])
 
         >>> a = OrderedDict(((0, 1), (1, 2), (2, 3)), strict=True)
         >>> a[3] = 4
…
         >>> a
         OrderedDict([(3, 4), (2, 3), (1, 2), (0, 1)])
 
         >>> d = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
         >>> d[:1] = 3
         Traceback (most recent call last):
         TypeError: slice assignment requires an OrderedDict
 
         >>> d = OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
         >>> d[:1] = OrderedDict([(9, 8)])
…
         """
         Implemented so that access to ``sequence`` raises a warning.
 
         >>> d = OrderedDict()
         >>> d.sequence
…
         """
         To allow deepcopy to work with OrderedDict.
 
         >>> from copy import deepcopy
         >>> a = OrderedDict([(1, 1), (2, 2), (3, 3)])
…
     def items(self):
         """
         ``items`` returns a list of tuples representing all the
         ``(key, value)`` pairs in the dictionary.
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.items()
…
         """
         Return a list of keys in the ``OrderedDict``.
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.keys()
…
         """
         Return a list of all the values in the OrderedDict.
 
         Optionally you can pass in a list of values, which will replace the
         current list. The value list must be the same len as the OrderedDict.
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.values()
…
         """
         No dict.pop in Python 2.2, gotta reimplement it
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.pop(3)
…
         """
         if len(args) > 1:
-            raise TypeError, ('pop expected at most 2 arguments, got %s' %
+            raise TypeError('pop expected at most 2 arguments, got %s' %
                 (len(args) + 1))
         if key in self:
…
         Delete and return an item specified by index, not a random one as in
         dict. The index is -1 by default (the last item).
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.popitem()
…
         """
         Update from another OrderedDict or sequence of (key, value) pairs
 
         >>> d = OrderedDict(((1, 0), (0, 1)))
         >>> d.update(OrderedDict(((1, 3), (3, 2), (2, 1))))
…
         """
         Rename the key for a given value, without modifying sequence order.
 
         For the case where new_key already exists this raise an exception,
         since if new_key exists, it is ambiguous as to what happens to the
         associated values, and the position of new_key in the sequence.
 
         >>> od = OrderedDict()
         >>> od['a'] = 1
…
             raise ValueError("New key already exists: %r" % new_key)
         # rename sequence entry
-        value = self[old_key] 
+        value = self[old_key]
         old_idx = self._sequence.index(old_key)
         self._sequence[old_idx] = new_key
…
         """
         This method allows you to set the items in the dict.
 
         It takes a list of tuples - of the same sort returned by the ``items``
         method.
 
         >>> d = OrderedDict()
         >>> d.setitems(((3, 1), (2, 3), (1, 2)))
…
         replace the current set. This must contain the same set of keys, but
         need not be in the same order.
 
         If you pass in new keys that don't match, a ``KeyError`` will be
         raised.
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.keys()
…
         You can pass in a list of values, which will replace the
         current list. The value list must be the same len as the OrderedDict.
 
         (Or a ``ValueError`` is raised.)
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.setvalues((1, 2, 3))
…
         """
         Return the position of the specified key in the OrderedDict.
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.index(3)
…
         """
         Takes ``index``, ``key``, and ``value`` as arguments.
 
         Sets ``key`` to ``value``, so that ``key`` is at position ``index`` in
         the OrderedDict.
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.insert(0, 4, 0)
…
         """
         Reverse the order of the OrderedDict.
 
         >>> d = OrderedDict(((1, 3), (3, 2), (2, 1)))
         >>> d.reverse()
…
         """
         Sort the key order in the OrderedDict.
 
         This method takes the same arguments as the ``list.sort`` method on
         your version of Python.
 
         >>> d = OrderedDict(((4, 1), (2, 2), (3, 3), (1, 4)))
         >>> d.sort()
…
     """
     Custom object for accessing the keys of an OrderedDict.
 
     Can be called like the normal ``OrderedDict.keys`` method, but also
     supports indexing and sequence methods.
…
         You cannot assign to keys, but you can do slice assignment to re-order
         them.
 
         You can only do slice assignment if the new set of keys is a reordering
         of the original set.
…
     """
     Custom object for accessing the items of an OrderedDict.
 
     Can be called like the normal ``OrderedDict.items`` method, but also
     supports indexing and sequence methods.
…
     """
     Custom object for accessing the values of an OrderedDict.
 
     Can be called like the normal ``OrderedDict.values`` method, but also
     supports indexing and sequence methods.
…
         """
         Set the value at position i to value.
 
         You can only do slice assignment to values if you supply a sequence of
         equal length to the slice you are replacing.
…
     Experimental version of OrderedDict that has a custom object for ``keys``,
     ``values``, and ``items``.
 
     These are callable sequence objects that work as methods, or can be
     manipulated directly as sequences.
 
     Test for ``keys``, ``items`` and ``values``.
 
     >>> d = SequenceOrderedDict(((1, 2), (2, 3), (3, 4)))
     >>> d
…
     >>> d
     SequenceOrderedDict([(1, 1), (2, 2), (3, 3)])
 
     >>> d = SequenceOrderedDict(((1, 2), (2, 3), (3, 4)))
     >>> d
src/sas/sascalc/data_util/registry.py
(diff: r5a8cdbb → rdc8d1c2)
         extlist = [ext for ext in self.extensions() if path.endswith(ext)]
         # Sort matching extensions by decreasing order of length
-        extlist.sort(lambda a,b: len(a)<len(b))
+        extlist.sort(key=len)
         # Combine loaders for matching extensions into one big list
         loaders = []
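Python 3 removed the comparator argument to list.sort(); a key function replaces it. For the decreasing-length order the surrounding comment asks for, the equivalent would add reverse=True. A minimal sketch with hypothetical extensions:

    exts = ['.xml', '.sasxml', '.svs']
    exts.sort(key=len, reverse=True)   # longest extension matched first
    assert exts[0] == '.sasxml'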
src/sas/sascalc/data_util/uncertainty.py
(diff: r9a5097c → r574adc7; several hunks are trailing-whitespace cleanup)
 Uncertainty propagation class for arithmetic, log and exp.
 
-Based on scalars or numpy vectors, this class allows you to store and 
+Based on scalars or numpy vectors, this class allows you to store and
 manipulate values+uncertainties, with propagation of gaussian error for
 addition, subtraction, multiplication, division, power, exp and log.
 
 Storage properties are determined by the numbers used to set the value
-and uncertainty. Be sure to use floating point uncertainty vectors 
+and uncertainty. Be sure to use floating point uncertainty vectors
 for inplace operations since numpy does not do automatic type conversion.
 Normal operations can use mixed integer and floating point. In place
…
 
 import numpy as np
-import err1d
-from formatnum import format_uncertainty
+
+from . import err1d
+from .formatnum import format_uncertainty
 
 __all__ = ['Uncertainty']
…
     # Make standard deviation available
     def _getdx(self): return np.sqrt(self.variance)
-    def _setdx(self,dx): 
+    def _setdx(self,dx):
         # Direct operation
         #    variance = dx**2
…
     # Constructor
     def __init__(self, x, variance=None):
-        self.x, self.variance = x, variance 
+        self.x, self.variance = x, variance
 
     # Numpy array slicing operations
-    def __len__(self): 
+    def __len__(self):
         return len(self.x)
-    def __getitem__(self,key): 
+    def __getitem__(self,key):
         return Uncertainty(self.x[key],self.variance[key])
     def __setitem__(self,key,value):
…
     def __idiv__(self, other): return self.__itruediv__(other)
 
 
     # Unary ops
     def __neg__(self):
…
             return format_uncertainty(self.x,np.sqrt(self.variance))
         else:
-            return [format_uncertainty(v,dv) 
+            return [format_uncertainty(v,dv)
                     for v,dv in zip(self.x,np.sqrt(self.variance))]
     def __repr__(self):
…
     z = a/4
     assert z.x == 5./4 and z.variance == 3./4**2
 
     # Reverse scalar operations
     z = 4+a
…
     z = 4/a
     assert z.x == 4./5 and abs(z.variance - 3./5**4 * 4**2) < 1e-15
 
     # Power operations
     z = a**2
…
     assert z.x == 5./4 and abs(z.variance - (3./5**2 + 2./4**2)*(5./4)**2) < 1e-15
 
-    # ===== Inplace operations ===== 
+    # ===== Inplace operations =====
     # Scalar operations
     y = a+0; y += 4
…
     assert (z.x == 5./4).all()
     assert (abs(z.variance - (3./5**2 + 2./4**2)*(5./4)**2) < 1e-15).all()
 
     # printing; note that sqrt(3) ~ 1.7
     assert str(Uncertainty(5,3)) == "5.0(17)"
src/sas/sascalc/dataloader/__init__.py
(diff: rb699768 → r574adc7)
-from data_info import *
-from manipulations import *
-from readers import *
+from .data_info import *
+from .manipulations import *
+from .readers import *
src/sas/sascalc/dataloader/loader.py
(diff: rdcb91cf → rdc8d1c2)
 import time
 from zipfile import ZipFile
+
+from sas.sascalc.data_util.registry import ExtensionRegistry
+
 # Default readers are defined in the readers sub-module
-import readers
-from loader_exceptions import NoKnownLoaderException, FileContentsException,\
+from . import readers
+from .loader_exceptions import NoKnownLoaderException, FileContentsException,\
     DefaultReaderException
-from readers import ascii_reader
-from readers import cansas_reader
-from readers import cansas_reader_HDF5
+from .readers import ascii_reader
+from .readers import cansas_reader
+from .readers import cansas_reader_HDF5
 
 logger = logging.getLogger(__name__)
…
         try:
             return super(Registry, self).load(path, format=format)
+        #except Exception: raise  # for debugging, don't use fallback loader
         except NoKnownLoaderException as nkl_e:
             pass  # Try the ASCII reader
…
         extlist = [ext for ext in self.extensions() if path.endswith(ext)]
         # Sort matching extensions by decreasing order of length
-        extlist.sort(lambda a, b: len(a) < len(b))
+        extlist.sort(key=len)
         # Combine loaders for matching extensions into one big list
         writers = []
…
         # Raise an error if there are no matching extensions
         if len(writers) == 0:
-            raise ValueError, "Unknown file type for " + path
+            raise ValueError("Unknown file type for " + path)
         # All done
         return writers
…
             try:
                 return fn(path, data)
-            except:
+            except Exception:
                 pass  # give other loaders a chance to succeed
         # If we get here it is because all loaders failed
src/sas/sascalc/dataloader/manipulations.py
(diff: r324e0bf → r574adc7)
 
 #from data_info import plottable_2D
-from data_info import Data1D
+from .data_info import Data1D
src/sas/sascalc/dataloader/readers/__init__.py
(diff: r488f3a5 → raaa801e)
 # Method to associate extensions to default readers
-from associations import read_associations
+from .associations import read_associations
src/sas/sascalc/dataloader/readers/anton_paar_saxs_reader.py
(diff: rfafe52a → ra5bd87a)
         ## Reinitialize the class when loading a new data file to reset all class variables
         self.reset_state()
-        buff = self.f_open.read()
+        buff = self.readall()
         self.raw_data = buff.splitlines()
         self.read_data()
src/sas/sascalc/dataloader/readers/associations.py
(diff: rce8c7bd → r574adc7)
     """
     # For each FileType entry, get the associated reader and extension
-    for ext, reader in settings.iteritems():
+    for ext, reader in settings.items():
         if reader is not None and ext is not None:
             # Associate the extension with a particular reader
…
             # and remove the extra line below.
             try:
-                exec "import %s" % reader
-                exec "loader.associate_file_type('%s', %s)" % (ext.lower(),
-                                                               reader)
-                exec "loader.associate_file_type('%s', %s)" % (ext.upper(),
-                                                               reader)
+                exec("from . import %s" % reader)
+                exec("loader.associate_file_type('%s', %s)"
+                     % (ext.lower(), reader))
+                exec("loader.associate_file_type('%s', %s)"
+                     % (ext.upper(), reader))
             except:
                 msg = "read_associations: skipping association"
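Python 2's `exec "..."` statement is a syntax error on Python 3, whereas the call form exec("...") parses on both interpreters. A tiny self-contained check of the call form:

    namespace = {}
    exec("answer = 6 * 7", namespace)
    assert namespace["answer"] == 42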
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
(diff: rcd57c7d4 → r7b50f14)
 import sys
 
-from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\
+from ..data_info import plottable_1D, plottable_2D,\
     Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
     TransmissionSpectrum, Detector
-from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
-from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DefaultReaderException
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
-
+from ..data_info import combine_data_info_with_plottable
+from ..loader_exceptions import FileContentsException, DefaultReaderException
+from ..file_reader_base_class import FileReader, decode
+
+def h5attr(node, key, default=None):
+    return decode(node.attrs.get(key, default))
 
 class Reader(FileReader):
…
             # Get all information for the current key
             value = data.get(key)
-            if value.attrs.get(u'canSAS_class') is not None:
-                class_name = value.attrs.get(u'canSAS_class')
-            else:
-                class_name = value.attrs.get(u'NX_class')
+            class_name = h5attr(value, u'canSAS_class')
+            if class_name is None:
+                class_name = h5attr(value, u'NX_class')
             if class_name is not None:
                 class_prog = re.compile(class_name)
…
 
             for data_point in data_set:
+                if data_point.dtype.char == 'S':
+                    data_point = decode(bytes(data_point))
                 # Top Level Meta Data
                 if key == u'definition':
…
                     self.current_datainfo.run.append(data_point)
                     try:
-                        run_name = value.attrs['name']
+                        run_name = h5attr(value, 'name')
                         run_dict = {data_point: run_name}
                         self.current_datainfo.run_name = run_dict
-                    except:
+                    except Exception:
                         pass
                 elif key == u'title':
…
         :return: unit for the value passed to the method
         """
-        unit = value.attrs.get(u'units')
+        unit = h5attr(value, u'units')
         if unit is None:
-            unit = value.attrs.get(u'unit')
+            unit = h5attr(value, u'unit')
         # Convert the unit formats
         if unit == "1/A":
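The h5attr() wrapper exists because, depending on the h5py/Python combination, HDF5 string attributes can come back as bytes; funneling every attribute read through decode() with a default for missing keys normalizes that. A self-contained sketch using an in-memory HDF5 file (requires h5py; names hypothetical):

    import h5py

    def decode(s):
        return s.decode() if isinstance(s, bytes) else s

    def h5attr(node, key, default=None):
        return decode(node.attrs.get(key, default))

    with h5py.File("demo.h5", "w", driver="core", backing_store=False) as f:
        grp = f.create_group("sasentry01")
        grp.attrs["canSAS_class"] = b"SASentry"
        # decode() normalizes whether the raw read is bytes or str
        assert h5attr(grp, "canSAS_class") == "SASentry"
        assert h5attr(grp, "missing", "n/a") == "n/a"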
src/sas/sascalc/dataloader/readers/danse_reader.py
(diff: ra78a02f → raf3e9f5)
 import math
 import os
+import logging
+
 import numpy as np
-import logging
-from sas.sascalc.dataloader.data_info import plottable_2D, DataInfo, Detector
-from sas.sascalc.dataloader.manipulations import reader2D_converter
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
-from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DataReaderException
+
+from ..data_info import plottable_2D, DataInfo, Detector
+from ..manipulations import reader2D_converter
+from ..file_reader_base_class import FileReader
+from ..loader_exceptions import FileContentsException, DataReaderException
 
 logger = logging.getLogger(__name__)
…
         data_start_line = 1
         while read_on:
-            line = self.f_open.readline()
+            line = self.nextline()
             data_start_line += 1
             if line.find("DATA:") >= 0:
…
             raise FileContentsException(msg)
 
-        for line_num, data_str in enumerate(self.f_open.readlines()):
+        for line_num, data_str in enumerate(self.nextlines()):
             toks = data_str.split()
             try:
src/sas/sascalc/dataloader/readers/red2d_reader.py
(diff: r2f85af7 → rc8321cfc)
 ######################################################################
 import os
+import math
+import time
+
 import numpy as np
-import math
-from sas.sascalc.dataloader.data_info import plottable_2D, DataInfo, Detector
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
-from sas.sascalc.dataloader.loader_exceptions import FileContentsException
-
-# Look for unit converter
-has_converter = True
-try:
-    from sas.sascalc.data_util.nxsunit import Converter
-except:
-    has_converter = False
+
+from sas.sascalc.data_util.nxsunit import Converter
+
+from ..data_info import plottable_2D, DataInfo, Detector
+from ..file_reader_base_class import FileReader
+from ..loader_exceptions import FileContentsException
 
…
     try:
         return float(x_point)
-    except:
+    except Exception:
         return 0
…
         :param data: data2D
         """
-        import time
         # Write the file
         try:
…
     def get_file_contents(self):
         # Read file
-        buf = self.f_open.read()
+        buf = self.readall()
         self.f_open.close()
         # Instantiate data object
…
             try:
                 wavelength = float(line_toks[1])
-                # Units
-                if has_converter == True and \
-                        self.current_datainfo.source.wavelength_unit != 'A':
+                # Wavelength is stored in angstroms; convert if necessary
+                if self.current_datainfo.source.wavelength_unit != 'A':
                     conv = Converter('A')
                     wavelength = conv(wavelength,
                                       units=self.current_datainfo.source.wavelength_unit)
-            except:
-                #Not required
-                pass
-            # Distance in mm
+            except Exception:
+                pass  # Not required
             try:
                 distance = float(line_toks[3])
-                # Units
-                if has_converter == True and self.current_datainfo.detector[0].distance_unit != 'm':
+                # Distance is stored in meters; convert if necessary
+                if self.current_datainfo.detector[0].distance_unit != 'm':
                     conv = Converter('m')
                     distance = conv(distance,
                                     units=self.current_datainfo.detector[0].distance_unit)
-            except:
-                #Not required
-                pass
-
-            # Distance in meters
+            except Exception:
+                pass  # Not required
 
             try:
                 transmission = float(line_toks[4])
-            except:
-                #Not required
-                pass
+            except Exception:
+                pass  # Not required
 
             if line.count("LAMBDA") > 0:
…
 
             ## Read and get data.
-            if data_started == True:
+            if data_started:
                 line_toks = line.split()
                 if len(line_toks) == 0:
…
                     col_num = len(line_toks)
                     break
+
         # Make numpy array to remove header lines using index
         lines_array = np.array(lines)
…
         # Change it(string) into float
         #data_list = map(float,data_list)
-        data_list1 = map(check_point, data_list)
+        data_list1 = list(map(check_point, data_list))
 
         # numpy array form
…
         try:
             data_point = data_array.reshape(row_num, col_num).transpose()
-        except:
+        except Exception:
             msg = "red2d_reader can't read this file: Incorrect number of data points provided."
             raise FileContentsException(msg)
…
         # Units of axes
-        self.current_dataset.xaxis("\\rm{Q_{x}}", 'A^{-1}')
-        self.current_dataset.yaxis("\\rm{Q_{y}}", 'A^{-1}')
-        self.current_dataset.zaxis("\\rm{Intensity}", "cm^{-1}")
+        self.current_dataset.xaxis(r"\rm{Q_{x}}", 'A^{-1}')
+        self.current_dataset.yaxis(r"\rm{Q_{y}}", 'A^{-1}')
+        self.current_dataset.zaxis(r"\rm{Intensity}", "cm^{-1}")
 
         # Store loading process information
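The list(map(...)) wrapper above matters because map() returns a lazy iterator on Python 3 rather than a list, so code that later needs len(), indexing, or numpy conversion must materialize it. A minimal self-contained sketch with hypothetical tokens:

    def check_point(tok):
        # Mirror of the reader's tolerant float conversion
        try:
            return float(tok)
        except Exception:
            return 0

    data_list1 = list(map(check_point, ["1.0", "2.5", "oops"]))
    assert data_list1 == [1.0, 2.5, 0] and len(data_list1) == 3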
src/sas/sascalc/dataloader/readers/sesans_reader.py
(diff: rbe43448 → r849094a)
 Jurrian Bakker
 """
+import os
+
 import numpy as np
-import os
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
-from sas.sascalc.dataloader.data_info import plottable_1D, DataInfo
-from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DataReaderException
+
+from ..file_reader_base_class import FileReader
+from ..data_info import plottable_1D, DataInfo
+from ..loader_exceptions import FileContentsException, DataReaderException
 
 # Check whether we have a converter available
…
         self.output = []
 
-        line = self.f_open.readline()
+        line = self.nextline()
         params = {}
         while not line.startswith("BEGIN_DATA"):
             terms = line.split()
             if len(terms) >= 2:
                 params[terms[0]] = " ".join(terms[1:])
-            line = self.f_open.readline()
+            line = self.nextline()
         self.params = params
…
                 "handled by other software.")
 
-        headers = self.f_open.readline().split()
+        headers = self.nextline().split()
 
         self._insist_header(headers, "SpinEchoLength")
src/sas/sascalc/dataloader/readers/tiff_reader.py
r959eb01 r574adc7 2 2 #This software was developed by the University of Tennessee as part of the 3 3 #Distributed Data Analysis of Neutron Scattering Experiments (DANSE) 4 #project funded by the US National Science Foundation. 4 #project funded by the US National Science Foundation. 5 5 #See the license text in license.txt 6 6 #copyright 2008, University of Tennessee … … 31 31 ## Extension 32 32 ext = ['.tif', '.tiff'] 33 33 34 34 def read(self, filename=None): 35 35 """ 36 36 Open and read the data in a file 37 37 38 38 :param file: path of the file 39 39 """ … … 44 44 except: 45 45 msg = "tiff_reader: could not load file. Missing Image module." 46 raise RuntimeError , msg47 46 raise RuntimeError(msg) 47 48 48 # Instantiate data object 49 49 output = Data2D() 50 50 output.filename = os.path.basename(filename) 51 51 52 52 # Read in the image 53 53 try: 54 54 im = Image.open(filename) 55 55 except: 56 raise RuntimeError , "cannot open %s"%(filename)56 raise RuntimeError("cannot open %s"%(filename)) 57 57 data = im.getdata() 58 58 … … 61 61 output.err_data = np.zeros([im.size[0], im.size[1]]) 62 62 output.mask = np.ones([im.size[0], im.size[1]], dtype=bool) 63 63 64 64 # Initialize 65 65 x_vals = [] … … 69 69 for i_x in range(im.size[0]): 70 70 x_vals.append(i_x) 71 71 72 72 itot = 0 73 73 for i_y in range(im.size[1]): … … 80 80 logger.error("tiff_reader: had to skip a non-float point") 81 81 continue 82 82 83 83 # Get bin number 84 84 if math.fmod(itot, im.size[0]) == 0: … … 87 87 else: 88 88 i_x += 1 89 89 90 90 output.data[im.size[1] - 1 - i_y][i_x] = value 91 91 92 92 itot += 1 93 93 94 94 output.xbins = im.size[0] 95 95 output.ybins = im.size[1] … … 102 102 output.ymin = 0 103 103 output.ymax = im.size[0] - 1 104 104 105 105 # Store loading process information 106 106 output.meta_data['loader'] = self.type_name -
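The raise edits in this file (and the similar ones in the writer and fit modules below) swap the Python 2-only comma form for the call form, which both interpreters accept. A minimal sketch, assuming Pillow supplies the Image module:

    def open_image(filename):
        try:
            from PIL import Image
        except ImportError:
            # Python 2-only spelling, a SyntaxError on Python 3:
            #     raise RuntimeError, msg
            raise RuntimeError("tiff_reader: could not load file. "
                               "Missing Image module.")
        try:
            return Image.open(filename)
        except Exception:
            raise RuntimeError("cannot open %s" % (filename,))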
src/sas/sascalc/dataloader/readers/xml_reader.py
rcd57c7d4 r7b50f14 16 16 17 17 import logging 18 18 19 from lxml import etree 19 20 from lxml.builder import E 20 from sas.sascalc.dataloader.file_reader_base_class import FileReader 21 22 from ..file_reader_base_class import FileReader, decode 21 23 22 24 logger = logging.getLogger(__name__) … … 151 153 Converts an etree element into a string 152 154 """ 153 return etree.tostring(elem, pretty_print=pretty_print, \154 encoding=encoding)155 return decode(etree.tostring(elem, pretty_print=pretty_print, 156 encoding=encoding)) 155 157 156 158 def break_processing_instructions(self, string, dic): -
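The decode(...) wrapper added around etree.tostring above exists because lxml returns bytes by default on Python 3 while the callers expect str. A short sketch of the same conversion, assuming lxml is installed:

    from lxml import etree

    elem = etree.fromstring("<Run>1</Run>")
    raw = etree.tostring(elem)           # bytes on Python 3: b'<Run>1</Run>'
    text = raw.decode() if isinstance(raw, bytes) else raw
    print(text)                          # str on Python 2 and 3: <Run>1</Run>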
src/sas/sascalc/file_converter/c_ext/bsl_loader.c
r2ab9c432 rd04ac05 1 1 #include <Python.h> 2 //#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION2 #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION 3 3 #include <numpy/arrayobject.h> 4 4 #include <stdio.h> … … 44 44 static void CLoader_dealloc(CLoader *self) { 45 45 free(self->params.filename); 46 self->ob_type->tp_free((PyObject *)self);46 Py_TYPE(self)->tp_free((PyObject *)self); 47 47 } 48 48 … … 237 237 238 238 static PyTypeObject CLoaderType = { 239 PyObject_HEAD_INIT(NULL) 240 0, /*ob_size*/ 239 //PyObject_HEAD_INIT(NULL) 240 //0, /*ob_size*/ 241 PyVarObject_HEAD_INIT(NULL, 0) 241 242 "CLoader", /*tp_name*/ 242 243 sizeof(CLoader), /*tp_basicsize*/ … … 278 279 }; 279 280 280 PyMODINIT_FUNC 281 initbsl_loader(void) 282 { 283 PyObject *module; 284 module = Py_InitModule("bsl_loader", NULL); 285 import_array(); 286 281 static PyMethodDef module_methods[] = { 282 {NULL} 283 }; 284 285 /** 286 * Function used to add the model class to a module 287 * @param module: module to add the class to 288 */ 289 void addCLoader(PyObject *module) { 287 290 if (PyType_Ready(&CLoaderType) < 0) 288 291 return; 289 292 290 293 Py_INCREF(&CLoaderType); 291 PyModule_AddObject(module, "CLoader", (PyObject *)&CLoaderType); 292 } 294 PyModule_AddObject(module, "bsl_loader", (PyObject *)&CLoaderType); 295 } 296 297 298 #define MODULE_DOC "C module for loading bsl." 299 #define MODULE_NAME "bsl_loader" 300 #define MODULE_INIT2 initbsl_loader 301 #define MODULE_INIT3 PyInit_bsl_loader 302 #define MODULE_METHODS module_methods 303 304 /* ==== boilerplate python 2/3 interface bootstrap ==== */ 305 306 307 #if defined(WIN32) && !defined(__MINGW32__) 308 #define DLL_EXPORT __declspec(dllexport) 309 #else 310 #define DLL_EXPORT 311 #endif 312 313 #if PY_MAJOR_VERSION >= 3 314 315 DLL_EXPORT PyMODINIT_FUNC MODULE_INIT3(void) 316 { 317 static struct PyModuleDef moduledef = { 318 PyModuleDef_HEAD_INIT, 319 MODULE_NAME, /* m_name */ 320 MODULE_DOC, /* m_doc */ 321 -1, /* m_size */ 322 MODULE_METHODS, /* m_methods */ 323 NULL, /* m_reload */ 324 NULL, /* m_traverse */ 325 NULL, /* m_clear */ 326 NULL, /* m_free */ 327 }; 328 PyObject* m = PyModule_Create(&moduledef); 329 addCLoader(m); 330 return m; 331 } 332 333 #else /* !PY_MAJOR_VERSION >= 3 */ 334 335 DLL_EXPORT PyMODINIT_FUNC MODULE_INIT2(void) 336 { 337 PyObject* m = Py_InitModule4(MODULE_NAME, 338 MODULE_METHODS, 339 MODULE_DOC, 340 0, 341 PYTHON_API_VERSION 342 ); 343 addCLoader(m); 344 } 345 346 #endif /* !PY_MAJOR_VERSION >= 3 */ -
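The bootstrap block above is the standard dual-init pattern: Python 3 loads a C extension through PyInit_<name>, Python 2 through init<name>, and PY_MAJOR_VERSION selects one at compile time. A rough Python-level illustration of the naming rule (not part of the loader itself):

    import sys

    # Which init symbol a build of bsl_loader must export for this interpreter.
    prefix = "PyInit_" if sys.version_info[0] >= 3 else "init"
    print(prefix + "bsl_loader")         # PyInit_bsl_loader on Python 3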
src/sas/sascalc/file_converter/cansas_writer.py
r7432acb r574adc7 32 32 valid_class = all([issubclass(data.__class__, Data1D) for data in frame_data]) 33 33 if not valid_class: 34 raise RuntimeError ,("The cansas writer expects an array of "34 raise RuntimeError("The cansas writer expects an array of " 35 35 "Data1D instances") 36 36 -
src/sas/sascalc/file_converter/nxcansas_writer.py
r5e906207 r574adc7 166 166 'wavelength_max': 'wavelength_max', 167 167 'wavelength_spread': 'incident_wavelength_spread' } 168 for sasname, nxname in wavelength_keys.ite ritems():168 for sasname, nxname in wavelength_keys.items(): 169 169 value = getattr(data_info.source, sasname) 170 170 units = getattr(data_info.source, sasname + '_unit') -
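dict.iteritems() was removed in Python 3; items() exists on both (a list on Python 2, a view on Python 3), which is all these loops need. A sketch using the same key-mapping shape as the writer above:

    wavelength_keys = {'wavelength_max': 'wavelength_max',
                       'wavelength_spread': 'incident_wavelength_spread'}
    for sasname, nxname in wavelength_keys.items():   # iteritems() is Py2-only
        print(sasname, '->', nxname)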
src/sas/sascalc/fit/AbstractFitEngine.py
r50fcb09 r574adc7 251 251 msg = "FitData1D: invalid error array " 252 252 msg += "%d <> %d" % (np.shape(self.dy), np.size(fx)) 253 raise RuntimeError , msg253 raise RuntimeError(msg) 254 254 return (self.y[self.idx] - fx[self.idx]) / self.dy[self.idx], fx[self.idx] 255 255 -
src/sas/sascalc/fit/Loader.py
ra1b8fee r574adc7 18 18 self.dy = dy 19 19 self.filename = None 20 20 21 21 def set_filename(self, path=None): 22 22 """ 23 Store path into a variable.If the user doesn't give 23 Store path into a variable.If the user doesn't give 24 24 a path as a parameter a pop-up 25 25 window appears to select the file. 26 26 27 27 :param path: the path given by the user 28 28 29 29 """ 30 30 self.filename = path 31 31 32 32 def get_filename(self): 33 33 """ return the file's path""" 34 34 return self.filename 35 35 36 36 def set_values(self): 37 37 """ Store the values loaded from file in local variables""" … … 42 42 self.x = [] 43 43 self.y = [] 44 self.dx = [] 44 self.dx = [] 45 45 self.dy = [] 46 46 for line in lines: … … 50 50 y = float(toks[1]) 51 51 dy = float(toks[2]) 52 52 53 53 self.x.append(x) 54 54 self.y.append(y) … … 59 59 # Sanity check 60 60 if not len(self.x) == len(self.dx): 61 raise ValueError , "x and dx have different length"61 raise ValueError("x and dx have different length") 62 62 if not len(self.y) == len(self.dy): 63 raise ValueError , "y and dy have different length"64 65 63 raise ValueError("y and dy have different length") 64 65 66 66 def get_values(self): 67 67 """ Return x, y, dx, dy""" 68 68 return self.x, self.y, self.dx, self.dy 69 69 70 70 def load_data(self, data): 71 71 """ Return plottable""" … … 77 77 #Load its View class 78 78 #plottable.reset_view() 79 80 81 if __name__ == "__main__": 79 80 81 if __name__ == "__main__": 82 82 load = Load() 83 83 load.set_filename("testdata_line.txt") 84 print(load.get_filename()) 84 print(load.get_filename()) 85 85 load.set_values() 86 86 print(load.get_values()) 87 88 87 -
src/sas/sascalc/fit/MultiplicationModel.py
r7432acb r574adc7 109 109 """ 110 110 ##set dispersion only from p_model 111 for name , value in self.p_model.dispersion.ite ritems():111 for name , value in self.p_model.dispersion.items(): 112 112 self.dispersion[name] = value 113 113 … … 135 135 """ 136 136 137 for name , value in self.p_model.params.ite ritems():137 for name , value in self.p_model.params.items(): 138 138 if not name in self.params.keys() and name not in self.excluded_params: 139 139 self.params[name] = value 140 140 141 for name , value in self.s_model.params.ite ritems():141 for name , value in self.s_model.params.items(): 142 142 #Remove the radius_effective from the (P*S) model parameters. 143 143 if not name in self.params.keys() and name not in self.excluded_params: … … 155 155 this model's details 156 156 """ 157 for name, detail in self.p_model.details.ite ritems():157 for name, detail in self.p_model.details.items(): 158 158 if name not in self.excluded_params: 159 159 self.details[name] = detail 160 160 161 for name , detail in self.s_model.details.ite ritems():161 for name , detail in self.s_model.details.items(): 162 162 if not name in self.details.keys() or name not in self.exluded_params: 163 163 self.details[name] = detail … … 245 245 return 246 246 247 raise ValueError , "Model does not contain parameter %s" % name247 raise ValueError("Model does not contain parameter %s" % name) 248 248 249 249 -
src/sas/sascalc/fit/expression.py
ra1b8fee r574adc7 59 59 occur multiple times. The return value is a set with the elements in 60 60 no particular order. 61 61 62 62 This is the first step in computing a dependency graph. 63 63 """ … … 81 81 offset = end 82 82 pieces.append(expr[offset:]) 83 83 84 84 # Join the pieces and return them 85 85 return "".join(pieces) … … 88 88 """ 89 89 Returns a list of pair-wise dependencies from the parameter expressions. 90 90 91 91 For example, if p3 = p1+p2, then find_dependencies([p1,p2,p3]) will 92 92 return [(p3,p1),(p3,p2)]. For base expressions without dependencies, … … 110 110 """ 111 111 Find the parameter substitution we need so that expressions can 112 be evaluated without having to traverse a chain of 112 be evaluated without having to traverse a chain of 113 113 model.layer.parameter.value 114 114 """ … … 122 122 return definition, substitution 123 123 124 def no_constraints(): 124 def no_constraints(): 125 125 """ 126 126 This parameter set has no constraints between the parameters. … … 163 163 164 164 Parameter names are assumed to contain only _.a-zA-Z0-9#[] 165 165 166 166 Both names are provided for inverse functions, e.g., acos and arccos. 167 167 168 168 Should try running the function to identify syntax errors before 169 169 running it in a fit. 170 170 171 171 Use help(fn) to see the code generated for the returned function fn. 172 172 dis.dis(fn) will show the corresponding python vm instructions. … … 239 239 if independent == emptyset: 240 240 cycleset = ", ".join(str(s) for s in left) 241 raise ValueError ,"Cyclic dependencies amongst %s"%cycleset241 raise ValueError("Cyclic dependencies amongst %s"%cycleset) 242 242 243 243 # The possibly resolvable items are those that depend on the independents … … 267 267 n.sort() 268 268 items = list(items); items.sort() 269 raise Exception,"%s expect %s to contain %s for %s"%(msg,n,items,pairs)269 raise ValueError("%s expect %s to contain %s for %s"%(msg,n,items,pairs)) 270 270 for lo,hi in pairs: 271 271 if lo in n and hi in n and n.index(lo) >= n.index(hi): 272 raise Exception,"%s expect %s before %s in %s for %s"%(msg,lo,hi,n,pairs)272 raise ValueError("%s expect %s before %s in %s for %s"%(msg,lo,hi,n,pairs)) 273 273 274 274 def test_deps(): … … 288 288 # Cycle test 289 289 pairs = [(1,4),(4,3),(4,5),(5,1)] 290 try: n = order_dependencies(pairs) 291 except ValueError: pass 292 else: raise Exception,"test3 expect ValueError exception for %s"%(pairs,) 290 try: 291 n = order_dependencies(pairs) 292 except ValueError: 293 pass 294 else: 295 raise ValueError("test3 expect ValueError exception for %s"%(pairs,)) 293 296 294 297 # large test for gross speed check … … 308 311 import inspect, dis 309 312 import math 310 313 311 314 symtab = {'a.b.x':1, 'a.c':2, 'a.b':3, 'b.x':4} 312 315 expr = 'a.b.x + sin(4*pi*a.c) + a.b.x/a.b' 313 316 314 317 # Check symbol lookup 315 318 assert _symbols(expr, symtab) == set([1,2,3]) … … 357 360 expected = 2*math.pi*math.sin(5/.1875) + 6 358 361 assert p2.value == expected,"Value was %s, not %s"%(p2.value,expected) 359 362 360 363 # Check empty dependency set doesn't crash 361 364 fn = compile_constraints(*world(p1,p3)) … … 381 384 fn() 382 385 assert p5.value == 2.07,"Value for %s was %s"%(p5.expression,p5.value) 383 386 384 387 385 388 # Verify that we capture invalid expressions 386 for expr in ['G4.cage', 'M0.cage', 'M1.G1 + *2', 389 for expr in ['G4.cage', 'M0.cage', 'M1.G1 + *2', 387 390 'piddle', 388 391 '5; import sys; print "p0wned"', -
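The cycle test above exercises order_dependencies, which must arrange constrained parameters so each one is evaluated after everything it references, and raise ValueError on a cycle. A hedged stand-alone sketch of that kind of ordering (a plain Kahn-style sort, not the module's actual implementation; pairs are read as (earlier, later) constraints to match the "expect lo before hi" check in these tests):

    from collections import defaultdict

    def order_sketch(pairs):
        succ = defaultdict(set)          # node -> nodes that must follow it
        indeg = defaultdict(int)         # node -> number of unmet constraints
        nodes = set()
        for lo, hi in pairs:
            nodes.update((lo, hi))
            if hi not in succ[lo]:
                succ[lo].add(hi)
                indeg[hi] += 1
        ready = sorted(n for n in nodes if indeg[n] == 0)
        order = []
        while ready:
            n = ready.pop()
            order.append(n)
            for m in sorted(succ[n]):
                indeg[m] -= 1
                if indeg[m] == 0:
                    ready.append(m)
        if len(order) != len(nodes):     # something was never freed: a cycle
            left = ", ".join(str(n) for n in sorted(nodes - set(order)))
            raise ValueError("Cyclic dependencies amongst %s" % left)
        return order

    try:
        order_sketch([(1, 4), (4, 3), (4, 5), (5, 1)])   # the cycle test above
    except ValueError as err:
        print(err)   # Cyclic dependencies amongst 1, 3, 4, 5
                     # (3 is not on the cycle but is stuck behind it)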
src/sas/sascalc/fit/pluginmodel.py
r5213d22 r574adc7 35 35 return self.function(x_val)*self.function(y_val) 36 36 elif x.__class__.__name__ == 'tuple': 37 raise ValueError , "Tuples are not allowed as input to BaseComponent models"37 raise ValueError("Tuples are not allowed as input to BaseComponent models") 38 38 else: 39 39 return self.function(x) … … 52 52 return self.function(x[0])*self.function(x[1]) 53 53 elif x.__class__.__name__ == 'tuple': 54 raise ValueError , "Tuples are not allowed as input to BaseComponent models"54 raise ValueError("Tuples are not allowed as input to BaseComponent models") 55 55 else: 56 56 return self.function(x) -
src/sas/sascalc/invariant/invariant.py
rb1f20d1 r574adc7 424 424 if not issubclass(data.__class__, LoaderData1D): 425 425 #Process only data that inherited from DataLoader.Data_info.Data1D 426 raise ValueError , "Data must be of type DataLoader.Data1D"426 raise ValueError("Data must be of type DataLoader.Data1D") 427 427 #from copy import deepcopy 428 428 new_data = (self._scale * data) - self._background … … 484 484 msg = "Length x and y must be equal" 485 485 msg += " and greater than 1; got x=%s, y=%s" % (len(data.x), len(data.y)) 486 raise ValueError , msg486 raise ValueError(msg) 487 487 else: 488 488 # Take care of smeared data … … 507 507 #iterate between for element different 508 508 #from the first and the last 509 for i in xrange(1, n - 1):509 for i in range(1, n - 1): 510 510 dxi = (data.x[i + 1] - data.x[i - 1]) / 2 511 511 total += gx[i] * data.y[i] * dxi … … 533 533 msg = "Length of data.x and data.y must be equal" 534 534 msg += " and greater than 1; got x=%s, y=%s" % (len(data.x), len(data.y)) 535 raise ValueError , msg535 raise ValueError(msg) 536 536 else: 537 537 #Create error for data without dy error … … 560 560 #iterate between for element different 561 561 #from the first and the last 562 for i in xrange(1, n - 1):562 for i in range(1, n - 1): 563 563 dxi = (data.x[i + 1] - data.x[i - 1]) / 2 564 564 total += (gx[i] * dy[i] * dxi) ** 2 … … 742 742 range = range.lower() 743 743 if range not in ['high', 'low']: 744 raise ValueError , "Extrapolation range should be 'high' or 'low'"744 raise ValueError("Extrapolation range should be 'high' or 'low'") 745 745 function = function.lower() 746 746 if function not in ['power_law', 'guinier']: 747 747 msg = "Extrapolation function should be 'guinier' or 'power_law'" 748 raise ValueError , msg748 raise ValueError(msg) 749 749 750 750 if range == 'high': 751 751 if function != 'power_law': 752 752 msg = "Extrapolation only allows a power law at high Q" 753 raise ValueError , msg753 raise ValueError(msg) 754 754 self._high_extrapolation_npts = npts 755 755 self._high_extrapolation_power = power … … 852 852 """ 853 853 if contrast <= 0: 854 raise ValueError , "The contrast parameter must be greater than zero"854 raise ValueError("The contrast parameter must be greater than zero") 855 855 856 856 # Make sure Q star is up to date … … 859 859 if self._qstar <= 0: 860 860 msg = "Invalid invariant: Invariant Q* must be greater than zero" 861 raise RuntimeError , msg861 raise RuntimeError(msg) 862 862 863 863 # Compute intermediate constant … … 869 869 if discrim < 0: 870 870 msg = "Could not compute the volume fraction: negative discriminant" 871 raise RuntimeError , msg871 raise RuntimeError(msg) 872 872 elif discrim == 0: 873 873 return 1 / 2 … … 881 881 return volume2 882 882 msg = "Could not compute the volume fraction: inconsistent results" 883 raise RuntimeError , msg883 raise RuntimeError(msg) 884 884 885 885 def get_qstar_with_error(self, extrapolation=None): -
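The summation loops above implement a midpoint-width quadrature: each interior point carries half the span between its neighbours, with the first and last points handled outside the loop, per the comments. The range edits are just the Python 3 rename of the lazy xrange. A small worked sketch, with gx standing in for the q**2 weight in the invariant integrand:

    import numpy as np

    x = np.array([0.01, 0.02, 0.03, 0.05])
    y = np.array([4.0, 3.0, 2.0, 1.0])
    gx = x * x                           # q**2 weighting

    total = 0.0
    for i in range(1, len(x) - 1):       # interior points only
        dxi = (x[i + 1] - x[i - 1]) / 2
        total += gx[i] * y[i] * dxi
    print(total)   # ~3.9e-05 = 0.02**2*3.0*0.01 + 0.03**2*2.0*0.015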
src/sas/sascalc/pr/c_extensions/Cinvertor.c
rcb62bd5 rd04ac05 52 52 invertor_dealloc(&(self->params)); 53 53 54 self->ob_type->tp_free((PyObject*)self);54 Py_TYPE(self)->tp_free((PyObject*)self); 55 55 56 56 } … … 1054 1054 1055 1055 static PyTypeObject CinvertorType = { 1056 PyObject_HEAD_INIT(NULL) 1057 0, /*ob_size*/ 1056 //PyObject_HEAD_INIT(NULL) 1057 //0, /*ob_size*/ 1058 PyVarObject_HEAD_INIT(NULL, 0) 1058 1059 "Cinvertor", /*tp_name*/ 1059 1060 sizeof(Cinvertor), /*tp_basicsize*/ … … 1119 1120 1120 1121 1121 #ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ 1122 #define PyMODINIT_FUNC void 1122 #define MODULE_DOC "C extension module for inversion to P(r)." 1123 #define MODULE_NAME "pr_inversion" 1124 #define MODULE_INIT2 initpr_inversion 1125 #define MODULE_INIT3 PyInit_pr_inversion 1126 #define MODULE_METHODS module_methods 1127 1128 /* ==== boilerplate python 2/3 interface bootstrap ==== */ 1129 1130 1131 #if defined(WIN32) && !defined(__MINGW32__) 1132 #define DLL_EXPORT __declspec(dllexport) 1133 #else 1134 #define DLL_EXPORT 1123 1135 #endif 1124 PyMODINIT_FUNC 1125 initpr_inversion(void) 1126 { 1127 PyObject* m; 1128 1129 m = Py_InitModule3("pr_inversion", module_methods, 1130 "C extension module for inversion to P(r)."); 1131 1132 addCinvertor(m); 1133 } 1136 1137 #if PY_MAJOR_VERSION >= 3 1138 1139 DLL_EXPORT PyMODINIT_FUNC MODULE_INIT3(void) 1140 { 1141 static struct PyModuleDef moduledef = { 1142 PyModuleDef_HEAD_INIT, 1143 MODULE_NAME, /* m_name */ 1144 MODULE_DOC, /* m_doc */ 1145 -1, /* m_size */ 1146 MODULE_METHODS, /* m_methods */ 1147 NULL, /* m_reload */ 1148 NULL, /* m_traverse */ 1149 NULL, /* m_clear */ 1150 NULL, /* m_free */ 1151 }; 1152 PyObject* m = PyModule_Create(&moduledef); 1153 addCinvertor(m); 1154 return m; 1155 } 1156 1157 #else /* !PY_MAJOR_VERSION >= 3 */ 1158 1159 DLL_EXPORT PyMODINIT_FUNC MODULE_INIT2(void) 1160 { 1161 PyObject* m = Py_InitModule4(MODULE_NAME, 1162 MODULE_METHODS, 1163 MODULE_DOC, 1164 0, 1165 PYTHON_API_VERSION 1166 ); 1167 addCinvertor(m); 1168 } 1169 1170 #endif /* !PY_MAJOR_VERSION >= 3 */ -
src/sas/sascalc/pr/fit/AbstractFitEngine.py
r50fcb09 r574adc7 251 251 msg = "FitData1D: invalid error array " 252 252 msg += "%d <> %d" % (np.shape(self.dy), np.size(fx)) 253 raise RuntimeError , msg253 raise RuntimeError(msg) 254 254 return (self.y[self.idx] - fx[self.idx]) / self.dy[self.idx], fx[self.idx] 255 255 -
src/sas/sascalc/pr/fit/Loader.py
ra1b8fee r574adc7 18 18 self.dy = dy 19 19 self.filename = None 20 20 21 21 def set_filename(self, path=None): 22 22 """ 23 Store path into a variable.If the user doesn't give 23 Store path into a variable.If the user doesn't give 24 24 a path as a parameter a pop-up 25 25 window appears to select the file. 26 26 27 27 :param path: the path given by the user 28 28 29 29 """ 30 30 self.filename = path 31 31 32 32 def get_filename(self): 33 33 """ return the file's path""" 34 34 return self.filename 35 35 36 36 def set_values(self): 37 37 """ Store the values loaded from file in local variables""" … … 42 42 self.x = [] 43 43 self.y = [] 44 self.dx = [] 44 self.dx = [] 45 45 self.dy = [] 46 46 for line in lines: … … 50 50 y = float(toks[1]) 51 51 dy = float(toks[2]) 52 52 53 53 self.x.append(x) 54 54 self.y.append(y) … … 59 59 # Sanity check 60 60 if not len(self.x) == len(self.dx): 61 raise ValueError , "x and dx have different length"61 raise ValueError("x and dx have different length") 62 62 if not len(self.y) == len(self.dy): 63 raise ValueError , "y and dy have different length"64 65 63 raise ValueError("y and dy have different length") 64 65 66 66 def get_values(self): 67 67 """ Return x, y, dx, dy""" 68 68 return self.x, self.y, self.dx, self.dy 69 69 70 70 def load_data(self, data): 71 71 """ Return plottable""" … … 77 77 #Load its View class 78 78 #plottable.reset_view() 79 80 81 if __name__ == "__main__": 79 80 81 if __name__ == "__main__": 82 82 load = Load() 83 83 load.set_filename("testdata_line.txt") 84 print(load.get_filename()) 84 print(load.get_filename()) 85 85 load.set_values() 86 86 print(load.get_values()) 87 88 87 -
src/sas/sascalc/pr/fit/expression.py
ra1b8fee r574adc7 59 59 occur multiple times. The return value is a set with the elements in 60 60 no particular order. 61 61 62 62 This is the first step in computing a dependency graph. 63 63 """ … … 81 81 offset = end 82 82 pieces.append(expr[offset:]) 83 83 84 84 # Join the pieces and return them 85 85 return "".join(pieces) … … 88 88 """ 89 89 Returns a list of pair-wise dependencies from the parameter expressions. 90 90 91 91 For example, if p3 = p1+p2, then find_dependencies([p1,p2,p3]) will 92 92 return [(p3,p1),(p3,p2)]. For base expressions without dependencies, … … 110 110 """ 111 111 Find the parameter substitution we need so that expressions can 112 be evaluated without having to traverse a chain of 112 be evaluated without having to traverse a chain of 113 113 model.layer.parameter.value 114 114 """ … … 122 122 return definition, substitution 123 123 124 def no_constraints(): 124 def no_constraints(): 125 125 """ 126 126 This parameter set has no constraints between the parameters. … … 163 163 164 164 Parameter names are assumed to contain only _.a-zA-Z0-9#[] 165 165 166 166 Both names are provided for inverse functions, e.g., acos and arccos. 167 167 168 168 Should try running the function to identify syntax errors before 169 169 running it in a fit. 170 170 171 171 Use help(fn) to see the code generated for the returned function fn. 172 172 dis.dis(fn) will show the corresponding python vm instructions. … … 239 239 if independent == emptyset: 240 240 cycleset = ", ".join(str(s) for s in left) 241 raise ValueError ,"Cyclic dependencies amongst %s"%cycleset241 raise ValueError("Cyclic dependencies amongst %s"%cycleset) 242 242 243 243 # The possibly resolvable items are those that depend on the independents … … 267 267 n.sort() 268 268 items = list(items); items.sort() 269 raise Exception,"%s expect %s to contain %s for %s"%(msg,n,items,pairs)269 raise ValueError("%s expect %s to contain %s for %s"%(msg,n,items,pairs)) 270 270 for lo,hi in pairs: 271 271 if lo in n and hi in n and n.index(lo) >= n.index(hi): 272 raise Exception,"%s expect %s before %s in %s for %s"%(msg,lo,hi,n,pairs)272 raise ValueError("%s expect %s before %s in %s for %s"%(msg,lo,hi,n,pairs)) 273 273 274 274 def test_deps(): … … 288 288 # Cycle test 289 289 pairs = [(1,4),(4,3),(4,5),(5,1)] 290 try: n = order_dependencies(pairs) 291 except ValueError: pass 292 else: raise Exception,"test3 expect ValueError exception for %s"%(pairs,) 290 try: 291 n = order_dependencies(pairs) 292 except ValueError: 293 pass 294 else: 295 raise Exception("test3 expect ValueError exception for %s"%(pairs,)) 293 296 294 297 # large test for gross speed check … … 308 311 import inspect, dis 309 312 import math 310 313 311 314 symtab = {'a.b.x':1, 'a.c':2, 'a.b':3, 'b.x':4} 312 315 expr = 'a.b.x + sin(4*pi*a.c) + a.b.x/a.b' 313 316 314 317 # Check symbol lookup 315 318 assert _symbols(expr, symtab) == set([1,2,3]) … … 357 360 expected = 2*math.pi*math.sin(5/.1875) + 6 358 361 assert p2.value == expected,"Value was %s, not %s"%(p2.value,expected) 359 362 360 363 # Check empty dependency set doesn't crash 361 364 fn = compile_constraints(*world(p1,p3)) … … 381 384 fn() 382 385 assert p5.value == 2.07,"Value for %s was %s"%(p5.expression,p5.value) 383 386 384 387 385 388 # Verify that we capture invalid expressions 386 for expr in ['G4.cage', 'M0.cage', 'M1.G1 + *2', 389 for expr in ['G4.cage', 'M0.cage', 'M1.G1 + *2', 387 390 'piddle', 388 391 '5; import sys; print "p0wned"', -
src/sas/sascalc/pr/invertor.py
rcb62bd5 rd04ac05 148 148 msg = "Invertor: one of your q-values is zero. " 149 149 msg += "Delete that entry before proceeding" 150 raise ValueError , msg150 raise ValueError(msg) 151 151 return self.set_x(value) 152 152 elif name == 'y': … … 159 159 msg = "Invertor: d_max must be greater than zero." 160 160 msg += "Correct that entry before proceeding" 161 raise ValueError , msg161 raise ValueError(msg) 162 162 return self.set_dmax(value) 163 163 elif name == 'q_min': … … 181 181 return self.set_est_bck(0) 182 182 else: 183 raise ValueError , "Invertor: est_bck can only be True or False"183 raise ValueError("Invertor: est_bck can only be True or False") 184 184 185 185 return Cinvertor.__setattr__(self, name, value) … … 331 331 if self.is_valid() <= 0: 332 332 msg = "Invertor.invert: Data array are of different length" 333 raise RuntimeError , msg333 raise RuntimeError(msg) 334 334 335 335 p = np.ones(nfunc) … … 364 364 if self.is_valid() <= 0: 365 365 msg = "Invertor.invert: Data arrays are of different length" 366 raise RuntimeError , msg366 raise RuntimeError(msg) 367 367 368 368 p = np.ones(nfunc) … … 448 448 if self.is_valid() < 0: 449 449 msg = "Invertor: invalid data; incompatible data lengths." 450 raise RuntimeError , msg450 raise RuntimeError(msg) 451 451 452 452 self.nfunc = nfunc … … 472 472 try: 473 473 self._get_matrix(nfunc, nq, a, b) 474 except :475 raise RuntimeError , "Invertor: could not invert I(Q)\n %s" % sys.exc_value474 except Exception as exc: 475 raise RuntimeError("Invertor: could not invert I(Q)\n %s" % str(exc)) 476 476 477 477 # Perform the inversion (least square fit) … … 756 756 except: 757 757 msg = "Invertor.from_file: corrupted file\n%s" % sys.exc_value 758 raise RuntimeError , msg758 raise RuntimeError(msg) 759 759 else: 760 760 msg = "Invertor.from_file: '%s' is not a file" % str(path) 761 raise RuntimeError , msg761 raise RuntimeError(msg)
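The except edits above are forced by Python 3 dropping sys.exc_value: the current exception must be bound with "as" and formatted via str(). A minimal sketch (invert_or_raise is a hypothetical wrapper, not part of the Invertor API):

    def invert_or_raise(compute):
        try:
            return compute()
        except Exception as exc:
            # Python 2-only equivalent:
            #     except:
            #         msg = "could not invert I(Q)\n %s" % sys.exc_value
            raise RuntimeError("Invertor: could not invert I(Q)\n %s" % str(exc))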