Changeset 3c903ea in sasview for src/sas/sascalc
- Timestamp:
- Apr 6, 2017 4:36:51 AM (8 years ago)
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- d01b55c
- Parents:
- d3bce8c (diff), 5e2f36c (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Location:
- src/sas/sascalc
- Files:
- 15 edited
Legend:
- Unmodified
- Added
- Removed
-
src/sas/sascalc/calculator/resolution_calculator.py
r9a5097c r9c0f3c17 15 15 import sys 16 16 import logging 17 18 logger = logging.getLogger(__name__) 17 19 18 20 #Plank's constant in cgs unit … … 1006 1008 detector_offset = self.sample2detector_distance[1] 1007 1009 except: 1008 logg ing.error(sys.exc_value)1010 logger.error(sys.exc_value) 1009 1011 1010 1012 # detector size in [no of pix_x,no of pix_y] … … 1093 1095 output.qy_data = qy_value 1094 1096 except: 1095 logg ing.error(sys.exc_value)1097 logger.error(sys.exc_value) 1096 1098 1097 1099 return output -
src/sas/sascalc/calculator/sas_gen.py
r9a5097c r9c0f3c17 12 12 import sys 13 13 import logging 14 15 logger = logging.getLogger(__name__) 14 16 15 17 MFACTOR_AM = 2.853E-12 … … 285 287 mask = (x_dir2 + y_dir2 + z_dir2) <= 1.0 286 288 except: 287 logg ing.error(sys.exc_value)289 logger.error(sys.exc_value) 288 290 self.output = MagSLD(self.pos_x[mask], self.pos_y[mask], 289 291 self.pos_z[mask], self.sld_n[mask], … … 394 396 except: 395 397 # Skip non-data lines 396 logg ing.error(sys.exc_value)398 logger.error(sys.exc_value) 397 399 #Reading Header; Segment count ignored 398 400 s_line = line.split(":", 1) … … 589 591 z_lines.append(z_line) 590 592 except: 591 logg ing.error(sys.exc_value)593 logger.error(sys.exc_value) 592 594 593 595 output = MagSLD(pos_x, pos_y, pos_z, sld_n, sld_mx, sld_my, sld_mz) … … 683 685 except: 684 686 # Skip non-data lines 685 logg ing.error(sys.exc_value)687 logger.error(sys.exc_value) 686 688 output = MagSLD(pos_x, pos_y, pos_z, sld_n, 687 689 sld_mx, sld_my, sld_mz) -
src/sas/sascalc/data_util/calcthread.py
r934ce649 r64ca561 8 8 import traceback 9 9 import sys 10 import logging 10 11 11 12 if sys.platform.count("darwin") > 0: … … 21 22 from time import clock 22 23 from time import sleep 24 25 logger = logging.getLogger(__name__) 23 26 24 27 … … 243 246 except Exception: 244 247 pass 245 import logging 246 logging.error(traceback.format_exc()) 248 logger.error(traceback.format_exc()) 247 249 #print 'CalcThread exception', 248 250 -
src/sas/sascalc/dataloader/loader.py
rb699768 r463e7ffc 32 32 from readers import cansas_reader 33 33 34 logger = logging.getLogger(__name__) 35 34 36 class Registry(ExtensionRegistry): 35 37 """ … … 99 101 msg = "DataLoader couldn't locate DataLoader plugin folder." 100 102 msg += """ "%s" does not exist""" % dir 101 logg ing.warning(msg)103 logger.warning(msg) 102 104 return readers_found 103 105 … … 117 119 msg = "Loader: Error importing " 118 120 msg += "%s\n %s" % (item, sys.exc_value) 119 logg ing.error(msg)121 logger.error(msg) 120 122 121 123 # Process zip files … … 139 141 msg = "Loader: Error importing" 140 142 msg += " %s\n %s" % (mfile, sys.exc_value) 141 logg ing.error(msg)143 logger.error(msg) 142 144 143 145 except: 144 146 msg = "Loader: Error importing " 145 147 msg += " %s\n %s" % (item, sys.exc_value) 146 logg ing.error(msg)148 logger.error(msg) 147 149 148 150 return readers_found … … 190 192 msg = "Loader: Error accessing" 191 193 msg += " Reader in %s\n %s" % (module.__name__, sys.exc_value) 192 logg ing.error(msg)194 logger.error(msg) 193 195 return reader_found 194 196 … … 223 225 msg = "Loader: Error accessing Reader " 224 226 msg += "in %s\n %s" % (loader.__name__, sys.exc_value) 225 logg ing.error(msg)227 logger.error(msg) 226 228 return reader_found 227 229 … … 268 270 msg = "Loader: Error accessing Reader" 269 271 msg += " in %s\n %s" % (module.__name__, sys.exc_value) 270 logg ing.error(msg)272 logger.error(msg) 271 273 return reader_found 272 274 -
src/sas/sascalc/dataloader/readers/associations.py
re5c09cf r463e7ffc 18 18 import logging 19 19 import json 20 21 logger = logging.getLogger(__name__) 20 22 21 23 FILE_NAME = 'defaults.json' … … 67 69 msg = "read_associations: skipping association" 68 70 msg += " for %s\n %s" % (ext.lower(), sys.exc_value) 69 logg ing.error(msg)71 logger.error(msg) 70 72 else: 71 73 print "Could not find reader association settings\n %s [%s]" % (__file__, os.getcwd()) … … 81 83 :param registry_function: function to be called to register each reader 82 84 """ 83 logg ing.info("register_readers is now obsolete: use read_associations()")85 logger.info("register_readers is now obsolete: use read_associations()") 84 86 import abs_reader 85 87 import ascii_reader -
src/sas/sascalc/dataloader/readers/cansas_reader.py
r747334d r3c903ea 33 33 import xml.dom.minidom 34 34 from xml.dom.minidom import parseString 35 36 logger = logging.getLogger(__name__) 35 37 36 38 PREPROCESS = "xmlpreprocess" … … 1475 1477 self.errors.add(err_mess) 1476 1478 if optional: 1477 logg ing.info(err_mess)1479 logger.info(err_mess) 1478 1480 else: 1479 1481 raise ValueError, err_mess … … 1484 1486 self.errors.add(err_mess) 1485 1487 if optional: 1486 logg ing.info(err_mess)1488 logger.info(err_mess) 1487 1489 else: 1488 1490 raise ValueError, err_mess -
src/sas/sascalc/dataloader/readers/danse_reader.py
r9a5097c r9c0f3c17 19 19 from sas.sascalc.dataloader.data_info import Data2D, Detector 20 20 from sas.sascalc.dataloader.manipulations import reader2D_converter 21 22 logger = logging.getLogger(__name__) 21 23 22 24 # Look for unit converter … … 142 144 error.append(err) 143 145 except: 144 logg ing.info("Skipping line:%s,%s" %(data_str,146 logger.info("Skipping line:%s,%s" %(data_str, 145 147 sys.exc_value)) 146 148 … … 196 198 msg = "Skipping entry (v1.0):%s,%s" % (str(data[i_pt]), 197 199 sys.exc_value) 198 logg ing.info(msg)200 logger.info(msg) 199 201 200 202 # Get bin number … … 271 273 raise ValueError, msg 272 274 else: 273 logg ing.info("Danse_reader Reading %s \n" % filename)275 logger.info("Danse_reader Reading %s \n" % filename) 274 276 275 277 # Store loading process information -
src/sas/sascalc/dataloader/readers/tiff_reader.py
r9a5097c r9c0f3c17 16 16 from sas.sascalc.dataloader.data_info import Data2D 17 17 from sas.sascalc.dataloader.manipulations import reader2D_converter 18 18 19 logger = logging.getLogger(__name__) 20 19 21 class Reader: 20 22 """ … … 76 78 value = float(val) 77 79 except: 78 logg ing.error("tiff_reader: had to skip a non-float point")80 logger.error("tiff_reader: had to skip a non-float point") 79 81 continue 80 82 -
src/sas/sascalc/dataloader/readers/xml_reader.py
ra235f715 r463e7ffc 18 18 from lxml import etree 19 19 from lxml.builder import E 20 21 logger = logging.getLogger(__name__) 20 22 21 23 PARSER = etree.ETCompatXMLParser(remove_comments=True, remove_pis=False) … … 71 73 self.xmlroot = self.xmldoc.getroot() 72 74 except etree.XMLSyntaxError as xml_error: 73 logg ing.info(xml_error)75 logger.info(xml_error) 74 76 except Exception: 75 77 self.xml = None … … 88 90 self.xmlroot = etree.fromstring(tag_soup) 89 91 except etree.XMLSyntaxError as xml_error: 90 logg ing.info(xml_error)92 logger.info(xml_error) 91 93 except Exception: 92 94 self.xml = None … … 102 104 self.schemadoc = etree.parse(self.schema, parser=PARSER) 103 105 except etree.XMLSyntaxError as xml_error: 104 logg ing.info(xml_error)106 logger.info(xml_error) 105 107 except Exception: 106 108 self.schema = None -
src/sas/sascalc/pr/invertor.py
r9a5097c r9c0f3c17 18 18 from scipy import optimize 19 19 from sas.sascalc.pr.core.pr_inversion import Cinvertor 20 21 logger = logging.getLogger(__name__) 20 22 21 23 def help(): … … 495 497 # We were not able to estimate the errors 496 498 # Return an empty error matrix 497 logg ing.error(sys.exc_value)499 logger.error(sys.exc_value) 498 500 499 501 # Keep a copy of the last output … … 541 543 # number of terms 542 544 best_alpha, _, _ = self.estimate_alpha(self.nfunc) 543 logg ing.warning("Invertor.estimate_numterms: %s" % sys.exc_value)545 logger.warning("Invertor.estimate_numterms: %s" % sys.exc_value) 544 546 return self.nfunc, best_alpha, "Could not estimate number of terms" 545 547 -
src/sas/sascalc/pr/num_term.py
r9a5097c r9c0f3c17 5 5 import logging 6 6 from sas.sascalc.pr.invertor import Invertor 7 8 logger = logging.getLogger(__name__) 7 9 8 10 class NTermEstimator(object): … … 180 182 data_err = np.append(data_err, err) 181 183 except: 182 logg ing.error(sys.exc_value)184 logger.error(sys.exc_value) 183 185 184 186 return data_x, data_y, data_err -
src/sas/sascalc/data_util/qsmearing.py
r9a5097c r0ac6e11 67 67 if _found_sesans == True: 68 68 #Pre-compute the Hankel matrix (H) 69 qmax, qunits = data.sample.zacceptance70 69 SElength = Converter(data._xunit)(data.x, "A") 71 zaccept = Converter(qunits)(qmax, "1/A"), 70 71 theta_max = Converter("radians")(data.sample.zacceptance)[0] 72 q_max = 2 * np.pi / np.max(data.source.wavelength) * np.sin(theta_max) 73 zaccept = Converter("1/A")(q_max, "1/" + data.source.wavelength_unit), 74 72 75 Rmax = 10000000 73 76 hankel = SesansTransform(data.x, SElength, zaccept, Rmax) -
src/sas/sascalc/dataloader/data_info.py
r9a5097c r747334d 354 354 ## SESANS zacceptance 355 355 zacceptance = None 356 yacceptance = None 356 357 357 358 def __init__(self): -
src/sas/sascalc/dataloader/readers/cansas_constants.py
rad4632c r747334d 135 135 "Sesans": {"storeas": "content"}, 136 136 "zacceptance": {"storeas": "float"}, 137 "yacceptance": {"storeas": "float"}, 137 138 "<any>" : ANY 138 139 } -
src/sas/sascalc/dataloader/readers/sesans_reader.py
r9a5097c r0ac6e11 1 1 """ 2 2 SESANS reader (based on ASCII reader) 3 3 4 4 Reader for .ses or .sesans file format 5 6 Jurrian Bakker 5 6 Jurrian Bakker 7 7 """ 8 import logging 8 9 import numpy as np 9 10 import os … … 18 19 _ZERO = 1e-16 19 20 21 20 22 class Reader: 21 23 """ 22 24 Class to load sesans files (6 columns). 23 25 """ 24 # #File type26 # File type 25 27 type_name = "SESANS" 26 27 # #Wildcards28 29 # Wildcards 28 30 type = ["SESANS files (*.ses)|*.ses", 29 31 "SESANS files (*..sesans)|*.sesans"] 30 # #List of allowed extensions32 # List of allowed extensions 31 33 ext = ['.ses', '.SES', '.sesans', '.SESANS'] 32 33 # #Flag to bypass extension check34 35 # Flag to bypass extension check 34 36 allow_all = True 35 37 36 38 def read(self, path): 37 38 # print "reader triggered"39 40 39 """ 41 40 Load data file 42 41 43 42 :param path: file path 44 43 45 44 :return: SESANSData1D object, or None 46 45 47 46 :raise RuntimeError: when the file can't be opened 48 47 :raise ValueError: when the length of the data vectors are inconsistent … … 51 50 basename = os.path.basename(path) 52 51 _, extension = os.path.splitext(basename) 53 if self.allow_all or extension.lower() in self.ext: 54 try: 55 # Read in binary mode since GRASP frequently has no-ascii 56 # characters that brakes the open operation 57 input_f = open(path,'rb') 58 except: 59 raise RuntimeError, "sesans_reader: cannot open %s" % path 60 buff = input_f.read() 61 lines = buff.splitlines() 62 x = np.zeros(0) 63 y = np.zeros(0) 64 dy = np.zeros(0) 65 lam = np.zeros(0) 66 dlam = np.zeros(0) 67 dx = np.zeros(0) 68 69 #temp. 
space to sort data 70 tx = np.zeros(0) 71 ty = np.zeros(0) 72 tdy = np.zeros(0) 73 tlam = np.zeros(0) 74 tdlam = np.zeros(0) 75 tdx = np.zeros(0) 76 output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True) 77 self.filename = output.filename = basename 52 if not (self.allow_all or extension.lower() in self.ext): 53 raise RuntimeError("{} has an unrecognized file extension".format(path)) 54 else: 55 raise RunetimeError("{} is not a file".format(path)) 56 with open(path, 'r') as input_f: 57 # Read in binary mode since GRASP frequently has no-ascii 58 # characters that brakes the open operation 59 line = input_f.readline() 60 params = {} 61 while not line.startswith("BEGIN_DATA"): 62 terms = line.split() 63 if len(terms) >= 2: 64 params[terms[0]] = " ".join(terms[1:]) 65 line = input_f.readline() 66 self.params = params 67 headers = input_f.readline().split() 78 68 79 paramnames=[] 80 paramvals=[] 81 zvals=[] 82 dzvals=[] 83 lamvals=[] 84 dlamvals=[] 85 Pvals=[] 86 dPvals=[] 69 data = np.loadtxt(input_f) 70 if data.size < 1: 71 raise RuntimeError("{} is empty".format(path)) 72 x = data[:, headers.index("SpinEchoLength")] 73 dx = data[:, headers.index("SpinEchoLength_error")] 74 lam = data[:, headers.index("Wavelength")] 75 dlam = data[:, headers.index("Wavelength_error")] 76 y = data[:, headers.index("Depolarisation")] 77 dy = data[:, headers.index("Depolarisation_error")] 87 78 88 for line in lines: 89 # Initial try for CSV (split on ,) 90 line=line.strip() 91 toks = line.split('\t') 92 if len(toks)==2: 93 paramnames.append(toks[0]) 94 paramvals.append(toks[1]) 95 if len(toks)>5: 96 zvals.append(toks[0]) 97 dzvals.append(toks[3]) 98 lamvals.append(toks[4]) 99 dlamvals.append(toks[5]) 100 Pvals.append(toks[1]) 101 dPvals.append(toks[2]) 102 else: 103 continue 79 lam_unit = self._unit_fetch("Wavelength") 80 x, x_unit = self._unit_conversion(x, "A", self._unit_fetch("SpinEchoLength")) 81 dx, dx_unit = self._unit_conversion( 82 dx, lam_unit, 83 
self._unit_fetch("SpinEchoLength")) 84 dlam, dlam_unit = self._unit_conversion( 85 dlam, lam_unit, 86 self._unit_fetch("Wavelength")) 87 y_unit = self._unit_fetch("Depolarisation") 104 88 105 x=[] 106 y=[] 107 lam=[] 108 dx=[] 109 dy=[] 110 dlam=[] 111 lam_header = lamvals[0].split() 112 data_conv_z = None 113 default_z_unit = "A" 114 data_conv_P = None 115 default_p_unit = " " # Adjust unit for axis (L^-3) 116 lam_unit = lam_header[1].replace("[","").replace("]","") 117 if lam_unit == 'AA': 118 lam_unit = 'A' 119 varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]] 120 valrange=range(1, len(zvals)) 121 for i in valrange: 122 x.append(float(zvals[i])) 123 y.append(float(Pvals[i])) 124 lam.append(float(lamvals[i])) 125 dy.append(float(dPvals[i])) 126 dx.append(float(dzvals[i])) 127 dlam.append(float(dlamvals[i])) 89 output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, 90 isSesans=True) 128 91 129 x,y,lam,dy,dx,dlam = [ 130 np.asarray(v, 'double') 131 for v in (x,y,lam,dy,dx,dlam) 132 ] 92 output.y_unit = y_unit 93 output.x_unit = x_unit 94 output.source.wavelength_unit = lam_unit 95 output.source.wavelength = lam 96 self.filename = output.filename = basename 97 output.xaxis(r"\rm{z}", x_unit) 98 # Adjust label to ln P/(lam^2 t), remove lam column refs 99 output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", y_unit) 100 # Store loading process information 101 output.meta_data['loader'] = self.type_name 102 output.sample.name = params["Sample"] 103 output.sample.ID = params["DataFileTitle"] 104 output.sample.thickness = float( 105 self._unit_conversion( 106 params["Thickness"], "cm", self._unit_fetch("Thickness"))[0]) 133 107 134 input_f.close() 108 output.sample.zacceptance = ( 109 float(params["Theta_zmax"]), 110 self._unit_fetch("Theta_zmax")) 135 111 136 output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit) 137 output.y = y 138 output.y_unit = r'\AA^{-2} cm^{-1}' # output y_unit added 139 output.dx, output.dx_unit = 
self._unit_conversion(dx, lam_unit, default_z_unit) 140 output.dy = dy 141 output.lam, output.lam_unit = self._unit_conversion(lam, lam_unit, default_z_unit) 142 output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit) 143 144 output.xaxis(r"\rm{z}", output.x_unit) 145 output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit) # Adjust label to ln P/(lam^2 t), remove lam column refs 112 output.sample.yacceptance = ( 113 float(params["Theta_ymax"]), 114 self._unit_fetch("Theta_ymax")) 115 return output 146 116 147 # Store loading process information 148 output.meta_data['loader'] = self.type_name 149 #output.sample.thickness = float(paramvals[6]) 150 output.sample.name = paramvals[1] 151 output.sample.ID = paramvals[0] 152 zaccept_unit_split = paramnames[7].split("[") 153 zaccept_unit = zaccept_unit_split[1].replace("]","") 154 if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1': 155 zaccept_unit = "1/A" 156 output.sample.zacceptance=(float(paramvals[7]),zaccept_unit) 157 output.vars = varheader 117 @staticmethod 118 def _unit_conversion(value, value_unit, default_unit): 119 """ 120 Performs unit conversion on a measurement. 
158 121 159 if len(output.x) < 1: 160 raise RuntimeError, "%s is empty" % path 161 return output 162 163 else: 164 raise RuntimeError, "%s is not a file" % path 165 return None 166 167 def _unit_conversion(self, value, value_unit, default_unit): 168 if has_converter == True and value_unit != default_unit: 122 :param value: The magnitude of the measurement 123 :param value_unit: a string containing the final desired unit 124 :param default_unit: a string containing the units of the original measurement 125 :return: The magnitude of the measurement in the new units 126 """ 127 # (float, string, string) -> float 128 if has_converter and value_unit != default_unit: 169 129 data_conv_q = Converter(value_unit) 170 130 value = data_conv_q(value, units=default_unit) … … 173 133 new_unit = value_unit 174 134 return value, new_unit 135 136 def _unit_fetch(self, unit): 137 return self.params[unit+"_unit"]
Note: See TracChangeset
for help on using the changeset viewer.