Changeset fadb757 in sasview for src/sas/sascalc/dataloader

- Timestamp: May 2, 2017 5:36:21 AM (8 years ago)
- Parents: 658dd57 (diff), 8390cf6 (diff)
- git-author: Adam Washington <rprospero@…> (05/02/17 05:36:21)
- git-committer: GitHub <noreply@…> (05/02/17 05:36:21)
- Location: src/sas/sascalc/dataloader
- Files: 13 edited

Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
src/sas/sascalc/dataloader/readers/sesans_reader.py
```diff
--- src/sas/sascalc/dataloader/readers/sesans_reader.py (r9a5097c)
+++ src/sas/sascalc/dataloader/readers/sesans_reader.py (rcb9feea8)
 """
 SESANS reader (based on ASCII reader)

 Reader for .ses or .sesans file format

 Jurrian Bakker
 """
 import numpy as np
…
 _ZERO = 1e-16

+
 class Reader:
     """
     Class to load sesans files (6 columns).
     """
-    ## File type
+    # File type
     type_name = "SESANS"
-    ## Wildcards
+    # Wildcards
     type = ["SESANS files (*.ses)|*.ses",
             "SESANS files (*..sesans)|*.sesans"]
-    ## List of allowed extensions
+    # List of allowed extensions
     ext = ['.ses', '.SES', '.sesans', '.SESANS']
-    ## Flag to bypass extension check
+    # Flag to bypass extension check
     allow_all = True

     def read(self, path):
-        # print "reader triggered"
-
         """
         Load data file

         :param path: file path

         :return: SESANSData1D object, or None

         :raise RuntimeError: when the file can't be opened
         :raise ValueError: when the length of the data vectors are inconsistent
         """
         if os.path.isfile(path):
             basename = os.path.basename(path)
             _, extension = os.path.splitext(basename)
-            if self.allow_all or extension.lower() in self.ext:
-                try:
-                    # Read in binary mode since GRASP frequently has no-ascii
-                    # characters that brakes the open operation
-                    input_f = open(path,'rb')
-                except:
-                    raise RuntimeError, "sesans_reader: cannot open %s" % path
-                buff = input_f.read()
-                lines = buff.splitlines()
-                x = np.zeros(0)
-                y = np.zeros(0)
-                dy = np.zeros(0)
-                lam = np.zeros(0)
-                dlam = np.zeros(0)
-                dx = np.zeros(0)
-
-                #temp. space to sort data
-                tx = np.zeros(0)
-                ty = np.zeros(0)
-                tdy = np.zeros(0)
-                tlam = np.zeros(0)
-                tdlam = np.zeros(0)
-                tdx = np.zeros(0)
-                output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True)
-                self.filename = output.filename = basename
-
-                paramnames=[]
-                paramvals=[]
-                zvals=[]
-                dzvals=[]
-                lamvals=[]
-                dlamvals=[]
-                Pvals=[]
-                dPvals=[]
-
-                for line in lines:
-                    # Initial try for CSV (split on ,)
-                    line=line.strip()
-                    toks = line.split('\t')
-                    if len(toks)==2:
-                        paramnames.append(toks[0])
-                        paramvals.append(toks[1])
-                    if len(toks)>5:
-                        zvals.append(toks[0])
-                        dzvals.append(toks[3])
-                        lamvals.append(toks[4])
-                        dlamvals.append(toks[5])
-                        Pvals.append(toks[1])
-                        dPvals.append(toks[2])
-                    else:
-                        continue
-
-                x=[]
-                y=[]
-                lam=[]
-                dx=[]
-                dy=[]
-                dlam=[]
-                lam_header = lamvals[0].split()
-                data_conv_z = None
-                default_z_unit = "A"
-                data_conv_P = None
-                default_p_unit = " "  # Adjust unit for axis (L^-3)
-                lam_unit = lam_header[1].replace("[","").replace("]","")
-                if lam_unit == 'AA':
-                    lam_unit = 'A'
-                varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
-                valrange=range(1, len(zvals))
-                for i in valrange:
-                    x.append(float(zvals[i]))
-                    y.append(float(Pvals[i]))
-                    lam.append(float(lamvals[i]))
-                    dy.append(float(dPvals[i]))
-                    dx.append(float(dzvals[i]))
-                    dlam.append(float(dlamvals[i]))
-
-                x,y,lam,dy,dx,dlam = [
-                    np.asarray(v, 'double')
-                    for v in (x,y,lam,dy,dx,dlam)
-                ]
-
-                input_f.close()
-
-                output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit)
-                output.y = y
-                output.y_unit = r'\AA^{-2} cm^{-1}'  # output y_unit added
-                output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit)
-                output.dy = dy
-                output.lam, output.lam_unit = self._unit_conversion(lam, lam_unit, default_z_unit)
-                output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit)
-
-                output.xaxis(r"\rm{z}", output.x_unit)
-                output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit)  # Adjust label to ln P/(lam^2 t), remove lam column refs
-
-                # Store loading process information
-                output.meta_data['loader'] = self.type_name
-                #output.sample.thickness = float(paramvals[6])
-                output.sample.name = paramvals[1]
-                output.sample.ID = paramvals[0]
-                zaccept_unit_split = paramnames[7].split("[")
-                zaccept_unit = zaccept_unit_split[1].replace("]","")
-                if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1':
-                    zaccept_unit = "1/A"
-                output.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
-                output.vars = varheader
-
-                if len(output.x) < 1:
-                    raise RuntimeError, "%s is empty" % path
-                return output
-
+            if not (self.allow_all or extension.lower() in self.ext):
+                raise RuntimeError(
+                    "{} has an unrecognized file extension".format(path))
         else:
-            raise RuntimeError, "%s is not a file" % path
-        return None
+            raise RuntimeError("{} is not a file".format(path))
+        with open(path, 'r') as input_f:
+            # Read in binary mode since GRASP frequently has no-ascii
+            # characters that brakes the open operation
+            line = input_f.readline()
+            params = {}
+            while line.strip() != "":
+                terms = line.strip().split("\t")
+                params[terms[0].strip()] = " ".join(terms[1:]).strip()
+                line = input_f.readline()
+            headers_temp = input_f.readline().strip().split("\t")
+            headers = {}
+            for h in headers_temp:
+                temp = h.strip().split()
+                headers[h[:-1].strip()] = temp[-1][1:-1]
+            data = np.loadtxt(input_f)
+            if data.size < 1:
+                raise RuntimeError("{} is empty".format(path))
+            x = data[:, 0]
+            dx = data[:, 3]
+            lam = data[:, 4]
+            dlam = data[:, 5]
+            y = data[:, 1]
+            dy = data[:, 2]
+
+            lam_unit = self._header_fetch(headers, "wavelength")
+            if lam_unit == "AA":
+                lam_unit = "A"
+
+            x, x_unit = self._unit_conversion(
+                x, lam_unit,
+                self._fetch_unit(headers, "spin echo length"))
+            dx, dx_unit = self._unit_conversion(
+                dx, lam_unit,
+                self._fetch_unit(headers, "error SEL"))
+            dlam, dlam_unit = self._unit_conversion(
+                dlam, lam_unit,
+                self._fetch_unit(headers, "error wavelength"))
+            y_unit = r'\AA^{-2} cm^{-1}'
+
+            output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam,
+                            isSesans=True)
+            self.filename = output.filename = basename
+            output.xaxis(r"\rm{z}", x_unit)
+            # Adjust label to ln P/(lam^2 t), remove lam column refs
+            output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", y_unit)
+            # Store loading process information
+            output.meta_data['loader'] = self.type_name
+            output.sample.name = params["Sample"]
+            output.sample.ID = params["DataFileTitle"]
+
+            output.sample.zacceptance = (
+                float(self._header_fetch(params, "Q_zmax")),
+                self._fetch_unit(params, "Q_zmax"))
+
+            output.sample.yacceptance = (
+                float(self._header_fetch(params, "Q_ymax")),
+                self._fetch_unit(params, "Q_ymax"))
+            return output

-    def _unit_conversion(self, value, value_unit, default_unit):
-        if has_converter == True and value_unit != default_unit:
+    @staticmethod
+    def _unit_conversion(value, value_unit, default_unit):
+        """
+        Performs unit conversion on a measurement.
+
+        :param value: The magnitude of the measurement
+        :param value_unit: a string containing the final desired unit
+        :param default_unit: a string containing the units of the original
+            measurement
+        :return: The magnitude of the measurement in the new units
+        """
+        # (float, string, string) -> float
+        if has_converter and value_unit != default_unit:
             data_conv_q = Converter(value_unit)
             value = data_conv_q(value, units=default_unit)
…
             new_unit = value_unit
         return value, new_unit
+
+    @staticmethod
+    def _header_fetch(headers, key):
+        """
+        Pull the value of a unit defined header from a dict. Example::
+
+            d = {"Length [m]": 17}
+            self._header_fetch(d, "Length") == 17
+
+        :param headers: A dictionary of values
+        :param key: A string which is a prefix for one of the keys in the dict
+        :return: The value of the dictionary for the specified key
+        """
+        # (dict<string, x>, string) -> x
+        index = [k for k in headers.keys()
+                 if k.startswith(key)][0]
+        return headers[index]
+
+    @staticmethod
+    def _fetch_unit(params, key):
+        """
+        Pull the unit off of a dictionary header. Example::
+
+            d = {"Length [m]": 17}
+            self._fetch_unit(d, "Length") == "m"
+
+        :param params: A dictionary of values, where the keys are strings
+            with the units for the values appended onto the string within
+            square brackets (see the example above)
+        :param key: A string with the prefix of the dictionary key whose
+            unit is being fetched
+        :return: A string containing the unit specified in the header
+        """
+        # (dict<string, _>, string) -> string
+        index = [k for k in params.keys()
+                 if k.startswith(key)][0]
+        unit = index.strip().split()[-1][1:-1]
+        if unit.startswith(r"\A"):
+            unit = "1/A"
+        return unit
```
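The rewritten reader trades the old ad hoc column sorting for a convention: parameter names and column headers carry their unit in trailing square brackets, and `_fetch_unit` recovers it by splitting off the last whitespace-separated token and stripping the brackets. A minimal standalone sketch of that convention (the header strings below are hypothetical, not taken from a real .ses file):

```python
def fetch_unit(key):
    # "spin echo length [A]" -> last token "[A]" -> strip brackets -> "A"
    unit = key.strip().split()[-1][1:-1]
    # Reciprocal Angstroms are written as \A^-1 in the file headers
    if unit.startswith("\\A"):
        unit = "1/A"
    return unit

# Hypothetical keys in the "name [unit]" form the reader expects
assert fetch_unit("spin echo length [A]") == "A"
assert fetch_unit("Q_zmax [\\A^-1]") == "1/A"
```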
src/sas/sascalc/dataloader/data_info.py
```diff
--- src/sas/sascalc/dataloader/data_info.py (r9a5097c)
+++ src/sas/sascalc/dataloader/data_info.py (ra1b8fee)
 ######################################################################

+from __future__ import print_function

 #TODO: Keep track of data manipulation in the 'process' data structure.
…
     details = None
     ## SESANS zacceptance
-    zacceptance = None
+    zacceptance = (0,"")
+    yacceptance = (0,"")

     def __init__(self):
…
         # create zero vector
         dy_other = other.dy
-        if other.dy == None or (len(other.dy) != len(other.y)):
+        if other.dy is None or (len(other.dy) != len(other.y)):
             dy_other = np.zeros(len(other.y))

         # Check that we have errors, otherwise create zero vector
         dy = self.dy
-        if self.dy == None or (len(self.dy) != len(self.y)):
+        if self.dy is None or (len(self.dy) != len(self.y)):
             dy = np.zeros(len(self.y))
…
         dy, dy_other = self._validity_check(other)
         result = self.clone_without_data(len(self.x))
-        if self.dxw == None:
+        if self.dxw is None:
             result.dxw = None
         else:
             result.dxw = np.zeros(len(self.x))
-        if self.dxl == None:
+        if self.dxl is None:
             result.dxl = None
         else:
…
         self._validity_check_union(other)
         result = self.clone_without_data(len(self.x) + len(other.x))
-        if self.dy == None or other.dy is None:
+        if self.dy is None or other.dy is None:
             result.dy = None
         else:
             result.dy = np.zeros(len(self.x) + len(other.x))
-        if self.dx == None or other.dx is None:
+        if self.dx is None or other.dx is None:
             result.dx = None
         else:
             result.dx = np.zeros(len(self.x) + len(other.x))
-        if self.dxw == None or other.dxw is None:
+        if self.dxw is None or other.dxw is None:
             result.dxw = None
         else:
             result.dxw = np.zeros(len(self.x) + len(other.x))
-        if self.dxl == None or other.dxl is None:
+        if self.dxl is None or other.dxl is None:
             result.dxl = None
         else:
…
         result.y = np.append(self.y, other.y)
         result.y = result.y[ind]
-        if result.dy != None:
+        if result.dy is not None:
             result.dy = np.append(self.dy, other.dy)
             result.dy = result.dy[ind]
…
         # Check that the scales match
         err_other = other.err_data
-        if other.err_data == None or \
+        if other.err_data is None or \
             (len(other.err_data) != len(other.data)):
             err_other = np.zeros(len(other.data))
…
         # Check that we have errors, otherwise create zero vector
         err = self.err_data
-        if self.err_data == None or \
+        if self.err_data is None or \
             (len(self.err_data) != len(self.data)):
             err = np.zeros(len(other.data))
…
         dy, dy_other = self._validity_check(other)
         result = self.clone_without_data(np.size(self.data))
-        if self.dqx_data == None or self.dqy_data == None:
+        if self.dqx_data is None or self.dqy_data is None:
             result.dqx_data = None
             result.dqy_data = None
…
         result.ymin = self.ymin
         result.ymax = self.ymax
-        if self.dqx_data == None or self.dqy_data == None or \
-                other.dqx_data == None or other.dqy_data == None:
+        if self.dqx_data is None or self.dqy_data is None or \
+                other.dqx_data is None or other.dqy_data is None:
             result.dqx_data = None
             result.dqy_data = None
```
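Most of the churn in this file (and in manipulations.py, ascii_reader.py, cansas_reader.py, danse_reader.py, and xml_reader.py below) is the same mechanical fix: `== None` and `!= None` become `is None` and `is not None`. On numpy arrays this is a correctness issue, not just style, because `==` compares elementwise. A quick illustration:

```python
import numpy as np

dy = np.zeros(3)

print(dy is None)   # False: identity test, always a single bool
print(dy == None)   # elementwise on modern numpy: [False False False]

# The old pattern breaks as soon as the comparison result is used as
# a condition: "if dy == None:" raises
# "ValueError: The truth value of an array ... is ambiguous".
if dy is None:
    print("no error vector")
```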
src/sas/sascalc/dataloader/loader.py
```diff
--- src/sas/sascalc/dataloader/loader.py (rb699768)
+++ src/sas/sascalc/dataloader/loader.py (r463e7ffc)
 from readers import cansas_reader

+logger = logging.getLogger(__name__)
+
 class Registry(ExtensionRegistry):
…
             msg = "DataLoader couldn't locate DataLoader plugin folder."
             msg += """ "%s" does not exist""" % dir
-            logging.warning(msg)
+            logger.warning(msg)
             return readers_found
…
                     msg = "Loader: Error importing "
                     msg += "%s\n %s" % (item, sys.exc_value)
-                    logging.error(msg)
+                    logger.error(msg)

         # Process zip files
…
                             msg = "Loader: Error importing"
                             msg += " %s\n %s" % (mfile, sys.exc_value)
-                            logging.error(msg)
+                            logger.error(msg)

                 except:
                     msg = "Loader: Error importing "
                     msg += " %s\n %s" % (item, sys.exc_value)
-                    logging.error(msg)
+                    logger.error(msg)

         return readers_found
…
             msg = "Loader: Error accessing"
             msg += " Reader in %s\n %s" % (module.__name__, sys.exc_value)
-            logging.error(msg)
+            logger.error(msg)
         return reader_found
…
             msg = "Loader: Error accessing Reader "
             msg += "in %s\n %s" % (loader.__name__, sys.exc_value)
-            logging.error(msg)
+            logger.error(msg)
         return reader_found
…
             msg = "Loader: Error accessing Reader"
             msg += " in %s\n %s" % (module.__name__, sys.exc_value)
-            logging.error(msg)
+            logger.error(msg)
         return reader_found
```
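The repeated `logging.error(...)` to `logger.error(...)` change follows the standard module-level logger pattern introduced at the top of each file, so messages are attributed to the emitting module's dotted name rather than the root logger. A minimal sketch:

```python
import logging

# One logger per module, named for the module that emits the message
# (e.g. "sas.sascalc.dataloader.loader" instead of the root logger).
logger = logging.getLogger(__name__)

def find(path):
    try:
        return open(path)
    except IOError:
        # Handlers and levels can now be tuned per subsystem
        logger.error("DataLoader couldn't locate %s", path)

logging.basicConfig(level=logging.ERROR)
find("/no/such/plugin/folder")
```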
src/sas/sascalc/dataloader/manipulations.py
```diff
--- src/sas/sascalc/dataloader/manipulations.py (rdd11014)
+++ src/sas/sascalc/dataloader/manipulations.py (r7432acb)
             y[i_q] += frac * data[npts]

-            if err_data == None or err_data[npts] == 0.0:
+            if err_data is None or err_data[npts] == 0.0:
                 if data[npts] < 0:
                     data[npts] = -data[npts]
…
                 continue
             y += frac * data[npts]
-            if err_data == None or err_data[npts] == 0.0:
+            if err_data is None or err_data[npts] == 0.0:
                 if data[npts] < 0:
                     data[npts] = -data[npts]
…
         # Get the dq for resolution averaging
-        if data2D.dqx_data != None and data2D.dqy_data != None:
+        if data2D.dqx_data is not None and data2D.dqy_data is not None:
             # The pinholes and det. pix contribution present
             # in both direction of the 2D which must be subtracted when
…
         #q_data_max = numpy.max(q_data)
-        if len(data2D.q_data) == None:
+        if len(data2D.q_data) is None:
             msg = "Circular averaging: invalid q_data: %g" % data2D.q_data
             raise RuntimeError, msg
…
             # Take dqs from data to get the q_average
             x[i_q] += frac * q_value
-            if err_data == None or err_data[npt] == 0.0:
+            if err_data is None or err_data[npt] == 0.0:
                 if data_n < 0:
                     data_n = -data_n
             else:
                 err_y[i_q] += frac * frac * err_data[npt] * err_data[npt]
-            if dq_data != None:
+            if dq_data is not None:
                 # To be consistent with dq calculation in 1d reduction,
                 # we need just the averages (not quadratures) because
…
                 err_y[n] = -err_y[n]
             err_y[n] = math.sqrt(err_y[n])
-            #if err_x != None:
+            #if err_x is not None:
             #    err_x[n] = math.sqrt(err_x[n])
…
         idx = (numpy.isfinite(y)) & (numpy.isfinite(x))

-        if err_x != None:
+        if err_x is not None:
             d_x = err_x[idx] / y_counts[idx]
         else:
…
             phi_bins[i_phi] += frac * data[npt]

-            if err_data == None or err_data[npt] == 0.0:
+            if err_data is None or err_data[npt] == 0.0:
                 if data_n < 0:
                     data_n = -data_n
…
         # Get the dq for resolution averaging
-        if data2D.dqx_data != None and data2D.dqy_data != None:
+        if data2D.dqx_data is not None and data2D.dqy_data is not None:
             # The pinholes and det. pix contribution present
             # in both direction of the 2D which must be subtracted when
…
             y[i_bin] += frac * data_n
             x[i_bin] += frac * q_value
-            if err_data[n] == None or err_data[n] == 0.0:
+            if err_data[n] is None or err_data[n] == 0.0:
                 if data_n < 0:
                     data_n = -data_n
             else:
                 y_err[i_bin] += frac * frac * err_data[n] * err_data[n]

-            if dq_data != None:
+            if dq_data is not None:
                 # To be consistent with dq calculation in 1d reduction,
                 # we need just the averages (not quadratures) because
…
         y_err[y_err == 0] = numpy.average(y_err)
         idx = (numpy.isfinite(y) & numpy.isfinite(y_err))
-        if x_err != None:
+        if x_err is not None:
             d_x = x_err[idx] / y_counts[idx]
         else:
```
src/sas/sascalc/dataloader/readers/IgorReader.py
```diff
--- src/sas/sascalc/dataloader/readers/IgorReader.py (rdd11014)
+++ src/sas/sascalc/dataloader/readers/IgorReader.py (ra1b8fee)
 #copyright 2008, University of Tennessee
 #############################################################################
+from __future__ import print_function
+
 import os
```
src/sas/sascalc/dataloader/readers/ascii_reader.py
```diff
--- src/sas/sascalc/dataloader/readers/ascii_reader.py (r9a5097c)
+++ src/sas/sascalc/dataloader/readers/ascii_reader.py (r235f514)
                 if new_lentoks > 2:
                     _dy = float(toks[2])
-                    has_error_dy = False if _dy == None else True
+                    has_error_dy = False if _dy is None else True

                 # If a 4th row is present, consider it dx
                 if new_lentoks > 3:
                     _dx = float(toks[3])
-                    has_error_dx = False if _dx == None else True
+                    has_error_dx = False if _dx is None else True

                 # Delete the previously stored lines of data candidates if
```
src/sas/sascalc/dataloader/readers/associations.py
```diff
--- src/sas/sascalc/dataloader/readers/associations.py (re5c09cf)
+++ src/sas/sascalc/dataloader/readers/associations.py (ra1b8fee)
 #copyright 2009, University of Tennessee
 #############################################################################
+from __future__ import print_function
+
 import os
 import sys
 import logging
 import json
+
+logger = logging.getLogger(__name__)

 FILE_NAME = 'defaults.json'
…
                 msg = "read_associations: skipping association"
                 msg += " for %s\n %s" % (ext.lower(), sys.exc_value)
-                logging.error(msg)
+                logger.error(msg)
     else:
-        print "Could not find reader association settings\n %s [%s]" % (__file__, os.getcwd())
+        print("Could not find reader association settings\n %s [%s]" % (__file__, os.getcwd()))
…
     :param registry_function: function to be called to register each reader
     """
-    logging.info("register_readers is now obsolete: use read_associations()")
+    logger.info("register_readers is now obsolete: use read_associations()")
     import abs_reader
     import ascii_reader
```
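The `from __future__ import print_function` import (also added to IgorReader.py, red2d_reader.py, and data_info.py) keeps these modules importable under both Python 2 and 3, since the old print statement is a syntax error on Python 3:

```python
from __future__ import print_function

# With the future import, print is a function under Python 2 as well,
# so this line parses identically under both interpreters; the old
# statement form "print msg" is a SyntaxError on Python 3.
print("Could not find reader association settings")
```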
src/sas/sascalc/dataloader/readers/cansas_constants.py
```diff
--- src/sas/sascalc/dataloader/readers/cansas_constants.py (rad4632c)
+++ src/sas/sascalc/dataloader/readers/cansas_constants.py (r63d773c)
                  "Sesans": {"storeas": "content"},
                  "zacceptance": {"storeas": "float"},
+                 "yacceptance": {"storeas": "float"},
                  "<any>" : ANY
```
src/sas/sascalc/dataloader/readers/cansas_reader.py
```diff
--- src/sas/sascalc/dataloader/readers/cansas_reader.py (r8434365)
+++ src/sas/sascalc/dataloader/readers/cansas_reader.py (r7432acb)
 import xml.dom.minidom
 from xml.dom.minidom import parseString
+
+logger = logging.getLogger(__name__)

 PREPROCESS = "xmlpreprocess"
…
         elif tagname == 'Sesans':
             self.current_datainfo.isSesans = bool(data_point)
+        elif tagname == 'yacceptance':
+            self.current_datainfo.sample.yacceptance = (data_point, unit)
         elif tagname == 'zacceptance':
             self.current_datainfo.sample.zacceptance = (data_point, unit)
…
         :param data1d: presumably a Data1D object
         """
-        if self.current_dataset == None:
+        if self.current_dataset is None:
             x_vals = np.empty(0)
             y_vals = np.empty(0)
…
         # Write the file
         file_ref = open(filename, 'w')
-        if self.encoding == None:
+        if self.encoding is None:
             self.encoding = "UTF-8"
         doc.write(file_ref, encoding=self.encoding,
…
         :param entry_node: lxml node ElementTree object to be appended to
         """
-        if datainfo.run == None or datainfo.run == []:
+        if datainfo.run is None or datainfo.run == []:
             datainfo.run.append(RUN_NAME_DEFAULT)
             datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT
…
             sesans.text = str(datainfo.isSesans)
             node.append(sesans)
+            self.write_node(node, "yacceptance", datainfo.sample.yacceptance[0],
+                            {'unit': datainfo.sample.yacceptance[1]})
             self.write_node(node, "zacceptance", datainfo.sample.zacceptance[0],
                             {'unit': datainfo.sample.zacceptance[1]})
…
             self.write_node(point, "T", spectrum.transmission[i],
                             {'unit': spectrum.transmission_unit})
-            if spectrum.transmission_deviation != None \
+            if spectrum.transmission_deviation is not None \
                 and len(spectrum.transmission_deviation) >= i:
                 self.write_node(point, "Tdev",
…
                         str(datainfo.source.name))
         self.append(source, instr)
-        if datainfo.source.radiation == None or datainfo.source.radiation == '':
+        if datainfo.source.radiation is None or datainfo.source.radiation == '':
             datainfo.source.radiation = "neutron"
         self.write_node(source, "radiation", datainfo.source.radiation)
…
         :param instr: lxml node ElementTree object to be appended to
         """
-        if datainfo.collimation == [] or datainfo.collimation == None:
+        if datainfo.collimation == [] or datainfo.collimation is None:
             coll = Collimation()
             datainfo.collimation.append(coll)
…
         :param inst: lxml instrument node to be appended to
         """
-        if datainfo.detector == None or datainfo.detector == []:
+        if datainfo.detector is None or datainfo.detector == []:
             det = Detector()
             det.name = ""
…
         local_unit = None
         exec "local_unit = storage.%s_unit" % toks[0]
-        if local_unit != None and units.lower() != local_unit.lower():
+        if local_unit is not None and units.lower() != local_unit.lower():
             if HAS_CONVERTER == True:
                 try:
…
                     self.errors.add(err_mess)
                     if optional:
-                        logging.info(err_mess)
+                        logger.info(err_mess)
                     else:
                         raise ValueError, err_mess
…
                 self.errors.add(err_mess)
                 if optional:
-                    logging.info(err_mess)
+                    logger.info(err_mess)
                 else:
                     raise ValueError, err_mess
```
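The yacceptance support mirrors the existing zacceptance handling on both sides: the reader stores a (value, unit) tuple on datainfo.sample, and the writer serializes it as an element with a unit attribute. A hedged sketch of the element the write_node call above should produce, built with lxml's E factory (the value and unit here are hypothetical):

```python
from lxml import etree
from lxml.builder import E

# Hypothetical (value, unit) pair as stored on datainfo.sample.yacceptance
value, unit = 0.02, "1/A"
node = E.yacceptance(str(value), unit=unit)
print(etree.tostring(node))
# b'<yacceptance unit="1/A">0.02</yacceptance>'
```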
src/sas/sascalc/dataloader/readers/danse_reader.py
```diff
--- src/sas/sascalc/dataloader/readers/danse_reader.py (r9a5097c)
+++ src/sas/sascalc/dataloader/readers/danse_reader.py (r235f514)
 from sas.sascalc.dataloader.data_info import Data2D, Detector
 from sas.sascalc.dataloader.manipulations import reader2D_converter
+
+logger = logging.getLogger(__name__)

 # Look for unit converter
…
                     error.append(err)
                 except:
-                    logging.info("Skipping line:%s,%s" % (data_str,
-                                                          sys.exc_value))
+                    logger.info("Skipping line:%s,%s" % (data_str,
+                                                         sys.exc_value))
…
                 x_vals.append(qx)
-                if xmin == None or qx < xmin:
+                if xmin is None or qx < xmin:
                     xmin = qx
-                if xmax == None or qx > xmax:
+                if xmax is None or qx > xmax:
                     xmax = qx
…
                 y_vals.append(qy)
-                if ymin == None or qy < ymin:
+                if ymin is None or qy < ymin:
                     ymin = qy
-                if ymax == None or qy > ymax:
+                if ymax is None or qy > ymax:
                     ymax = qy
…
                     msg = "Skipping entry (v1.0):%s,%s" % (str(data[i_pt]),
                                                            sys.exc_value)
-                    logging.info(msg)
+                    logger.info(msg)

                 # Get bin number
…
             raise ValueError, msg
         else:
-            logging.info("Danse_reader Reading %s \n" % filename)
+            logger.info("Danse_reader Reading %s \n" % filename)

         # Store loading process information
```
src/sas/sascalc/dataloader/readers/red2d_reader.py
```diff
--- src/sas/sascalc/dataloader/readers/red2d_reader.py (r9a5097c)
+++ src/sas/sascalc/dataloader/readers/red2d_reader.py (ra1b8fee)
 #copyright 2008, University of Tennessee
 ######################################################################
+from __future__ import print_function
+
 import os
 import numpy as np
…
         detector = Detector()
         if len(output.detector) > 0:
-            print str(output.detector[0])
+            print(str(output.detector[0]))
         output.detector.append(detector)
```
src/sas/sascalc/dataloader/readers/tiff_reader.py
```diff
--- src/sas/sascalc/dataloader/readers/tiff_reader.py (r9a5097c)
+++ src/sas/sascalc/dataloader/readers/tiff_reader.py (r959eb01)
 from sas.sascalc.dataloader.data_info import Data2D
 from sas.sascalc.dataloader.manipulations import reader2D_converter
 
+logger = logging.getLogger(__name__)
+
 class Reader:
     """
…
                     value = float(val)
                 except:
-                    logging.error("tiff_reader: had to skip a non-float point")
+                    logger.error("tiff_reader: had to skip a non-float point")
                     continue
```
src/sas/sascalc/dataloader/readers/xml_reader.py
```diff
--- src/sas/sascalc/dataloader/readers/xml_reader.py (ra235f715)
+++ src/sas/sascalc/dataloader/readers/xml_reader.py (r235f514)
 from lxml import etree
 from lxml.builder import E
+
+logger = logging.getLogger(__name__)

 PARSER = etree.ETCompatXMLParser(remove_comments=True, remove_pis=False)
…
             self.xmlroot = self.xmldoc.getroot()
         except etree.XMLSyntaxError as xml_error:
-            logging.info(xml_error)
+            logger.info(xml_error)
         except Exception:
             self.xml = None
…
             self.xmlroot = etree.fromstring(tag_soup)
         except etree.XMLSyntaxError as xml_error:
-            logging.info(xml_error)
+            logger.info(xml_error)
         except Exception:
             self.xml = None
…
             self.schemadoc = etree.parse(self.schema, parser=PARSER)
         except etree.XMLSyntaxError as xml_error:
-            logging.info(xml_error)
+            logger.info(xml_error)
         except Exception:
             self.schema = None
…
         :param name: The name of the element to be created
         """
-        if attrib == None:
+        if attrib is None:
             attrib = {}
         return etree.Element(name, attrib, nsmap)
…
         """
         text = str(text)
-        if attrib == None:
+        if attrib is None:
             attrib = {}
         elem = E(elementname, attrib, text)
```