Changeset fadb757 in sasview for src/sas/sascalc/dataloader/readers
- Timestamp:
- May 2, 2017 7:36:21 AM (8 years ago)
- Parents:
- 658dd57 (diff), 8390cf6 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- git-author:
- Adam Washington <rprospero@…> (05/02/17 07:36:21)
- git-committer:
- GitHub <noreply@…> (05/02/17 07:36:21)
- Location:
- src/sas/sascalc/dataloader/readers
- Files:
-
- 10 edited
Legend:
- Unmodified
- Added
- Removed
-
src/sas/sascalc/dataloader/readers/sesans_reader.py
r9a5097c rcb9feea8 1 1 """ 2 2 SESANS reader (based on ASCII reader) 3 3 4 4 Reader for .ses or .sesans file format 5 6 Jurrian Bakker 5 6 Jurrian Bakker 7 7 """ 8 8 import numpy as np … … 18 18 _ZERO = 1e-16 19 19 20 20 21 class Reader: 21 22 """ 22 23 Class to load sesans files (6 columns). 23 24 """ 24 # #File type25 # File type 25 26 type_name = "SESANS" 26 27 # #Wildcards27 28 # Wildcards 28 29 type = ["SESANS files (*.ses)|*.ses", 29 30 "SESANS files (*..sesans)|*.sesans"] 30 # #List of allowed extensions31 # List of allowed extensions 31 32 ext = ['.ses', '.SES', '.sesans', '.SESANS'] 32 33 # #Flag to bypass extension check33 34 # Flag to bypass extension check 34 35 allow_all = True 35 36 36 37 def read(self, path): 37 38 # print "reader triggered"39 40 38 """ 41 39 Load data file 42 40 43 41 :param path: file path 44 42 45 43 :return: SESANSData1D object, or None 46 44 47 45 :raise RuntimeError: when the file can't be opened 48 46 :raise ValueError: when the length of the data vectors are inconsistent … … 51 49 basename = os.path.basename(path) 52 50 _, extension = os.path.splitext(basename) 53 if self.allow_all or extension.lower() in self.ext: 54 try: 55 # Read in binary mode since GRASP frequently has no-ascii 56 # characters that brakes the open operation 57 input_f = open(path,'rb') 58 except: 59 raise RuntimeError, "sesans_reader: cannot open %s" % path 60 buff = input_f.read() 61 lines = buff.splitlines() 62 x = np.zeros(0) 63 y = np.zeros(0) 64 dy = np.zeros(0) 65 lam = np.zeros(0) 66 dlam = np.zeros(0) 67 dx = np.zeros(0) 68 69 #temp. 
space to sort data 70 tx = np.zeros(0) 71 ty = np.zeros(0) 72 tdy = np.zeros(0) 73 tlam = np.zeros(0) 74 tdlam = np.zeros(0) 75 tdx = np.zeros(0) 76 output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True) 77 self.filename = output.filename = basename 51 if not (self.allow_all or extension.lower() in self.ext): 52 raise RuntimeError("{} has an unrecognized file extension".format(path)) 53 else: 54 raise RunetimeError("{} is not a file".format(path)) 55 with open(path, 'r') as input_f: 56 # Read in binary mode since GRASP frequently has no-ascii 57 # characters that brakes the open operation 58 line = input_f.readline() 59 params = {} 60 while line.strip() != "": 61 terms = line.strip().split("\t") 62 params[terms[0].strip()] = " ".join(terms[1:]).strip() 63 line = input_f.readline() 64 headers_temp = input_f.readline().strip().split("\t") 65 headers = {} 66 for h in headers_temp: 67 temp = h.strip().split() 68 headers[h[:-1].strip()] = temp[-1][1:-1] 69 data = np.loadtxt(input_f) 70 if data.size < 1: 71 raise RuntimeError("{} is empty".format(path)) 72 x = data[:, 0] 73 dx = data[:, 3] 74 lam = data[:, 4] 75 dlam = data[:, 5] 76 y = data[:, 1] 77 dy = data[:, 2] 78 78 79 paramnames=[] 80 paramvals=[] 81 zvals=[] 82 dzvals=[] 83 lamvals=[] 84 dlamvals=[] 85 Pvals=[] 86 dPvals=[] 79 lam_unit = self._header_fetch(headers, "wavelength") 80 if lam_unit == "AA": 81 lam_unit = "A" 87 82 88 for line in lines: 89 # Initial try for CSV (split on ,) 90 line=line.strip() 91 toks = line.split('\t') 92 if len(toks)==2: 93 paramnames.append(toks[0]) 94 paramvals.append(toks[1]) 95 if len(toks)>5: 96 zvals.append(toks[0]) 97 dzvals.append(toks[3]) 98 lamvals.append(toks[4]) 99 dlamvals.append(toks[5]) 100 Pvals.append(toks[1]) 101 dPvals.append(toks[2]) 102 else: 103 continue 83 x, x_unit = self._unit_conversion( 84 x, lam_unit, 85 self._fetch_unit(headers, "spin echo length")) 86 dx, dx_unit = self._unit_conversion( 87 dx, lam_unit, 88 self._fetch_unit(headers, 
"error SEL")) 89 dlam, dlam_unit = self._unit_conversion( 90 dlam, lam_unit, 91 self._fetch_unit(headers, "error wavelength")) 92 y_unit = r'\AA^{-2} cm^{-1}' 104 93 105 x=[] 106 y=[] 107 lam=[] 108 dx=[] 109 dy=[] 110 dlam=[] 111 lam_header = lamvals[0].split() 112 data_conv_z = None 113 default_z_unit = "A" 114 data_conv_P = None 115 default_p_unit = " " # Adjust unit for axis (L^-3) 116 lam_unit = lam_header[1].replace("[","").replace("]","") 117 if lam_unit == 'AA': 118 lam_unit = 'A' 119 varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]] 120 valrange=range(1, len(zvals)) 121 for i in valrange: 122 x.append(float(zvals[i])) 123 y.append(float(Pvals[i])) 124 lam.append(float(lamvals[i])) 125 dy.append(float(dPvals[i])) 126 dx.append(float(dzvals[i])) 127 dlam.append(float(dlamvals[i])) 94 output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, 95 isSesans=True) 96 self.filename = output.filename = basename 97 output.xaxis(r"\rm{z}", x_unit) 98 # Adjust label to ln P/(lam^2 t), remove lam column refs 99 output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", y_unit) 100 # Store loading process information 101 output.meta_data['loader'] = self.type_name 102 output.sample.name = params["Sample"] 103 output.sample.ID = params["DataFileTitle"] 128 104 129 x,y,lam,dy,dx,dlam = [ 130 np.asarray(v, 'double') 131 for v in (x,y,lam,dy,dx,dlam) 132 ] 105 output.sample.zacceptance = ( 106 float(self._header_fetch(params, "Q_zmax")), 107 self._fetch_unit(params, "Q_zmax")) 133 108 134 input_f.close() 109 output.sample.yacceptance = ( 110 float(self._header_fetch(params, "Q_ymax")), 111 self._fetch_unit(params, "Q_ymax")) 112 return output 135 113 136 output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit) 137 output.y = y 138 output.y_unit = r'\AA^{-2} cm^{-1}' # output y_unit added 139 output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit) 140 output.dy = dy 141 output.lam, output.lam_unit = 
self._unit_conversion(lam, lam_unit, default_z_unit) 142 output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit) 143 144 output.xaxis(r"\rm{z}", output.x_unit) 145 output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit) # Adjust label to ln P/(lam^2 t), remove lam column refs 114 @staticmethod 115 def _unit_conversion(value, value_unit, default_unit): 116 """ 117 Performs unit conversion on a measurement. 146 118 147 # Store loading process information 148 output.meta_data['loader'] = self.type_name 149 #output.sample.thickness = float(paramvals[6]) 150 output.sample.name = paramvals[1] 151 output.sample.ID = paramvals[0] 152 zaccept_unit_split = paramnames[7].split("[") 153 zaccept_unit = zaccept_unit_split[1].replace("]","") 154 if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1': 155 zaccept_unit = "1/A" 156 output.sample.zacceptance=(float(paramvals[7]),zaccept_unit) 157 output.vars = varheader 158 159 if len(output.x) < 1: 160 raise RuntimeError, "%s is empty" % path 161 return output 162 163 else: 164 raise RuntimeError, "%s is not a file" % path 165 return None 166 167 def _unit_conversion(self, value, value_unit, default_unit): 168 if has_converter == True and value_unit != default_unit: 119 :param value: The magnitude of the measurement 120 :param value_unit: a string containing the final desired unit 121 :param default_unit: a string containing the units of the original measurement 122 :return: The magnitude of the measurement in the new units 123 """ 124 # (float, string, string) -> float 125 if has_converter and value_unit != default_unit: 169 126 data_conv_q = Converter(value_unit) 170 127 value = data_conv_q(value, units=default_unit) … … 173 130 new_unit = value_unit 174 131 return value, new_unit 132 133 @staticmethod 134 def _header_fetch(headers, key): 135 """ 136 Pull the value of a unit defined header from a dict. 
Example:: 137 138 d = {"Length [m]": 17} 139 self._header_fetch(d, "Length") == 17 140 141 :param header: A dictionary of values 142 :param key: A string which is a prefix for one of the keys in the dict 143 :return: The value of the dictionary for the specified key 144 """ 145 # (dict<string, x>, string) -> x 146 index = [k for k in headers.keys() 147 if k.startswith(key)][0] 148 return headers[index] 149 150 @staticmethod 151 def _fetch_unit(params, key): 152 """ 153 Pull the unit off of a dictionary header. Example:: 154 155 d = {"Length [m]": 17} 156 self._fetch_unit(d, "Length") == "m" 157 158 :param header: A dictionary of values, where the keys are strings 159 with the units for the values appended onto the string within square 160 brackets (See the example above) 161 :param key: A string with the prefix of the dictionary key whose unit 162 is being fetched 163 :return: A string containing the unit specifed in the header 164 """ 165 # (dict<string, _>, string) -> string 166 index = [k for k in params.keys() 167 if k.startswith(key)][0] 168 unit = index.strip().split()[-1][1:-1] 169 if unit.startswith(r"\A"): 170 unit = "1/A" 171 return unit -
src/sas/sascalc/dataloader/readers/IgorReader.py
rdd11014 ra1b8fee 12 12 #copyright 2008, University of Tennessee 13 13 ############################################################################# 14 from __future__ import print_function 15 14 16 import os 15 17 -
src/sas/sascalc/dataloader/readers/ascii_reader.py
r9a5097c r235f514 128 128 if new_lentoks > 2: 129 129 _dy = float(toks[2]) 130 has_error_dy = False if _dy ==None else True130 has_error_dy = False if _dy is None else True 131 131 132 132 # If a 4th row is present, consider it dx 133 133 if new_lentoks > 3: 134 134 _dx = float(toks[3]) 135 has_error_dx = False if _dx ==None else True135 has_error_dx = False if _dx is None else True 136 136 137 137 # Delete the previously stored lines of data candidates if -
src/sas/sascalc/dataloader/readers/associations.py
re5c09cf ra1b8fee 14 14 #copyright 2009, University of Tennessee 15 15 ############################################################################# 16 from __future__ import print_function 17 16 18 import os 17 19 import sys 18 20 import logging 19 21 import json 22 23 logger = logging.getLogger(__name__) 20 24 21 25 FILE_NAME = 'defaults.json' … … 67 71 msg = "read_associations: skipping association" 68 72 msg += " for %s\n %s" % (ext.lower(), sys.exc_value) 69 logg ing.error(msg)73 logger.error(msg) 70 74 else: 71 print "Could not find reader association settings\n %s [%s]" % (__file__, os.getcwd())75 print("Could not find reader association settings\n %s [%s]" % (__file__, os.getcwd())) 72 76 73 77 … … 81 85 :param registry_function: function to be called to register each reader 82 86 """ 83 logg ing.info("register_readers is now obsolete: use read_associations()")87 logger.info("register_readers is now obsolete: use read_associations()") 84 88 import abs_reader 85 89 import ascii_reader -
src/sas/sascalc/dataloader/readers/cansas_constants.py
rad4632c r63d773c 135 135 "Sesans": {"storeas": "content"}, 136 136 "zacceptance": {"storeas": "float"}, 137 "yacceptance": {"storeas": "float"}, 137 138 "<any>" : ANY 138 139 } -
src/sas/sascalc/dataloader/readers/cansas_reader.py
r8434365 r7432acb 33 33 import xml.dom.minidom 34 34 from xml.dom.minidom import parseString 35 36 logger = logging.getLogger(__name__) 35 37 36 38 PREPROCESS = "xmlpreprocess" … … 290 292 elif tagname == 'Sesans': 291 293 self.current_datainfo.isSesans = bool(data_point) 294 elif tagname == 'yacceptance': 295 self.current_datainfo.sample.yacceptance = (data_point, unit) 292 296 elif tagname == 'zacceptance': 293 297 self.current_datainfo.sample.zacceptance = (data_point, unit) … … 803 807 :param data1d: presumably a Data1D object 804 808 """ 805 if self.current_dataset ==None:809 if self.current_dataset is None: 806 810 x_vals = np.empty(0) 807 811 y_vals = np.empty(0) … … 891 895 # Write the file 892 896 file_ref = open(filename, 'w') 893 if self.encoding ==None:897 if self.encoding is None: 894 898 self.encoding = "UTF-8" 895 899 doc.write(file_ref, encoding=self.encoding, … … 1011 1015 :param entry_node: lxml node ElementTree object to be appended to 1012 1016 """ 1013 if datainfo.run ==None or datainfo.run == []:1017 if datainfo.run is None or datainfo.run == []: 1014 1018 datainfo.run.append(RUN_NAME_DEFAULT) 1015 1019 datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT … … 1055 1059 sesans.text = str(datainfo.isSesans) 1056 1060 node.append(sesans) 1061 self.write_node(node, "yacceptance", datainfo.sample.yacceptance[0], 1062 {'unit': datainfo.sample.yacceptance[1]}) 1057 1063 self.write_node(node, "zacceptance", datainfo.sample.zacceptance[0], 1058 1064 {'unit': datainfo.sample.zacceptance[1]}) … … 1127 1133 self.write_node(point, "T", spectrum.transmission[i], 1128 1134 {'unit': spectrum.transmission_unit}) 1129 if spectrum.transmission_deviation !=None \1135 if spectrum.transmission_deviation is not None \ 1130 1136 and len(spectrum.transmission_deviation) >= i: 1131 1137 self.write_node(point, "Tdev", … … 1207 1213 str(datainfo.source.name)) 1208 1214 self.append(source, instr) 1209 if datainfo.source.radiation ==None or datainfo.source.radiation == 
'':1215 if datainfo.source.radiation is None or datainfo.source.radiation == '': 1210 1216 datainfo.source.radiation = "neutron" 1211 1217 self.write_node(source, "radiation", datainfo.source.radiation) … … 1248 1254 :param instr: lxml node ElementTree object to be appended to 1249 1255 """ 1250 if datainfo.collimation == [] or datainfo.collimation ==None:1256 if datainfo.collimation == [] or datainfo.collimation is None: 1251 1257 coll = Collimation() 1252 1258 datainfo.collimation.append(coll) … … 1293 1299 :param inst: lxml instrument node to be appended to 1294 1300 """ 1295 if datainfo.detector ==None or datainfo.detector == []:1301 if datainfo.detector is None or datainfo.detector == []: 1296 1302 det = Detector() 1297 1303 det.name = "" … … 1458 1464 local_unit = None 1459 1465 exec "local_unit = storage.%s_unit" % toks[0] 1460 if local_unit !=None and units.lower() != local_unit.lower():1466 if local_unit is not None and units.lower() != local_unit.lower(): 1461 1467 if HAS_CONVERTER == True: 1462 1468 try: … … 1471 1477 self.errors.add(err_mess) 1472 1478 if optional: 1473 logg ing.info(err_mess)1479 logger.info(err_mess) 1474 1480 else: 1475 1481 raise ValueError, err_mess … … 1480 1486 self.errors.add(err_mess) 1481 1487 if optional: 1482 logg ing.info(err_mess)1488 logger.info(err_mess) 1483 1489 else: 1484 1490 raise ValueError, err_mess -
src/sas/sascalc/dataloader/readers/danse_reader.py
r9a5097c r235f514 19 19 from sas.sascalc.dataloader.data_info import Data2D, Detector 20 20 from sas.sascalc.dataloader.manipulations import reader2D_converter 21 22 logger = logging.getLogger(__name__) 21 23 22 24 # Look for unit converter … … 142 144 error.append(err) 143 145 except: 144 logg ing.info("Skipping line:%s,%s" %(data_str,146 logger.info("Skipping line:%s,%s" %(data_str, 145 147 sys.exc_value)) 146 148 … … 164 166 165 167 x_vals.append(qx) 166 if xmin ==None or qx < xmin:168 if xmin is None or qx < xmin: 167 169 xmin = qx 168 if xmax ==None or qx > xmax:170 if xmax is None or qx > xmax: 169 171 xmax = qx 170 172 … … 179 181 180 182 y_vals.append(qy) 181 if ymin ==None or qy < ymin:183 if ymin is None or qy < ymin: 182 184 ymin = qy 183 if ymax ==None or qy > ymax:185 if ymax is None or qy > ymax: 184 186 ymax = qy 185 187 … … 196 198 msg = "Skipping entry (v1.0):%s,%s" % (str(data[i_pt]), 197 199 sys.exc_value) 198 logg ing.info(msg)200 logger.info(msg) 199 201 200 202 # Get bin number … … 271 273 raise ValueError, msg 272 274 else: 273 logg ing.info("Danse_reader Reading %s \n" % filename)275 logger.info("Danse_reader Reading %s \n" % filename) 274 276 275 277 # Store loading process information -
src/sas/sascalc/dataloader/readers/red2d_reader.py
r9a5097c ra1b8fee 9 9 #copyright 2008, University of Tennessee 10 10 ###################################################################### 11 from __future__ import print_function 12 11 13 import os 12 14 import numpy as np … … 82 84 detector = Detector() 83 85 if len(output.detector) > 0: 84 print str(output.detector[0])86 print(str(output.detector[0])) 85 87 output.detector.append(detector) 86 88 -
src/sas/sascalc/dataloader/readers/tiff_reader.py
r9a5097c r959eb01 16 16 from sas.sascalc.dataloader.data_info import Data2D 17 17 from sas.sascalc.dataloader.manipulations import reader2D_converter 18 18 19 logger = logging.getLogger(__name__) 20 19 21 class Reader: 20 22 """ … … 76 78 value = float(val) 77 79 except: 78 logg ing.error("tiff_reader: had to skip a non-float point")80 logger.error("tiff_reader: had to skip a non-float point") 79 81 continue 80 82 -
src/sas/sascalc/dataloader/readers/xml_reader.py
ra235f715 r235f514 18 18 from lxml import etree 19 19 from lxml.builder import E 20 21 logger = logging.getLogger(__name__) 20 22 21 23 PARSER = etree.ETCompatXMLParser(remove_comments=True, remove_pis=False) … … 71 73 self.xmlroot = self.xmldoc.getroot() 72 74 except etree.XMLSyntaxError as xml_error: 73 logg ing.info(xml_error)75 logger.info(xml_error) 74 76 except Exception: 75 77 self.xml = None … … 88 90 self.xmlroot = etree.fromstring(tag_soup) 89 91 except etree.XMLSyntaxError as xml_error: 90 logg ing.info(xml_error)92 logger.info(xml_error) 91 93 except Exception: 92 94 self.xml = None … … 102 104 self.schemadoc = etree.parse(self.schema, parser=PARSER) 103 105 except etree.XMLSyntaxError as xml_error: 104 logg ing.info(xml_error)106 logger.info(xml_error) 105 107 except Exception: 106 108 self.schema = None … … 238 240 :param name: The name of the element to be created 239 241 """ 240 if attrib ==None:242 if attrib is None: 241 243 attrib = {} 242 244 return etree.Element(name, attrib, nsmap) … … 297 299 """ 298 300 text = str(text) 299 if attrib ==None:301 if attrib is None: 300 302 attrib = {} 301 303 elem = E(elementname, attrib, text)
Note: See TracChangeset for help on using the changeset viewer.