Changeset 8390cf6 in sasview for src/sas/sascalc/dataloader
- Timestamp: Apr 5, 2017, 3:14:10 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 2b310602, fadb757
- Parents: cb9feea8 (diff), a2e980b (diff)
- Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
- Files: 1 edited
Legend:
- Unmodified lines carry no prefix
- Added lines are prefixed with +
- Removed lines are prefixed with -
src/sas/sascalc/dataloader/readers/sesans_reader.py
--- sesans_reader.py (r9a5097c)
+++ sesans_reader.py (rcb9feea8)

 """
 SESANS reader (based on ASCII reader)

 Reader for .ses or .sesans file format

-Jurrian Bakker 
+Jurrian Bakker
 """
 import numpy as np
 ...
 _ZERO = 1e-16

+
 class Reader:
     """
     Class to load sesans files (6 columns).
     """
-    ## File type
+    # File type
     type_name = "SESANS"

-    ## Wildcards
+    # Wildcards
     type = ["SESANS files (*.ses)|*.ses",
             "SESANS files (*..sesans)|*.sesans"]
-    ## List of allowed extensions
+    # List of allowed extensions
     ext = ['.ses', '.SES', '.sesans', '.SESANS']

-    ## Flag to bypass extension check
+    # Flag to bypass extension check
     allow_all = True

     def read(self, path):
-        # print "reader triggered"
-
         """
         Load data file

         :param path: file path

         :return: SESANSData1D object, or None

         :raise RuntimeError: when the file can't be opened
         :raise ValueError: when the length of the data vectors are inconsistent
 ...
             basename = os.path.basename(path)
             _, extension = os.path.splitext(basename)
-            if self.allow_all or extension.lower() in self.ext:
-                try:
-                    # Read in binary mode since GRASP frequently has no-ascii
-                    # characters that brakes the open operation
-                    input_f = open(path,'rb')
-                except:
-                    raise RuntimeError, "sesans_reader: cannot open %s" % path
-                buff = input_f.read()
-                lines = buff.splitlines()
-                x = np.zeros(0)
-                y = np.zeros(0)
-                dy = np.zeros(0)
-                lam = np.zeros(0)
-                dlam = np.zeros(0)
-                dx = np.zeros(0)
-
-                #temp. space to sort data
-                tx = np.zeros(0)
-                ty = np.zeros(0)
-                tdy = np.zeros(0)
-                tlam = np.zeros(0)
-                tdlam = np.zeros(0)
-                tdx = np.zeros(0)
-                output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True)
-                self.filename = output.filename = basename
+            if not (self.allow_all or extension.lower() in self.ext):
+                raise RuntimeError("{} has an unrecognized file extension".format(path))
+        else:
+            raise RuntimeError("{} is not a file".format(path))
+        with open(path, 'r') as input_f:
+            # Read in binary mode since GRASP frequently has no-ascii
+            # characters that brakes the open operation
+            line = input_f.readline()
+            params = {}
+            while line.strip() != "":
+                terms = line.strip().split("\t")
+                params[terms[0].strip()] = " ".join(terms[1:]).strip()
+                line = input_f.readline()
+            headers_temp = input_f.readline().strip().split("\t")
+            headers = {}
+            for h in headers_temp:
+                temp = h.strip().split()
+                headers[h[:-1].strip()] = temp[-1][1:-1]
+            data = np.loadtxt(input_f)
+            if data.size < 1:
+                raise RuntimeError("{} is empty".format(path))
+            x = data[:, 0]
+            dx = data[:, 3]
+            lam = data[:, 4]
+            dlam = data[:, 5]
+            y = data[:, 1]
+            dy = data[:, 2]

-                paramnames=[]
-                paramvals=[]
-                zvals=[]
-                dzvals=[]
-                lamvals=[]
-                dlamvals=[]
-                Pvals=[]
-                dPvals=[]
+            lam_unit = self._header_fetch(headers, "wavelength")
+            if lam_unit == "AA":
+                lam_unit = "A"

-                for line in lines:
-                    # Initial try for CSV (split on ,)
-                    line=line.strip()
-                    toks = line.split('\t')
-                    if len(toks)==2:
-                        paramnames.append(toks[0])
-                        paramvals.append(toks[1])
-                    if len(toks)>5:
-                        zvals.append(toks[0])
-                        dzvals.append(toks[3])
-                        lamvals.append(toks[4])
-                        dlamvals.append(toks[5])
-                        Pvals.append(toks[1])
-                        dPvals.append(toks[2])
-                    else:
-                        continue
+            x, x_unit = self._unit_conversion(
+                x, lam_unit,
+                self._fetch_unit(headers, "spin echo length"))
+            dx, dx_unit = self._unit_conversion(
+                dx, lam_unit,
+                self._fetch_unit(headers, "error SEL"))
+            dlam, dlam_unit = self._unit_conversion(
+                dlam, lam_unit,
+                self._fetch_unit(headers, "error wavelength"))
+            y_unit = r'\AA^{-2} cm^{-1}'

-                x=[]
-                y=[]
-                lam=[]
-                dx=[]
-                dy=[]
-                dlam=[]
-                lam_header = lamvals[0].split()
-                data_conv_z = None
-                default_z_unit = "A"
-                data_conv_P = None
-                default_p_unit = " " # Adjust unit for axis (L^-3)
-                lam_unit = lam_header[1].replace("[","").replace("]","")
-                if lam_unit == 'AA':
-                    lam_unit = 'A'
-                varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
-                valrange=range(1, len(zvals))
-                for i in valrange:
-                    x.append(float(zvals[i]))
-                    y.append(float(Pvals[i]))
-                    lam.append(float(lamvals[i]))
-                    dy.append(float(dPvals[i]))
-                    dx.append(float(dzvals[i]))
-                    dlam.append(float(dlamvals[i]))
+            output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam,
+                            isSesans=True)
+            self.filename = output.filename = basename
+            output.xaxis(r"\rm{z}", x_unit)
+            # Adjust label to ln P/(lam^2 t), remove lam column refs
+            output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", y_unit)
+            # Store loading process information
+            output.meta_data['loader'] = self.type_name
+            output.sample.name = params["Sample"]
+            output.sample.ID = params["DataFileTitle"]

-                x,y,lam,dy,dx,dlam = [
-                    np.asarray(v, 'double')
-                    for v in (x,y,lam,dy,dx,dlam)
-                ]
+            output.sample.zacceptance = (
+                float(self._header_fetch(params, "Q_zmax")),
+                self._fetch_unit(params, "Q_zmax"))

-                input_f.close()
+            output.sample.yacceptance = (
+                float(self._header_fetch(params, "Q_ymax")),
+                self._fetch_unit(params, "Q_ymax"))
+            return output

-                output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit)
-                output.y = y
-                output.y_unit = r'\AA^{-2} cm^{-1}' # output y_unit added
-                output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit)
-                output.dy = dy
-                output.lam, output.lam_unit = self._unit_conversion(lam, lam_unit, default_z_unit)
-                output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit)
-
-                output.xaxis(r"\rm{z}", output.x_unit)
-                output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit) # Adjust label to ln P/(lam^2 t), remove lam column refs
+    @staticmethod
+    def _unit_conversion(value, value_unit, default_unit):
+        """
+        Performs unit conversion on a measurement.

-                # Store loading process information
-                output.meta_data['loader'] = self.type_name
-                #output.sample.thickness = float(paramvals[6])
-                output.sample.name = paramvals[1]
-                output.sample.ID = paramvals[0]
-                zaccept_unit_split = paramnames[7].split("[")
-                zaccept_unit = zaccept_unit_split[1].replace("]","")
-                if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1':
-                    zaccept_unit = "1/A"
-                output.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
-                output.vars = varheader
-
-                if len(output.x) < 1:
-                    raise RuntimeError, "%s is empty" % path
-                return output
-
-            else:
-                raise RuntimeError, "%s is not a file" % path
-            return None
-
-    def _unit_conversion(self, value, value_unit, default_unit):
-        if has_converter == True and value_unit != default_unit:
+        :param value: The magnitude of the measurement
+        :param value_unit: a string containing the final desired unit
+        :param default_unit: a string containing the units of the original measurement
+        :return: The magnitude of the measurement in the new units
+        """
+        # (float, string, string) -> float
+        if has_converter and value_unit != default_unit:
             data_conv_q = Converter(value_unit)
             value = data_conv_q(value, units=default_unit)
 ...
             new_unit = value_unit
         return value, new_unit
+
+    @staticmethod
+    def _header_fetch(headers, key):
+        """
+        Pull the value of a unit defined header from a dict. Example::
+
+            d = {"Length [m]": 17}
+            self._header_fetch(d, "Length") == 17
+
+        :param header: A dictionary of values
+        :param key: A string which is a prefix for one of the keys in the dict
+        :return: The value of the dictionary for the specified key
+        """
+        # (dict<string, x>, string) -> x
+        index = [k for k in headers.keys()
+                 if k.startswith(key)][0]
+        return headers[index]
+
+    @staticmethod
+    def _fetch_unit(params, key):
+        """
+        Pull the unit off of a dictionary header. Example::
+
+            d = {"Length [m]": 17}
+            self._fetch_unit(d, "Length") == "m"
+
+        :param header: A dictionary of values, where the keys are strings
+        with the units for the values appended onto the string within square
+        brackets (See the example above)
+        :param key: A string with the prefix of the dictionary key whose unit
+        is being fetched
+        :return: A string containing the unit specified in the header
+        """
+        # (dict<string, _>, string) -> string
+        index = [k for k in params.keys()
+                 if k.startswith(key)][0]
+        unit = index.strip().split()[-1][1:-1]
+        if unit.startswith(r"\A"):
+            unit = "1/A"
+        return unit
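For reference, the new reader keeps the file header in plain dicts whose keys carry the unit in square brackets (for example "spin echo length [A]"), and the _header_fetch/_fetch_unit helpers resolve a key by prefix and strip the unit out of the bracketed suffix. The snippet below is a minimal standalone sketch of that lookup; the sample dict and the module-level function names are invented for illustration and are not part of the changeset.

    # Standalone sketch of the prefix/unit lookup added in this changeset.
    # The sample header dict below is invented for illustration only.
    def header_fetch(headers, key):
        # Return the value of the first key that starts with the prefix,
        # e.g. "Length [m]" matches the prefix "Length".
        index = [k for k in headers if k.startswith(key)][0]
        return headers[index]

    def fetch_unit(headers, key):
        # Take the matching key and pull the unit out of its trailing
        # "[...]" block; "\A"-style inverse lengths are normalized to "1/A".
        index = [k for k in headers if k.startswith(key)][0]
        unit = index.strip().split()[-1][1:-1]
        if unit.startswith("\\A"):
            unit = "1/A"
        return unit

    sample = {"spin echo length [A]": 102.5, "Q_zmax [\\A^-1]": 0.23}
    print(header_fetch(sample, "spin echo length"))  # 102.5
    print(fetch_unit(sample, "spin echo length"))    # A
    print(fetch_unit(sample, "Q_zmax"))              # 1/A

Note that, as committed, the helpers take the first matching key, so a prefix that matches several header columns (for example "error") silently picks whichever comes first.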