Changeset b2c28a5 in sasview for src/sas/sascalc/dataloader
- Timestamp: Aug 1, 2017, 6:36:13 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 248ff73
- Parents: 713a047
- File: 1 edited
Legend:
- Unmodified lines carry no marker
- Added lines are marked with +
- Removed lines are marked with -
- Lines shown as removed and re-added with identical text differ only in trailing whitespace
src/sas/sascalc/dataloader/readers/sesans_reader.py
--- src/sas/sascalc/dataloader/readers/sesans_reader.py (r9a5097c)
+++ src/sas/sascalc/dataloader/readers/sesans_reader.py (rb2c28a5)
@@ -1,12 +1,14 @@
 """
     SESANS reader (based on ASCII reader)

     Reader for .ses or .sesans file format
-
-    Jurrian Bakker
+
+    Jurrian Bakker
 """
 import numpy as np
 import os
-from sas.sascalc.dataloader.data_info import Data1D
+from sas.sascalc.dataloader.file_reader_base_class import FileReader
+from sas.sascalc.dataloader.data_info import plottable_1D, DataInfo
+from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DataReaderException

 # Check whether we have a converter available
@@ -18,5 +20,5 @@
 _ZERO = 1e-16

-class Reader:
+class Reader(FileReader):
     """
     Class to load sesans files (6 columns).
@@ -24,5 +26,5 @@
     ## File type
     type_name = "SESANS"
-
+
     ## Wildcards
     type = ["SESANS files (*.ses)|*.ses",
@@ -30,138 +32,122 @@
     ## List of allowed extensions
     ext = ['.ses', '.SES', '.sesans', '.SESANS']

     ## Flag to bypass extension check
-    allow_all = True
-
-    def read(self, path):
-
-#        print "reader triggered"
-
-        """
-        Load data file
-
-        :param path: file path
-
-        :return: SESANSData1D object, or None
-
-        :raise RuntimeError: when the file can't be opened
-        :raise ValueError: when the length of the data vectors are inconsistent
-        """
-        if os.path.isfile(path):
-            basename = os.path.basename(path)
-            _, extension = os.path.splitext(basename)
-            if self.allow_all or extension.lower() in self.ext:
-                try:
-                    # Read in binary mode since GRASP frequently has no-ascii
-                    # characters that brakes the open operation
-                    input_f = open(path,'rb')
-                except:
-                    raise RuntimeError, "sesans_reader: cannot open %s" % path
-                buff = input_f.read()
-                lines = buff.splitlines()
-                x = np.zeros(0)
-                y = np.zeros(0)
-                dy = np.zeros(0)
-                lam = np.zeros(0)
-                dlam = np.zeros(0)
-                dx = np.zeros(0)
-
-                #temp. space to sort data
-                tx = np.zeros(0)
-                ty = np.zeros(0)
-                tdy = np.zeros(0)
-                tlam = np.zeros(0)
-                tdlam = np.zeros(0)
-                tdx = np.zeros(0)
-                output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True)
-                self.filename = output.filename = basename
-
-                paramnames=[]
-                paramvals=[]
-                zvals=[]
-                dzvals=[]
-                lamvals=[]
-                dlamvals=[]
-                Pvals=[]
-                dPvals=[]
-
-                for line in lines:
-                    # Initial try for CSV (split on ,)
-                    line=line.strip()
-                    toks = line.split('\t')
-                    if len(toks)==2:
-                        paramnames.append(toks[0])
-                        paramvals.append(toks[1])
-                    if len(toks)>5:
-                        zvals.append(toks[0])
-                        dzvals.append(toks[3])
-                        lamvals.append(toks[4])
-                        dlamvals.append(toks[5])
-                        Pvals.append(toks[1])
-                        dPvals.append(toks[2])
-                    else:
-                        continue
-
-                x=[]
-                y=[]
-                lam=[]
-                dx=[]
-                dy=[]
-                dlam=[]
-                lam_header = lamvals[0].split()
-                data_conv_z = None
-                default_z_unit = "A"
-                data_conv_P = None
-                default_p_unit = " " # Adjust unit for axis (L^-3)
-                lam_unit = lam_header[1].replace("[","").replace("]","")
-                if lam_unit == 'AA':
-                    lam_unit = 'A'
-                varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
-                valrange=range(1, len(zvals))
-                for i in valrange:
-                    x.append(float(zvals[i]))
-                    y.append(float(Pvals[i]))
-                    lam.append(float(lamvals[i]))
-                    dy.append(float(dPvals[i]))
-                    dx.append(float(dzvals[i]))
-                    dlam.append(float(dlamvals[i]))
-
-                x,y,lam,dy,dx,dlam = [
-                    np.asarray(v, 'double')
-                    for v in (x,y,lam,dy,dx,dlam)
-                ]
-
-                input_f.close()
-
-                output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit)
-                output.y = y
-                output.y_unit = r'\AA^{-2} cm^{-1}' # output y_unit added
-                output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit)
-                output.dy = dy
-                output.lam, output.lam_unit = self._unit_conversion(lam, lam_unit, default_z_unit)
-                output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit)
-
-                output.xaxis(r"\rm{z}", output.x_unit)
-                output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit) # Adjust label to ln P/(lam^2 t), remove lam column refs
-
-                # Store loading process information
-                output.meta_data['loader'] = self.type_name
-                #output.sample.thickness = float(paramvals[6])
-                output.sample.name = paramvals[1]
-                output.sample.ID = paramvals[0]
-                zaccept_unit_split = paramnames[7].split("[")
-                zaccept_unit = zaccept_unit_split[1].replace("]","")
-                if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1':
-                    zaccept_unit = "1/A"
-                output.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
-                output.vars = varheader
-
-                if len(output.x) < 1:
-                    raise RuntimeError, "%s is empty" % path
-                return output
-
-        else:
-            raise RuntimeError, "%s is not a file" % path
-        return None
+    allow_all = False
+
+    def get_file_contents(self):
+        self.current_datainfo = DataInfo()
+        self.current_dataset = plottable_1D(np.array([]), np.array([]))
+        self.current_datainfo.isSesans = True
+        self.output = []
+
+        error_message = ""
+        loaded_correctly = True
+
+        import pdb; pdb.set_trace()
+
+        buff = self.f_open.read()
+        lines = buff.splitlines()
+
+        self.current_datainfo.filename = os.path.basename(self.f_open.name)
+
+        paramnames=[]
+        paramvals=[]
+        zvals=[]
+        dzvals=[]
+        lamvals=[]
+        dlamvals=[]
+        Pvals=[]
+        dPvals=[]
+
+        for line in lines:
+            # Initial try for CSV (split on ,)
+            line=line.strip()
+            toks = line.split('\t')
+            if len(toks)==2:
+                paramnames.append(toks[0])
+                paramvals.append(toks[1])
+            elif len(toks)>5:
+                zvals.append(toks[0])
+                dzvals.append(toks[3])
+                lamvals.append(toks[4])
+                dlamvals.append(toks[5])
+                Pvals.append(toks[1])
+                dPvals.append(toks[2])
+            else:
+                continue
+
+        x=[]
+        y=[]
+        lam=[]
+        dx=[]
+        dy=[]
+        dlam=[]
+        lam_header = lamvals[0].split()
+        data_conv_z = None
+        default_z_unit = "A"
+        data_conv_P = None
+        default_p_unit = " " # Adjust unit for axis (L^-3)
+        lam_unit = lam_header[1].replace("[","").replace("]","")
+        if lam_unit == 'AA':
+            lam_unit = 'A'
+        varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
+        valrange=range(1, len(zvals))
+        try:
+            for i in valrange:
+                x.append(float(zvals[i]))
+                y.append(float(Pvals[i]))
+                lam.append(float(lamvals[i]))
+                dy.append(float(dPvals[i]))
+                dx.append(float(dzvals[i]))
+                dlam.append(float(dlamvals[i]))
+        except ValueError as val_err:
+            err_msg = "Invalid float"
+            err_msg += ":".join(val_err.message.split(":")[1:])
+            raise FileContentsException(err_msg)
+
+        x, y, lam, dy, dx, dlam = [
+            np.asarray(v, 'double')
+            for v in (x, y, lam, dy, dx, dlam)
+        ]
+
+        self.f_open.close()
+
+        self.current_dataset.x, self.current_dataset._xunit = self._unit_conversion(x, lam_unit, default_z_unit)
+        self.current_dataset.y = y
+        self.current_dataset._yunit = r'\AA^{-2} cm^{-1}' # output y_unit added
+        self.current_dataset.dx, _ = self._unit_conversion(dx, lam_unit, default_z_unit)
+        self.current_dataset.dy = dy
+        self.current_dataset.lam, _ = self._unit_conversion(lam, lam_unit, default_z_unit)
+        self.current_dataset.dlam, _ = self._unit_conversion(dlam, lam_unit, default_z_unit)
+
+        self.current_dataset.xaxis(r"\rm{z}", self.current_dataset._xunit)
+        self.current_dataset.yaxis(r"\rm{ln(P)/(t \lambda^2)}", self.current_dataset._yunit) # Adjust label to ln P/(lam^2 t), remove lam column refs
+
+        # Store loading process information
+        self.current_datainfo.meta_data['loader'] = self.type_name
+        try:
+            self.current_datainfo.sample.thickness = float(paramvals[6])
+        except ValueError as val_err:
+            loaded_correctly = False
+            error_message += "\nInvalid sample thickness '{}'".format(paramvals[6])
+
+        self.current_datainfo.sample.name = paramvals[1]
+        self.current_datainfo.sample.ID = paramvals[0]
+        zaccept_unit_split = paramnames[7].split("[")
+        zaccept_unit = zaccept_unit_split[1].replace("]","")
+        if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1':
+            zaccept_unit = "1/A"
+        self.current_datainfo.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
+
+        self.current_datainfo.vars = varheader
+
+        if len(self.current_dataset.x) < 1:
+            raise FileContentsException("No data points in file.")
+
+        self.send_to_output()
+
+        if not loaded_correctly:
+            raise DataReaderException(error_message)

     def _unit_conversion(self, value, value_unit, default_unit):
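For orientation, the sketch below shows how a FileReader-based reader like this one is typically driven after the refactor. It is illustrative only and not part of the changeset: it assumes that FileReader.read() (defined in file_reader_base_class, which is not shown here) opens the file into self.f_open, calls get_file_contents(), and returns the list populated by send_to_output(), and that Loader dispatches .ses files to this reader. Note also that the committed get_file_contents() still contains an import pdb; pdb.set_trace() call, so actually loading a file at this exact revision would drop into the debugger.

    # Illustrative sketch only -- assumes the FileReader/Loader behaviour
    # described above; it is not part of this changeset.
    from sas.sascalc.dataloader.loader import Loader

    loader = Loader()
    # Loader picks the reader from the '.ses' extension.  With this refactor
    # the SESANS Reader no longer defines read(path) itself; the inherited
    # FileReader.read() opens the file, calls get_file_contents(), and
    # returns the data accumulated via send_to_output() (assumed to be a
    # list of Data1D objects on this branch).
    data_list = loader.load("example.ses")  # hypothetical input file

    for data in data_list:
        # x is the spin-echo length z; y is ln(P)/(t*lambda^2), as set by
        # the xaxis()/yaxis() calls in the diff above.
        print(data.filename, len(data.x))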
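The file layout this parser expects can be read off the parsing loop above: tab-separated name/value header lines (exactly two tokens each), followed by a tab-separated block of at least six columns whose first row is treated as the column-header row, with units given in square brackets. The snippet below writes a hypothetical file that the new get_file_contents() would accept; every parameter name is invented, since the reader only cares about positions (paramvals[0] becomes sample.ID, [1] the sample name, [6] the thickness, [7] the z-acceptance, whose unit is taken from the bracketed part of paramnames[7]). Real SESANS files may use different header names.

    # Hypothetical .ses content, inferred from the parsing loop in this
    # changeset; parameter names are made up and real SESANS files may
    # use different ones.
    header = [
        ("sample_ID", "example01"),      # paramvals[0] -> sample.ID
        ("sample_name", "demo sample"),  # paramvals[1] -> sample.name
        ("param2", "-"),
        ("param3", "-"),
        ("param4", "-"),
        ("param5", "-"),
        ("thickness", "0.2"),            # paramvals[6] -> sample.thickness
        ("Q_zmax [\\A^-1]", "0.05"),     # paramvals[7] -> sample.zacceptance
    ]
    # Column order assumed by the loop: z, P, dP, dz, wavelength, dwavelength.
    columns = "z [A]\tP\tdP\tdz [A]\twavelength [A]\tdwavelength [A]"
    rows = [
        "100.0\t0.98\t0.01\t5.0\t2.0\t0.2",
        "200.0\t0.95\t0.01\t5.0\t2.0\t0.2",
    ]

    with open("demo.ses", "w") as handle:
        for name, value in header:
            handle.write("%s\t%s\n" % (name, value))
        handle.write(columns + "\n")
        handle.write("\n".join(rows) + "\n")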