Changeset 2f85af7 in sasview for src/sas/sascalc/dataloader
- Timestamp: Jul 31, 2017 7:25:56 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: a78433dd
- Parents: fafe52a
- Location: src/sas/sascalc/dataloader
- Files: 2 edited
src/sas/sascalc/dataloader/file_reader_base_class.py (r9d786e5 → r2f85af7)

                 dataset.y_bins = dataset.qy_data[0::int(n_cols)]
                 dataset.x_bins = dataset.qx_data[:int(n_cols)]
-                dataset.data = dataset.data.flatten()
-            else:
-                dataset.y_bins = []
-                dataset.x_bins = []
-                dataset.data = dataset.data.flatten()
+            dataset.data = dataset.data.flatten()
             final_list.append(dataset)
         self.output = final_list
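In effect, `dataset.data` is now flattened regardless of whether the bin vectors were computed, and the `else` branch that reset the bins to empty lists is gone. A minimal standalone sketch of the surviving logic (the 2x3 grid and the `FakeDataset` holder are invented for illustration, not part of the changeset):

    import numpy as np

    class FakeDataset(object):
        """Stand-in for the loader's dataset object (illustrative only)."""
        pass

    dataset = FakeDataset()
    n_cols = 3
    # qx repeats within each row, qy is constant along a row (2 x 3 grid).
    dataset.qx_data = np.array([0.1, 0.2, 0.3, 0.1, 0.2, 0.3])
    dataset.qy_data = np.array([0.5, 0.5, 0.5, 0.7, 0.7, 0.7])
    dataset.data = np.arange(6.0).reshape(2, 3)

    # Bin centres: every n_cols-th qy gives the row centres, the first
    # n_cols qx values give the column centres.
    dataset.y_bins = dataset.qy_data[0::int(n_cols)]   # [0.5, 0.7]
    dataset.x_bins = dataset.qx_data[:int(n_cols)]     # [0.1, 0.2, 0.3]

    # After this changeset the flatten is unconditional.
    dataset.data = dataset.data.flatten()
    print(dataset.data)   # [0. 1. 2. 3. 4. 5.]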
src/sas/sascalc/dataloader/readers/red2d_reader.py (r959eb01 → r2f85af7)
 #This software was developed by the University of Tennessee as part of the
 #Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
-#project funded by the US National Science Foundation. 
+#project funded by the US National Science Foundation.
 #See the license text in license.txt
 #copyright 2008, University of Tennessee
… …
 import numpy as np
 import math
-from sas.sascalc.dataloader.data_info import Data2D, Detector
+from sas.sascalc.dataloader.data_info import plottable_2D, DataInfo, Detector
+from sas.sascalc.dataloader.file_reader_base_class import FileReader
+from sas.sascalc.dataloader.loader_exceptions import FileContentsException
… …
 except:
     has_converter = False


 def check_point(x_point):
… …
     except:
         return 0


-class Reader :
+class Reader(FileReader):
     """ Simple data reader for Igor data files """
     ## File type
… …
     ## Extension
     ext = ['.DAT', '.dat']

     def write(self, filename, data):
         """
         Write to .dat

         :param filename: file name to write
         :param data: data2D
… …
         import time
         # Write the file
-        fd = open(filename, 'w')
-        t = time.localtime()
-        time_str = time.strftime("%H:%M on %b %d %y", t)
-
-        header_str = "Data columns are Qx - Qy - I(Qx,Qy)\n\nASCII data"
-        header_str += " created at %s \n\n" % time_str
-        # simple 2D header
-        fd.write(header_str)
-        # write qx qy I values
-        for i in range(len(data.data)):
-            fd.write("%g %g %g\n" % (data.qx_data[i],
-                                     data.qy_data[i],
-                                     data.data[i]))
-        # close
-        fd.close()
-
-    def read(self, filename=None):
-        """ Read file """
-        if not os.path.isfile(filename):
-            raise ValueError, \
-                "Specified file %s is not a regular file" % filename
-
+        try:
+            fd = open(filename, 'w')
+            t = time.localtime()
+            time_str = time.strftime("%H:%M on %b %d %y", t)
+
+            header_str = "Data columns are Qx - Qy - I(Qx,Qy)\n\nASCII data"
+            header_str += " created at %s \n\n" % time_str
+            # simple 2D header
+            fd.write(header_str)
+            # write qx qy I values
+            for i in range(len(data.data)):
+                fd.write("%g %g %g\n" % (data.qx_data[i],
+                                         data.qy_data[i],
+                                         data.data[i]))
+        finally:
+            fd.close()
+
+    def get_file_contents(self):
         # Read file
-        f = open(filename, 'r')
-        buf = f.read()
-        f.close()
+        buf = self.f_open.read()
+        self.f_open.close()
         # Instantiate data object
-        output = Data2D()
-        output.filename = os.path.basename(filename)
-        detector = Detector()
-        if len(output.detector) > 0:
-            print str(output.detector[0])
-        output.detector.append(detector)
+        self.current_dataset = plottable_2D()
+        self.current_datainfo = DataInfo()
+        self.current_datainfo.filename = os.path.basename(self.f_open.name)
+        self.current_datainfo.detector.append(Detector())

         # Get content
-        dataStarted = False
+        data_started = False

         ## Defaults
         lines = buf.split('\n')
         x = []
         y = []

         wavelength = None
         distance = None
         transmission = None

         pixel_x = None
         pixel_y = None

-        isInfo = False
-        isCenter = False
-
-        data_conv_q = None
-        data_conv_i = None
-
-        # Set units: This is the unit assumed for Q and I in the data file.
-        if has_converter == True and output.Q_unit != '1/A':
-            data_conv_q = Converter('1/A')
-            # Test it
-            data_conv_q(1.0, output.Q_unit)
-
-        if has_converter == True and output.I_unit != '1/cm':
-            data_conv_i = Converter('1/cm')
-            # Test it
-            data_conv_i(1.0, output.I_unit)
-
+        is_info = False
+        is_center = False
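The rewritten `write()` above wraps the file handle in `try`/`finally` so it is closed even if formatting fails partway through the loop. A `with` block gives the same guarantee more idiomatically; a minimal sketch under that assumption (this `write_dat` helper is illustrative, not part of the changeset):

    import time

    def write_dat(filename, qx, qy, intensity):
        """Write Qx - Qy - I(Qx,Qy) columns in the layout Reader.write uses."""
        time_str = time.strftime("%H:%M on %b %d %y", time.localtime())
        header = ("Data columns are Qx - Qy - I(Qx,Qy)\n\n"
                  "ASCII data created at %s \n\n" % time_str)
        # 'with' closes fd on success or error, matching the try/finally intent.
        with open(filename, 'w') as fd:
            fd.write(header)
            for x, y, i in zip(qx, qy, intensity):
                fd.write("%g %g %g\n" % (x, y, i))

    write_dat("example.dat", [0.1, 0.2], [0.1, 0.2], [10.0, 12.5])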

         # Remove the last lines before the for loop if the lines are empty
         # to calculate the exact number of data points
… …
         ## Reading the header applies only to IGOR/NIST 2D q_map data files
         # Find setup info line
-        if isInfo:
-            isInfo = False
+        if is_info:
+            is_info = False
             line_toks = line.split()
             # Wavelength in Angstrom
… …
                 # Units
                 if has_converter == True and \
-                output.source.wavelength_unit != 'A':
+                self.current_datainfo.source.wavelength_unit != 'A':
                     conv = Converter('A')
                     wavelength = conv(wavelength,
-                                      units=output.source.wavelength_unit)
+                                      units=self.current_datainfo.source.wavelength_unit)
             except:
                 #Not required
… …
                 distance = float(line_toks[3])
                 # Units
-                if has_converter == True and detector.distance_unit != 'm':
+                if has_converter == True and self.current_datainfo.detector[0].distance_unit != 'm':
                     conv = Converter('m')
-                    distance = conv(distance, units=detector.distance_unit)
+                    distance = conv(distance,
+                                    units=self.current_datainfo.detector[0].distance_unit)
             except:
                 #Not required
                 pass

         # Distance in meters
         try:
… …
                 #Not required
                 pass

         if line.count("LAMBDA") > 0:
-            isInfo = True
+            is_info = True

         # Find center info line
-        if isCenter:
-            isCenter = False
+        if is_center:
+            is_center = False
             line_toks = line.split()
             # Center in bin number
… …
         if line.count("BCENT") > 0:
-            isCenter = True
+            is_center = True
         # Check version
         if line.count("Data columns") > 0:
… …
         # Find data start
         if line.count("ASCII data") > 0:
-            dataStarted = True
+            data_started = True
             continue

         ## Read and get data.
-        if dataStarted == True:
+        if data_started == True:
             line_toks = line.split()
             if len(line_toks) == 0:
                 #empty line
                 continue
-            # the number of columns must be stayed same 
+            # the number of columns must be stayed same
             col_num = len(line_toks)
             break
… …
         # index for lines_array
         lines_index = np.arange(len(lines))

         # get the data lines
         data_lines = lines_array[lines_index >= (line_num - 1)]
… …
         # split all data to one big list w/" "separator
         data_list = data_list.split()

         # Check if the size is consistent with data, otherwise
         #try the tab(\t) separator
… …
             data_point = data_array.reshape(row_num, col_num).transpose()
         except:
-            msg = "red2d_reader : Can't read this file: Not a proper file format"
-            raise ValueError, msg
+            msg = "red2d_reader can't read this file: Incorrect number of data points provided."
+            raise FileContentsException(msg)
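The reshape guarded by the new `FileContentsException` is the step that turns the flat token stream into per-column arrays. A toy sketch of the same pattern (the token values are invented for illustration):

    import numpy as np

    # Invented flat token stream: four (qx, qy, I) records, col_num = 3.
    col_num = 3
    tokens = "0.1 0.5 10 0.2 0.5 12 0.1 0.7 11 0.2 0.7 13".split()
    data_array = np.array(tokens, dtype=np.float64)

    try:
        row_num = len(data_array) // col_num
        # reshape raises ValueError when the token count is not an exact
        # multiple of col_num; transpose turns records into column arrays.
        data_point = data_array.reshape(row_num, col_num).transpose()
    except ValueError:
        # red2d_reader raises FileContentsException(msg) at this point.
        raise

    qx_data, qy_data, data = data_point[0], data_point[1], data_point[2]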
         ## Get the all data: Let's HARDcoding; Todo find better way
         # Defaults
… …
         #if col_num > (6 + ver): mask[data_point[(6 + ver)] < 1] = False
         q_data = np.sqrt(qx_data*qx_data+qy_data*qy_data+qz_data*qz_data)

-        # Extra protection(it is needed for some data files): 
+        # Extra protection(it is needed for some data files):
         # If all mask elements are False, put all True
         if not mask.any():
             mask[mask == False] = True

         # Store limits of the image in q space
         xmin = np.min(qx_data)
… …
         ymax = np.max(qy_data)

-        # units
-        if has_converter == True and output.Q_unit != '1/A':
-            xmin = data_conv_q(xmin, units=output.Q_unit)
-            xmax = data_conv_q(xmax, units=output.Q_unit)
-            ymin = data_conv_q(ymin, units=output.Q_unit)
-            ymax = data_conv_q(ymax, units=output.Q_unit)
-
         ## calculate the range of the qx and qy_data
         x_size = math.fabs(xmax - xmin)
         y_size = math.fabs(ymax - ymin)

         # calculate the number of pixels in the each axes
         npix_y = math.floor(math.sqrt(len(data)))
         npix_x = math.floor(len(data) / npix_y)

         # calculate the size of bins
         xstep = x_size / (npix_x - 1)
         ystep = y_size / (npix_y - 1)

         # store x and y axis bin centers in q space
         x_bins = np.arange(xmin, xmax + xstep, xstep)
         y_bins = np.arange(ymin, ymax + ystep, ystep)

         # get the limits of q values
         xmin = xmin - xstep / 2
… …
         ymin = ymin - ystep / 2
         ymax = ymax + ystep / 2

         #Store data in outputs
         #TODO: Check the lengths
-        output.data = data
+        self.current_dataset.data = data
         if (err_data == 1).all():
-            output.err_data = np.sqrt(np.abs(data))
-            output.err_data[output.err_data == 0.0] = 1.0
+            self.current_dataset.err_data = np.sqrt(np.abs(data))
+            self.current_dataset.err_data[self.current_dataset.err_data == 0.0] = 1.0
         else:
-            output.err_data = err_data
-
-        output.qx_data = qx_data
-        output.qy_data = qy_data
-        output.q_data = q_data
-        output.mask = mask
-
-        output.x_bins = x_bins
-        output.y_bins = y_bins
-
-        output.xmin = xmin
-        output.xmax = xmax
-        output.ymin = ymin
-        output.ymax = ymax
-
-        output.source.wavelength = wavelength
+            self.current_dataset.err_data = err_data
+
+        self.current_dataset.qx_data = qx_data
+        self.current_dataset.qy_data = qy_data
+        self.current_dataset.q_data = q_data
+        self.current_dataset.mask = mask
+
+        self.current_dataset.x_bins = x_bins
+        self.current_dataset.y_bins = y_bins
+
+        self.current_dataset.xmin = xmin
+        self.current_dataset.xmax = xmax
+        self.current_dataset.ymin = ymin
+        self.current_dataset.ymax = ymax
+
+        self.current_datainfo.source.wavelength = wavelength

         # Store pixel size in mm
-        detector.pixel_size.x = pixel_x
-        detector.pixel_size.y = pixel_y
+        self.current_datainfo.detector[0].pixel_size.x = pixel_x
+        self.current_datainfo.detector[0].pixel_size.y = pixel_y

         # Store the sample to detector distance
-        detector.distance = distance
+        self.current_datainfo.detector[0].distance = distance
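The bin-centre arithmetic above assumes a roughly square detector grid: sqrt(N) rows, N divided by that for columns. A standalone sketch of the same computation (the 16-point grid is invented for illustration):

    import math
    import numpy as np

    # Invented 4x4 grid of q values standing in for qx_data/qy_data.
    qx_data = np.tile(np.linspace(0.0, 0.3, 4), 4)
    qy_data = np.repeat(np.linspace(0.0, 0.3, 4), 4)
    data = np.ones(16)

    xmin, xmax = np.min(qx_data), np.max(qx_data)
    ymin, ymax = np.min(qy_data), np.max(qy_data)

    # Assume a square-ish image: sqrt(N) rows, N / rows columns.
    npix_y = math.floor(math.sqrt(len(data)))      # 4
    npix_x = math.floor(len(data) / npix_y)        # 4

    # Bin width from the span and pixel count, then bin centres.
    xstep = (xmax - xmin) / (npix_x - 1)
    ystep = (ymax - ymin) / (npix_y - 1)
    x_bins = np.arange(xmin, xmax + xstep, xstep)
    y_bins = np.arange(ymin, ymax + ystep, ystep)

    # Outer limits sit half a bin beyond the outermost centres.
    xmin, xmax = xmin - xstep / 2, xmax + xstep / 2
    ymin, ymax = ymin - ystep / 2, ymax + ystep / 2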

         # optional data: if all of dq data == 0, do not pass to output
         if len(dqx_data) == len(qx_data) and dqx_data.any() != 0:
… …
             cos_th = qx_data / diag
             sin_th = qy_data / diag
-            output.dqx_data = np.sqrt((dqx_data * cos_th) * \
-                                      (dqx_data * cos_th) \
-                                      + (dqy_data * sin_th) * \
-                                      (dqy_data * sin_th))
-            output.dqy_data = np.sqrt((dqx_data * sin_th) * \
-                                      (dqx_data * sin_th) \
-                                      + (dqy_data * cos_th) * \
-                                      (dqy_data * cos_th))
+            self.current_dataset.dqx_data = np.sqrt((dqx_data * cos_th) * \
+                                            (dqx_data * cos_th) \
+                                            + (dqy_data * sin_th) * \
+                                            (dqy_data * sin_th))
+            self.current_dataset.dqy_data = np.sqrt((dqx_data * sin_th) * \
+                                            (dqx_data * sin_th) \
+                                            + (dqy_data * cos_th) * \
+                                            (dqy_data * cos_th))
         else:
-            output.dqx_data = dqx_data
-            output.dqy_data = dqy_data
+            self.current_dataset.dqx_data = dqx_data
+            self.current_dataset.dqy_data = dqy_data

         # Units of axes
-        if data_conv_q is not None:
-            output.xaxis("\\rm{Q_{x}}", output.Q_unit)
-            output.yaxis("\\rm{Q_{y}}", output.Q_unit)
-        else:
-            output.xaxis("\\rm{Q_{x}}", 'A^{-1}')
-            output.yaxis("\\rm{Q_{y}}", 'A^{-1}')
-        if data_conv_i is not None:
-            output.zaxis("\\rm{Intensity}", output.I_unit)
-        else:
-            output.zaxis("\\rm{Intensity}", "cm^{-1}")
+        self.current_dataset.xaxis("\\rm{Q_{x}}", 'A^{-1}')
+        self.current_dataset.yaxis("\\rm{Q_{y}}", 'A^{-1}')
+        self.current_dataset.zaxis("\\rm{Intensity}", "cm^{-1}")

         # Store loading process information
-        output.meta_data['loader'] = self.type_name
-
-        return output
+        self.current_datainfo.meta_data['loader'] = self.type_name
+
+        self.send_to_output()
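The resolution branch above projects the per-point (dqx, dqy) widths onto each q vector's direction, with cos θ = qx/|q| and sin θ = qy/|q|, combining the components in quadrature. A self-contained sketch of that rotation (the input arrays are invented for illustration):

    import numpy as np

    # Invented q points and per-point resolution widths.
    qx_data = np.array([0.10, 0.00, 0.07])
    qy_data = np.array([0.00, 0.10, 0.07])
    dqx_data = np.array([0.01, 0.01, 0.01])
    dqy_data = np.array([0.02, 0.02, 0.02])

    # Direction cosines of each q vector.
    diag = np.sqrt(qx_data * qx_data + qy_data * qy_data)
    cos_th = qx_data / diag
    sin_th = qy_data / diag

    # Combine the x/y widths in quadrature along the rotated axes,
    # as red2d_reader does after this changeset.
    dqx_rot = np.sqrt((dqx_data * cos_th) ** 2 + (dqy_data * sin_th) ** 2)
    dqy_rot = np.sqrt((dqx_data * sin_th) ** 2 + (dqy_data * cos_th) ** 2)
    print(dqx_rot, dqy_rot)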