Changeset 7d6351e in sasview
- Timestamp:
  Apr 27, 2012 11:22:31 AM
- Branches:
  master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
  4a96b8b
- Parents:
  f60a8c2
- Location:
  sansdataloader/src/sans/dataloader/readers
- Files:
  9 edited
Legend:
- Unmodified lines are shown without a prefix
- Added lines are prefixed with +
- Removed lines are prefixed with -
sansdataloader/src/sans/dataloader/readers/IgorReader.py
(ad8034f → 7d6351e)

 """
+IGOR 2D reduced file reader
+"""
 ############################################################################
 #This software was developed by the University of Tennessee as part of the
…
 #copyright 2008, University of Tennessee
 #############################################################################
-
-"""
-IGOR 2D reduced file reader
-"""
-
 import os
-#import sys
 import numpy
 import math
…
 has_converter = False
 
+
 class Reader:
     """ Simple data reader for Igor data files """
     ## File type
     type_name = "IGOR 2D"
     ## Wildcards
     type = ["IGOR 2D files (*.ASC)|*.ASC"]
…
     ext=['.ASC', '.asc']
 
     def read(self, filename=None):
         """ Read file """
         if not os.path.isfile(filename):
…
         # Read file
         f = open(filename, 'r')
         buf = f.read()
…
         output.filename = os.path.basename(filename)
         detector = Detector()
-        if len(output.detector)>0: print str(output.detector[0])
+        if len(output.detector) > 0:
+            print str(output.detector[0])
         output.detector.append(detector)
 
         # Get content
         dataStarted = False
-
 
         lines = buf.split('\n')
…
             data_conv_i = Converter('1/cm')
             # Test it
             data_conv_i(1.0, output.I_unit)
 
         for line in lines:
…
             i_tot_row += 1
-        i_tot_row =math.ceil(math.sqrt(i_tot_row))-1
+        i_tot_row = math.ceil(math.sqrt(i_tot_row)) - 1
         #print "i_tot", i_tot_row
-        size_x = i_tot_row #192#128
-        size_y = i_tot_row #192#128
+        size_x = i_tot_row  # 192#128
+        size_y = i_tot_row  # 192#128
         output.data = numpy.zeros([size_x, size_y])
         output.err_data = numpy.zeros([size_x, size_y])
 
         #Read Header and 2D data
…
                     raise ValueError, msg
 
-                if line.count("LAMBDA") >0:
+                if line.count("LAMBDA") > 0:
                     isInfo = True
 
             # Find center info line
             if isCenter:
                 isCenter = False
                 line_toks = line.split()
 
                 # Center in bin number: Must substrate 1 because
                 #the index starts from 1
-                center_x = float(line_toks[0]) -1
-                center_y = float(line_toks[1]) -1
+                center_x = float(line_toks[0]) - 1
+                center_y = float(line_toks[1]) - 1
 
-            if line.count("BCENT") >0:
+            if line.count("BCENT") > 0:
                 isCenter = True
 
-
             # Find data start
             if line.count("***")>0:
…
             # Get bin number
-            if math.fmod(itot, i_tot_row) ==0:
+            if math.fmod(itot, i_tot_row) == 0:
                 i_x = 0
                 i_y += 1
…
             output.data[i_y][i_x] = value
             ncounts += 1
 
             # Det 640 x 640 mm
             # Q = 4pi/lambda sin(theta/2)
             # Bin size is 0.5 cm
             #REmoved +1 from theta = (i_x-center_x+1)*0.5 / distance
             # / 100.0 and
             #REmoved +1 from theta = (i_y-center_y+1)*0.5 /
…
             #ToDo: Need complete check if the following
             # covert process is consistent with fitting.py.
-            theta = (i_x -center_x)*0.5 / distance / 100.0
-            qx = 4.0 *math.pi/wavelength * math.sin(theta/2.0)
+            theta = (i_x - center_x) * 0.5 / distance / 100.0
+            qx = 4.0 * math.pi / wavelength * math.sin(theta/2.0)
 
             if has_converter == True and output.Q_unit != '1/A':
                 qx = data_conv_q(qx, units=output.Q_unit)
 
-            if xmin ==None or qx<xmin:
+            if xmin == None or qx < xmin:
                 xmin = qx
-            if xmax ==None or qx>xmax:
+            if xmax == None or qx > xmax:
                 xmax = qx
 
-            theta = (i_y -center_y)*0.5 / distance / 100.0
-            qy = 4.0 *math.pi/wavelength * math.sin(theta/2.0)
+            theta = (i_y - center_y) * 0.5 / distance / 100.0
+            qy = 4.0 * math.pi / wavelength * math.sin(theta / 2.0)
 
             if has_converter == True and output.Q_unit != '1/A':
                 qy = data_conv_q(qy, units=output.Q_unit)
 
-            if ymin ==None or qy<ymin:
+            if ymin == None or qy < ymin:
                 ymin = qy
-            if ymax ==None or qy>ymax:
+            if ymax == None or qy > ymax:
                 ymax = qy
…
         theta = 0.25 / distance / 100.0
-        xstep = 4.0 *math.pi/wavelength * math.sin(theta/2.0)
+        xstep = 4.0 * math.pi / wavelength * math.sin(theta / 2.0)
 
         theta = 0.25 / distance / 100.0
-        ystep = 4.0 *math.pi/wavelength * math.sin(theta/2.0)
+        ystep = 4.0 * math.pi/ wavelength * math.sin(theta / 2.0)
 
         # Store all data ######################################
         # Store wavelength
-        if has_converter ==True and output.source.wavelength_unit != 'A':
+        if has_converter == True and output.source.wavelength_unit != 'A':
             conv = Converter('A')
             wavelength = conv(wavelength, units=output.source.wavelength_unit)
…
         # Store distance
-        if has_converter ==True and detector.distance_unit != 'm':
+        if has_converter == True and detector.distance_unit != 'm':
             conv = Converter('m')
             distance = conv(distance, units=detector.distance_unit)
…
         # Store pixel size
         pixel = 5.0
-        if has_converter ==True and detector.pixel_size_unit != 'mm':
+        if has_converter == True and detector.pixel_size_unit != 'mm':
             conv = Converter('mm')
             pixel = conv(pixel, units=detector.pixel_size_unit)
…
         # Store beam center in distance units
-        detector.beam_center.x = center_x*pixel
-        detector.beam_center.y = center_y*pixel
-
+        detector.beam_center.x = center_x * pixel
+        detector.beam_center.y = center_y * pixel
 
         # Store limits of the image (2D array)
-        xmin =xmin-xstep/2.0
-        xmax =xmax+xstep/2.0
-        ymin =ymin-ystep/2.0
-        ymax =ymax+ystep/2.0
+        xmin = xmin - xstep / 2.0
+        xmax = xmax + xstep / 2.0
+        ymin = ymin - ystep / 2.0
+        ymax = ymax + ystep / 2.0
         if has_converter == True and output.Q_unit != '1/A':
             xmin = data_conv_q(xmin, units=output.Q_unit)
…
         # Store x and y axis bin centers
         output.x_bins = x
         output.y_bins = y
 
         # Units
…
             output.zaxis("\\rm{Intensity}", output.I_unit)
         else:
-            output.zaxis("\\rm{Intensity}","cm^{-1}")
+            output.zaxis("\\rm{Intensity}", "cm^{-1}")
 
         # Store loading process information
…
         return output
-
-if __name__ == "__main__":
-    reader = Reader()
-    print reader.read("../test/MAR07232_rest.ASC")
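Note: the pixel-to-Q conversion that several of the hunks above reformat is the usual small-angle relation Q = (4π/λ) sin(θ/2), which the reader's own comments spell out. A minimal sketch of that conversion, with hypothetical names, assuming 0.5 cm detector bins, sample-detector distance in metres and wavelength in Å as stated in those comments:

    import math

    def pixel_to_q(i_bin, center_bin, distance, wavelength, bin_size=0.5):
        # scattering angle for this bin; the / 100.0 converts the cm bin
        # pitch to metres so theta comes out as a small angle in radians
        theta = (i_bin - center_bin) * bin_size / distance / 100.0
        # Q = 4 pi / lambda * sin(theta / 2); Q is in 1/A when lambda is in A
        return 4.0 * math.pi / wavelength * math.sin(theta / 2.0)

    # e.g. bin 100 with the beam center at bin 63.5, 3 m away, 6 A neutrons
    q = pixel_to_q(100, 63.5, 3.0, 6.0)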
sansdataloader/src/sans/dataloader/readers/abs_reader.py
(aac129aa → 7d6351e)

 #This software was developed by the University of Tennessee as part of the
 #Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
 #project funded by the US National Science Foundation.
 #See the license text in license.txt
 #copyright 2008, University of Tennessee
…
 has_converter = False
 
+
 class Reader:
     """
…
     """
     ## File type
     type_name = "IGOR 1D"
     ## Wildcards
     type = ["IGOR 1D files (*.abs)|*.abs"]
     ## List of allowed extensions
     ext = ['.abs', '.ABS']
 
     def read(self, path):
…
         """
         if os.path.isfile(path):
-            basename  = os.path.basename(path)
+            basename = os.path.basename(path)
             root, extension = os.path.splitext(basename)
             if extension.lower() in self.ext:
                 try:
-                    input_f =  open(path,'r')
-                except :
+                    input_f = open(path,'r')
+                except:
                     raise RuntimeError, "abs_reader: cannot open %s" % path
                 buff = input_f.read()
…
                        output.source.wavelength_unit != 'A':
                         conv = Converter('A')
                         output.source.wavelength = conv(value,
                                         units=output.source.wavelength_unit)
                     else:
…
                         msg = "abs_reader: cannot open %s" % path
                         raise RuntimeError, msg
-                        #raise ValueError,"IgorReader: can't read this file,
-                        # missing wavelength"
 
                 # Distance in meters
…
                        detector.distance_unit != 'm':
                         conv = Converter('m')
                         detector.distance = conv(value,
                                         units=detector.distance_unit)
                     else:
…
                         msg = "abs_reader: cannot open %s" % path
                         raise RuntimeError, msg
                 # Transmission
                 try:
                     output.sample.transmission = float(line_toks[4])
…
                        output.sample.thickness_unit != 'cm':
                         conv = Converter('cm')
                         output.sample.thickness = conv(value,
                                         units=output.sample.thickness_unit)
                     else:
…
                 # Find center info line
                 if is_center == True:
                     is_center = False
                     line_toks = line.split()
                     # Center in bin number
…
                        detector.pixel_size_unit != 'mm':
                         conv = Converter('mm')
                         detector.pixel_size.x = conv(5.0,
                                             units=detector.pixel_size_unit)
                         detector.pixel_size.y = conv(5.0,
…
                     # Store beam center in distance units
                     # Det 640 x 640 mm
-                    if has_converter ==True and \
+                    if has_converter == True and \
                        detector.beam_center_unit != 'mm':
                         conv = Converter('mm')
                         detector.beam_center.x = conv(center_x * 5.0,
                                              units=detector.beam_center_unit)
                         detector.beam_center.y = conv(center_y * 5.0,
                                              units=detector.beam_center_unit)
                     else:
…
                     pass
 
                 #BCENT(X,Y)   A1(mm)   A2(mm)   A1A2DIST(m)   DL/L
                 #  BSTOP(mm)   DET_TYP
                 if line.count("BCENT") > 0:
                     is_center = True
…
                     toks = line.split()
 
                     try:
                         _x = float(toks[0])
                         _y = float(toks[1])
                         _dy = float(toks[2])
                         _dx = float(toks[3])
…
                             _dy = data_conv_i(_dy, units=output.y_unit)
 
                         x = numpy.append(x, _x)
-                        y = numpy.append(y,_y)
+                        y = numpy.append(y, _y)
                         dy = numpy.append(dy, _dy)
-                        dx  = numpy.append(dx, _dx)
+                        dx = numpy.append(dx, _dx)
 
                     except:
…
                 #The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
                 # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
-                if line.count("The 6 columns") >0:
+                if line.count("The 6 columns") > 0:
                     is_data_started = True
…
                 raise ValueError, "ascii_reader: could not load file"
 
-            output.x = x[x !=0]
-            output.y = y[x !=0]
-            output.dy = dy[x !=0]
-            output.dx = dx[x !=0]
+            output.x = x[x != 0]
+            output.y = y[x != 0]
+            output.dy = dy[x != 0]
+            output.dx = dx[x != 0]
             if data_conv_q is not None:
                 output.xaxis("\\rm{Q}", output.x_unit)
…
                 output.yaxis("\\rm{Intensity}", output.y_unit)
             else:
-                output.yaxis("\\rm{Intensity}","cm^{-1}")
+                output.yaxis("\\rm{Intensity}", "cm^{-1}")
 
             # Store loading process information
             output.meta_data['loader'] = self.type_name
             return output
         else:
             raise RuntimeError, "%s is not a file" % path
         return None
-
-if __name__ == "__main__":
-    reader = Reader()
-    print reader.read("../test/jan08002.ABS")
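All of the readers in this changeset share the guarded unit-conversion pattern visible above: try to import Converter, and only convert when the import succeeded and the units actually differ. A minimal sketch of that pattern; the to_1_over_A wrapper is a hypothetical name, while the import path and the Converter(from_unit)(value, units=to_unit) call mirror how these readers use it:

    try:
        from sans.data_util.nxsunit import Converter
        has_converter = True
    except:
        has_converter = False

    def to_1_over_A(value, unit):
        # Hypothetical helper: convert `value` from `unit` to 1/A when a
        # converter is installed, otherwise pass the value through unchanged.
        if has_converter == True and unit != '1/A':
            return Converter(unit)(value, units='1/A')
        return value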
sansdataloader/src/sans/dataloader/readers/ascii_reader.py
(9cd0baa → 7d6351e)

-
 """
+ASCII reader
+"""
 ############################################################################
 #This software was developed by the University of Tennessee as part of the
 #Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
 #project funded by the US National Science Foundation.
 #If you use DANSE applications to do scientific research that leads to
 #publication, we ask that you acknowledge the use of the software with the
 #following sentence:
 #This work benefited from DANSE software developed under NSF award DMR-0520547.
 #copyright 2008, University of Tennessee
 #############################################################################
…
 has_converter = False
 _ZERO = 1e-16
 
+
 class Reader:
…
         "CSV files (*.csv)|*.csv"]
     ## List of allowed extensions
     ext = ['.txt', '.TXT', '.dat', '.DAT', '.abs', '.ABS', 'csv', 'CSV']
 
     ## Flag to bypass extension check
…
     def read(self, path):
         """
         Load data file
…
         """
         if os.path.isfile(path):
-            basename  = os.path.basename(path)
+            basename = os.path.basename(path)
             _, extension = os.path.splitext(basename)
             if self.allow_all or extension.lower() in self.ext:
                 try:
                     # Read in binary mode since GRASP frequently has no-ascii
                     # characters that brakes the open operation
-                    input_f =  open(path,'rb')
-                except :
+                    input_f = open(path,'rb')
+                except:
                     raise RuntimeError, "ascii_reader: cannot open %s" % path
                 buff = input_f.read()
                 lines = buff.split('\n')
 
                 #Jae could not find python universal line spliter:
                 #keep the below for now
                 # some ascii data has \r line separator,
                 # try it when the data is on only one long line
                 if len(lines) < 2 :
                     lines = buff.split('\r')
…
                     data_conv_i(1.0, output.y_unit)
 
-
                 # The first good line of data will define whether
                 # we have 2-column or 3-column ascii
…
                 #Initialize counters for data lines and header lines.
-                is_data = False #Has more than 5 lines
+                is_data = False  # Has more than 5 lines
                 # More than "5" lines of data is considered as actual
                 # data unless that is the only data
                 mum_data_lines = 5
                 # To count # of current data candidate lines
                 i = -1
                 # To count total # of previous data candidate lines
                 i1 = -1
                 # To count # of header lines
                 j = -1
                 # Helps to count # of header lines
                 j1 = -1
                 #minimum required number of columns of data; ( <= 4).
                 lentoks = 2
                 for line in lines:
                     toks = line.split(',')
…
                         if data_conv_i is not None:
                             _y = data_conv_i(_y, units=output.y_unit)
 
                         # If we have an extra token, check
…
                         has_error_dx = False if _dx == None else True
 
                         #After talked with PB, we decided to take care of only
                         # 4 columns of data for now.
                         #number of columns in the current line
                         #To remember the # of columns in the current
                         #line of data
                         new_lentoks = len(toks)
 
                         #If the previous columns not equal to the current,
                         #mark the previous as non-data and reset the dependents.
-                        if lentoks != new_lentoks :
+                        if lentoks != new_lentoks:
                             if is_data == True:
                                 break
…
                             j = -1
                             j1 = -1
-
 
                         #Delete the previously stored lines of data candidates
…
                             pass
 
-                        x  = numpy.append(x,_x)
-                        y  = numpy.append(y,_y)
+                        x = numpy.append(x, _x)
+                        y = numpy.append(y, _y)
 
                         if has_error_dy == True:
…
                            is_data == False:
                             try:
                                 dy = numpy.zeros(0)
                             except:
                                 pass
                             dy = numpy.append(dy, _dy)
…
                            is_data == False:
                             try:
                                 dx = numpy.zeros(0)
                             except:
                                 pass
                             dx = numpy.append(dx, _dx)
…
                                 ty = numpy.zeros(0)
                             except:
                                 pass
 
-                        tx = numpy.append(tx,_x)
-                        ty = numpy.append(ty,_y)
+                        tx = numpy.append(tx, _x)
+                        ty = numpy.append(ty, _y)
 
                         if has_error_dy == True:
…
                                 tdy = numpy.zeros(0)
                             except:
                                 pass
                             tdy = numpy.append(tdy, _dy)
                         if has_error_dx == True:
…
                                 tdx = numpy.zeros(0)
                             except:
                                 pass
                             tdx = numpy.append(tdx, _dx)
…
                         if lentoks < new_lentoks:
                             if is_data == False:
                                 i1 = -1
                             #To remember the # of columns on the current line
                             # for the next line of data
                             lentoks = len(toks)
 
                         #Reset # of header lines and counts #
                         # of data candidate lines
                         if j == 0 and j1 == 0:
                             i1 = i + 1
                         i += 1
                     except:
…
                         # It is data and meet non - number, then stop reading
                         if is_data == True:
                             break
                         lentoks = 2
                         #Counting # of header lines
                         j += 1
                         if j == j1 + 1:
                             j1 = j
                         else:
                             j = -1
                         #Reset # of lines of data candidates
                         i = -1
 
                         # Couldn't parse this line, skip it
                         pass
 
                 input_f.close()
                 # Sanity check
                 if has_error_dy == True and not len(y) == len(dy):
…
                     if has_error_dx == True:
                         dx[i] = tdx[ind[i]]
                 # Zeros in dx, dy
                 if has_error_dx:
-                    dx[dx ==0] = _ZERO
+                    dx[dx == 0] = _ZERO
                 if has_error_dy:
-                    dy[dy==0] = _ZERO
+                    dy[dy == 0] = _ZERO
                 #Data
-                output.x = x[x!=0]
-                output.y = y[x!=0]
-                output.dy = dy[x!=0] if has_error_dy == True else numpy.zeros(len(output.y))
-                output.dx = dx[x!=0] if has_error_dx == True else numpy.zeros(len(output.x))
+                output.x = x[x != 0]
+                output.y = y[x != 0]
+                output.dy = dy[x != 0] if has_error_dy == True\
+                    else numpy.zeros(len(output.y))
+                output.dx = dx[x != 0] if has_error_dx == True\
+                    else numpy.zeros(len(output.x))
 
                 if data_conv_q is not None:
…
                     output.yaxis("\\rm{Intensity}", output.y_unit)
                 else:
-                    output.yaxis("\\rm{Intensity}","cm^{-1}")
+                    output.yaxis("\\rm{Intensity}", "cm^{-1}")
 
                 # Store loading process information
                 output.meta_data['loader'] = self.type_name
 
                 return output
…
             raise RuntimeError, "%s is not a file" % path
         return None
-
-if __name__ == "__main__":
-    reader = Reader()
-    #print reader.read("../test/test_3_columns.txt")
-    print reader.read("../test/empty.txt")
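The final block above keeps the x != 0 filtering: a Q of exactly zero is the sentinel for rows that never parsed, so masking on x drops those rows from every column at once while preserving alignment. A small self-contained illustration of that numpy idiom:

    import numpy

    x = numpy.array([0.0, 0.01, 0.02, 0.0, 0.03])
    y = numpy.array([9.9, 1.2, 1.1, 9.9, 0.9])
    dy = numpy.array([0.5, 0.1, 0.1, 0.5, 0.1])

    keep = x != 0                          # boolean mask, True for real points
    x, y, dy = x[keep], y[keep], dy[keep]  # three aligned points remain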
sansdataloader/src/sans/dataloader/readers/associations.py
(f576de0 → 7d6351e)

-
-
-############################################################################
-#This software was developed by the University of Tennessee as part of the
-#Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
-#project funded by the US National Science Foundation.
-#If you use DANSE applications to do scientific research that leads to
-#publication, we ask that you acknowledge the use of the software with the
-#following sentence:
-#This work benefited from DANSE software developed under NSF award DMR-0520547.
-#copyright 2009, University of Tennessee
-#############################################################################
-
-
 """
 Module to associate default readers to file extensions.
…
 The readers are tried in order they appear when reading a file.
 """
+############################################################################
+#This software was developed by the University of Tennessee as part of the
+#Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
+#project funded by the US National Science Foundation.
+#If you use DANSE applications to do scientific research that leads to
+#publication, we ask that you acknowledge the use of the software with the
+#following sentence:
+#This work benefited from DANSE software developed under NSF award DMR-0520547.
+#copyright 2009, University of Tennessee
+#############################################################################
 import os
 import sys
 import logging
 from lxml import etree
 # Py2exe compatibility: import _elementpath to ensure that py2exe finds it
 from lxml import _elementpath
 
 ## Format version for the XML settings file
 VERSION = 'sansloader/1.0'
 
+
 def read_associations(loader, settings='defaults.xml'):
…
     # Check the format version number
     # Specifying the namespace will take care of the file format version
     root = tree.getroot()
…
     for entry in entry_list:
         reader = entry.get('reader')
-        ext    = entry.get('extension')
+        ext = entry.get('extension')
 
         if reader is not None and ext is not None:
             # Associate the extension with a particular reader
             # TODO: Modify the Register code to be case-insensitive
             # and remove the extra line below.
             try:
…
     registry_function(tiff_reader)
 
     return True
-
-
-if __name__ == "__main__":
-    logging.basicConfig(level=logging.INFO,
-                        format='%(asctime)s %(levelname)s %(message)s',
-                        filename='logger.log',
-                        filemode='w')
-    from sans.dataloader.loader import Loader
-    l = Loader()
-    read_associations(l)
-
-    print l.get_wildcards()
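For context, read_associations walks the entries of the settings XML and pairs entry.get('reader') with entry.get('extension'). A minimal sketch of that lookup against a made-up defaults.xml fragment; the FileType element name and overall layout here are assumptions for illustration, not the actual schema:

    from lxml import etree

    xml = """<SansLoader version="sansloader/1.0">
        <FileType extension=".txt" reader="ascii_reader"/>
        <FileType extension=".xml" reader="cansas_reader"/>
    </SansLoader>"""

    associations = {}
    for entry in etree.fromstring(xml).iter():
        reader = entry.get('reader')
        ext = entry.get('extension')
        if reader is not None and ext is not None:
            # lower-cased because, per the module's TODO, Register is
            # case-sensitive
            associations[ext.lower()] = reader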
sansdataloader/src/sans/dataloader/readers/cansas_reader.py
(5a0dac1f → 7d6351e)

-
 """
+CanSAS data reader
+"""
 ############################################################################
 #This software was developed by the University of Tennessee as part of the
…
 # Process notes have the same problem.
 #TODO: Unit conversion is not complete (temperature units are missing)
-
 import logging
…
 CANSAS_NS = "cansas1d/1.0"
 ALLOW_ALL = True
 
+
 def write_node(doc, parent, name, value, attr={}):
…
     return False
 
+
 def get_content(location, node):
     """
…
     nodes = node.xpath(location, namespaces={'ns': CANSAS_NS})
 
-    if len(nodes) >0:
+    if len(nodes) > 0:
         return nodes[0]
     else:
         return None
 
+
 def get_float(location, node):
     """
     Get the content of a node as a float
 
     :param location: xpath location
…
     if len(nodes) > 0:
         try:
             value = float(nodes[0].text)
         except:
             # Could not pass, skip and return None
…
     ## List of allowed extensions
     ext = ['.xml', '.XML', '.avex', '.AVEx', '.absx', 'ABSx']
 
     def __init__(self):
…
     def read(self, path):
         """
         Load data file
…
         output = []
         if os.path.isfile(path):
-            basename  = os.path.basename(path)
+            basename = os.path.basename(path)
             root, extension = os.path.splitext(basename)
             if ALLOW_ALL or extension.lower() in self.ext:
…
                     # Check the format version number
                     # Specifying the namespace will take care of the file
                     # format version
                     root = tree.getroot()
…
             # Return output consistent with the loader's api
             if len(output) == 0:
                 #cannot return none when it cannot read
                 #return None
                 raise RuntimeError, "%s cannot be read \n" % path
…
                 return output[0]
             else:
                 return output
 
     def _parse_entry(self, dom):
…
         data_info = Data1D(x, y)
 
         # Look up title
         self._store_content('ns:Title', dom, 'title', data_info)
 
         # Look up run number
         nodes = dom.xpath('ns:Run', namespaces={'ns': CANSAS_NS})
         for item in nodes:
             if item.text is not None:
                 value = item.text.strip()
…
                     data_info.run_name[value] = item.get('name')
 
         # Look up instrument name
         self._store_content('ns:SASinstrument/ns:name', dom, 'instrument',
                             data_info)
…
             data_info.sample.name = entry.get('name')
 
         self._store_content('ns:SASsample/ns:ID',
                             dom, 'ID', data_info.sample)
         self._store_float('ns:SASsample/ns:thickness',
                           dom, 'thickness', data_info.sample)
         self._store_float('ns:SASsample/ns:transmission',
                           dom, 'transmission', data_info.sample)
         self._store_float('ns:SASsample/ns:temperature',
                           dom, 'temperature', data_info.sample)
 
         nodes = dom.xpath('ns:SASsample/ns:details',
                           namespaces={'ns': CANSAS_NS})
         for item in nodes:
…
(the remaining hunks through the sample position/orientation, source, beam-size, collimation, aperture and detector lookups only normalize continuation-line indentation of the _store_float / _store_content calls)
…
         # Data info ######################
…
         nodes = dom.xpath('ns:SASdata/ns:Idata', namespaces={'ns': CANSAS_NS})
 
         x = numpy.zeros(0)
         y = numpy.zeros(0)
         dx = numpy.zeros(0)
         dy = numpy.zeros(0)
…
             _dxw = 0.0
 
-            if attr.has_key('unit') and \
+            if 'unit' in attr and \
                 attr['unit'].lower() != data_info.x_unit.lower():
                 if HAS_CONVERTER == True:
…
                         _x = data_conv_q(_x, units=data_info.x_unit)
                     except:
                         msg = "CanSAS reader: could not convert "
                         msg += "Q unit [%s]; " % attr['unit'],
                         msg += "expecting [%s]\n  %s" % (data_info.x_unit,
                                                          sys.exc_value)
                         raise ValueError, msg
…
             # Error in Q
-            if attr_d.has_key('unit') and \
+            if 'unit' in attr_d and \
                 attr_d['unit'].lower() != data_info.x_unit.lower():
                 if HAS_CONVERTER == True:
…
                         msg = "CanSAS reader: could not convert dQ unit [%s]; "\
                         % attr['unit']
                         msg += " expecting "
                         msg += "[%s]\n  %s" % (data_info.x_unit, sys.exc_value)
                         raise ValueError, msg
…
                     % attr['unit']
                     msg += "expecting [%s]" % data_info.x_unit
                     raise ValueError, msg
 
             # Slit length
-            if attr_l.has_key('unit') and \
+            if 'unit' in attr_l and \
                 attr_l['unit'].lower() != data_info.x_unit.lower():
                 if HAS_CONVERTER == True:
…
                         msg = "CanSAS reader: could not convert dQl unit [%s];"\
                         % attr['unit']
                         msg += " expecting [%s]\n  %s" % (data_info.x_unit,
                                                           sys.exc_value)
                         raise ValueError, msg
             else:
…
             # Slit width
-            if attr_w.has_key('unit') and \
+            if 'unit' in attr_w and \
                 attr_w['unit'].lower() != data_info.x_unit.lower():
                 if HAS_CONVERTER == True:
…
             _y, attr = get_float('ns:I', item)
             _dy, attr_d = get_float('ns:Idev', item)
             if _dy == None:
                 _dy = 0.0
-            if attr.has_key('unit') and \
+            if 'unit' in attr and \
                 attr['unit'].lower() != data_info.y_unit.lower():
                 if HAS_CONVERTER == True:
…
-            if attr_d.has_key('unit') and \
+            if 'unit' in attr_d and \
                 attr_d['unit'].lower() != data_info.y_unit.lower():
                 if HAS_CONVERTER == True:
…
                     except:
                         if attr_d['unit'].lower() == 'count':
                             pass
                         else:
                             msg = "CanSAS reader: could not convert dI(q) unit "
                             msg += "[%s]; expecting [%s]\n  %s" % (attr_d['unit'],
                                               data_info.y_unit, sys.exc_value)
                             raise ValueError, msg
…
             if _x is not None and _y is not None:
                 x = numpy.append(x, _x)
                 y = numpy.append(y, _y)
                 dx = numpy.append(dx, _dx)
                 dy = numpy.append(dy, _dy)
                 dxl = numpy.append(dxl, _dxl)
                 dxw = numpy.append(dxw, _dxw)
         # Zeros in dx, dy
-        if not numpy.all(dx ==0):
-            dx[dx ==0] = _ZERO
-        if not numpy.all(dy ==0):
-            dy[dy ==0] = _ZERO
+        if not numpy.all(dx == 0):
+            dx[dx == 0] = _ZERO
+        if not numpy.all(dy == 0):
+            dy[dy == 0] = _ZERO
 
-        data_info.x = x[x !=0]
-        data_info.y = y[x !=0]
-        data_info.dx = dx[x !=0]
-        data_info.dy = dy[x !=0]
-        data_info.dxl = dxl[x !=0]
-        data_info.dxw = dxw[x !=0]
+        data_info.x = x[x != 0]
+        data_info.y = y[x != 0]
+        data_info.dx = dx[x != 0]
+
+        data_info.dy = dy[x != 0]
+        data_info.dxl = dxl[x != 0]
+        data_info.dxw = dxw[x != 0]
 
         data_conv_q = None
…
             data_conv_i = Converter('1/cm')
             # Test it
             data_conv_i(1.0, output.I_unit)
 
         if data_conv_q is not None:
…
             data_info.yaxis("\\rm{Intensity}", data_info.y_unit)
         else:
-            data_info.yaxis("\\rm{Intensity}","cm^{-1}")
+            data_info.yaxis("\\rm{Intensity}", "cm^{-1}")
 
         return data_info
…
         for item in datainfo.run:
             runname = {}
-            if datainfo.run_name.has_key(item) and \
-            len(str(datainfo.run_name[item])) >1:
-                runname = {'name': datainfo.run_name[item] }
+            if item in datainfo.run_name and \
+            len(str(datainfo.run_name[item])) > 1:
+                runname = {'name': datainfo.run_name[item]}
             write_node(doc, entry_node, "Run", item, runname)
…
             pt = doc.createElement("Idata")
             node.appendChild(pt)
-            write_node(doc, pt, "Q", datainfo.x[i], {'unit':datainfo.x_unit})
-            if len(datainfo.y) >=i:
+            write_node(doc, pt, "Q", datainfo.x[i], {'unit': datainfo.x_unit})
+            if len(datainfo.y) >= i:
                 write_node(doc, pt, "I", datainfo.y[i],
-                           {'unit':datainfo.y_unit})
+                           {'unit': datainfo.y_unit})
             if datainfo.dx != None and len(datainfo.dx) >= i:
                 write_node(doc, pt, "Qdev", datainfo.dx[i],
-                           {'unit':datainfo.x_unit})
+                           {'unit': datainfo.x_unit})
             if datainfo.dxl != None and len(datainfo.dxl) >= i:
                 write_node(doc, pt, "dQl", datainfo.dxl[i],
-                           {'unit':datainfo.x_unit})
+                           {'unit': datainfo.x_unit})
             if datainfo.dxw != None and len(datainfo.dxw) >= i:
                 write_node(doc, pt, "dQw", datainfo.dxw[i],
-                           {'unit':datainfo.x_unit})
+                           {'unit': datainfo.x_unit})
             if datainfo.dy != None and len(datainfo.dy) >= i:
                 write_node(doc, pt, "Idev", datainfo.dy[i],
-                           {'unit':datainfo.y_unit})
+                           {'unit': datainfo.y_unit})
 
         # Sample info
…
(the sample, source, collimation, aperture and detector writer hunks apply the same {'unit': ...} colon-spacing fix throughout, e.g.:)
             ori = doc.createElement("orientation")
-            written = write_node(doc, ori, "roll", item.orientation.x,
-                                 {"unit":item.orientation_unit})
+            written = write_node(doc, ori, "roll", item.orientation.x,
+                                 {"unit": item.orientation_unit})
…
         # Create XML document
-        doc, sasentry = self._to_xml_doc(datainfo)
+        doc, _ = self._to_xml_doc(datainfo)
         # Write the file
         fd = open(filename, 'w')
…
         :param variable: name of the data member to store it in [string]
         :param storage: data object that has the 'variable' data member
         :param optional: if True, no exception will be raised
             if unit conversion can't be done
…
             toks = variable.split('.')
             exec "local_unit = storage.%s_unit" % toks[0]
-            if units.lower() !=local_unit.lower():
+            if units.lower() != local_unit.lower():
                 if HAS_CONVERTER == True:
                     try:
…
                     logging.info(err_mess)
                 else:
                     raise ValueError, err_mess
             else:
                 err_mess = "CanSAS reader: unrecognized %s unit [%s];"\
…
         if entry is not None and entry.text is not None:
             exec "storage.%s = entry.text.strip()" % variable
-
-
-if __name__ == "__main__":
-    logging.basicConfig(level=logging.ERROR,
-                        format='%(asctime)s %(levelname)s %(message)s',
-                        filename='cansas_reader.log',
-                        filemode='w')
-    reader = Reader()
-    print reader.read("../test/cansas1d.xml")
-    #print reader.read("../test/latex_smeared.xml")
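The parsing hunks above all funnel through namespaced xpath lookups: every query must map the ns prefix to CANSAS_NS, otherwise nothing matches. A minimal, self-contained illustration of the lookup that get_content and _store_content perform; the inline XML is a made-up one-element fragment, not a full CanSAS file:

    from lxml import etree

    CANSAS_NS = "cansas1d/1.0"
    xml = '<SASentry xmlns="cansas1d/1.0"><Title>my sample</Title></SASentry>'
    dom = etree.fromstring(xml)

    # same shape as get_content('ns:Title', dom) followed by a text read
    nodes = dom.xpath('ns:Title', namespaces={'ns': CANSAS_NS})
    title = nodes[0].text.strip() if len(nodes) > 0 else None  # 'my sample'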
sansdataloader/src/sans/dataloader/readers/danse_reader.py
(ad8034f → 7d6351e)

-
 """
+DANSE/SANS file reader
+"""
 ############################################################################
 #This software was developed by the University of Tennessee as part of the
 #Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
 #project funded by the US National Science Foundation.
 #If you use DANSE applications to do scientific research that leads to
 #publication, we ask that you acknowledge the use of the software with the
 #following sentence:
 #This work benefited from DANSE software developed under NSF award DMR-0520547.
 #copyright 2008, University of Tennessee
 #############################################################################
-
-"""
-DANSE/SANS file reader
-"""
-
 import math
 import os
…
 has_converter = False
 
+
 class Reader:
     """
…
     """
     ## File type
     type_name = "DANSE"
     ## Wildcards
     type = ["DANSE files (*.sans)|*.sans"]
     ## Extension
     ext = ['.sans', '.SANS']
 
     def read(self, filename=None):
…
         try:
             datafile = open(filename, 'r')
-        except :
+        except:
             raise RuntimeError,"danse_reader cannot open %s" % (filename)
…
             data_conv_i = Converter('1/cm')
             # Test it
             data_conv_i(1.0, output.I_unit)
 
         read_on = True
…
             if toks[0] == "FORMATVERSION":
                 fversion = float(toks[1])
             if toks[0] == "WAVELENGTH":
                 wavelength = float(toks[1])
             elif toks[0] == "DISTANCE":
                 distance = float(toks[1])
…
                     error.append(err)
                 except:
-                    logging.info("Skipping line:%s,%s" %( data_str,
+                    logging.info("Skipping line:%s,%s" %(data_str,
                                                          sys.exc_value))
 
         # Initialize
         x_vals = []
         y_vals = []
         ymin = None
         ymax = None
…
         # Qx and Qy vectors
         theta = pixel / distance / 100.0
-        stepq = 4.0*math.pi/wavelength * math.sin(theta/2.0)
+        stepq = 4.0 * math.pi / wavelength * math.sin(theta / 2.0)
         for i_x in range(size_x):
             theta = (i_x - center_x + 1) * pixel / distance / 100.0
-            qx = 4.0 * math.pi / wavelength * math.sin(theta/2.0)
+            qx = 4.0 * math.pi / wavelength * math.sin(theta / 2.0)
 
             if has_converter == True and output.Q_unit != '1/A':
…
         for i_y in range(size_y):
             theta = (i_y - center_y + 1) * pixel / distance / 100.0
-            qy = 4.0*math.pi/wavelength * math.sin(theta/2.0)
+            qy = 4.0 * math.pi / wavelength * math.sin(theta/2.0)
 
             if has_converter == True and output.Q_unit != '1/A':
…
         # Store the data in the 2D array
-        itot = 0
         i_x = 0
         i_y = -1
 
         for i_pt in range(len(data)):
…
                 # For version 1.0, the data were still
                 # stored as strings at this point.
                 msg = "Skipping entry (v1.0):%s,%s" % (str(data[i_pt]),
                                                        sys.exc_value)
                 logging.info(msg)
…
                 i_x += 1
 
             output.data[i_y][i_x] = value
-            #output.data[size_y-1-i_y][i_x] = value
             if fversion>1.0:
                 output.err_data[i_y][i_x] = error[i_pt]
-                #output.err_data[size_y-1-i_y][i_x] = error[i_pt]
-
 
         # Store all data
         # Store wavelength
         if has_converter == True and output.source.wavelength_unit != 'A':
…
         # Store beam center in distance units
-        detector.beam_center.x = center_x *pixel
-        detector.beam_center.y = center_y *pixel
+        detector.beam_center.x = center_x * pixel
+        detector.beam_center.y = center_y * pixel
 
         # Store limits of the image (2D array)
         xmin = xmin - stepq / 2.0
         xmax = xmax + stepq / 2.0
         ymin = ymin - stepq /2.0
         ymax = ymax + stepq / 2.0
…
         # Store x and y axis bin centers
         output.x_bins = x_vals
         output.y_bins = y_vals
 
         # Units
…
             output.zaxis("\\rm{Intensity}", output.I_unit)
         else:
-            output.zaxis("\\rm{Intensity}","cm^{-1}")
+            output.zaxis("\\rm{Intensity}", "cm^{-1}")
 
         if not fversion >= 1.0:
…
         # Store loading process information
         output.meta_data['loader'] = self.type_name
         output = reader2D_converter(output)
         return output
 
     return None
-
-if __name__ == "__main__":
-    reader = Reader()
-    print reader.read("../test/MP_New.sans")
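The fill loop above writes a flat list of counts into the 2D array row by row, using the i_x/i_y bookkeeping shown in the hunk (i_x = 0, i_y = -1, then i_x += 1 per point). A small sketch of that wrap-around fill with plain lists and hypothetical sizes; the wrap test itself is elided in the hunk, so the modulo check here is an assumption about when a new row starts:

    size_x, size_y = 3, 2
    data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
    grid = [[0.0] * size_x for _ in range(size_y)]

    i_x, i_y = 0, -1
    for i_pt, value in enumerate(data):
        if i_pt % size_x == 0:   # assumed wrap condition: start a new row
            i_x = 0
            i_y += 1
        else:
            i_x += 1
        grid[i_y][i_x] = value
    # grid == [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]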
sansdataloader/src/sans/dataloader/readers/hfir1d_reader.py
(ad8034f → 7d6351e)

 """
+HFIR 1D 4-column data reader
+"""
 #####################################################################
 #This software was developed by the University of Tennessee as part of the
 #Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
 #project funded by the US National Science Foundation.
 #See the license text in license.txt
 #copyright 2008, University of Tennessee
 ######################################################################
-
 import numpy
 import os
…
     """
     ## File type
     type_name = "HFIR 1D"
     ## Wildcards
     type = ["HFIR 1D files (*.d1d)|*.d1d"]
     ## List of allowed extensions
     ext = ['.d1d']
 
     def read(self, path):
…
         """
         if os.path.isfile(path):
-            basename  = os.path.basename(path)
+            basename = os.path.basename(path)
             root, extension = os.path.splitext(basename)
             if extension.lower() in self.ext:
                 try:
-                    input_f =  open(path,'r')
-                except :
+                    input_f = open(path,'r')
+                except:
                     raise RuntimeError, "hfir1d_reader: cannot open %s" % path
                 buff = input_f.read()
                 lines = buff.split('\n')
-                x  = numpy.zeros(0)
-                y  = numpy.zeros(0)
+                x = numpy.zeros(0)
+                y = numpy.zeros(0)
                 dx = numpy.zeros(0)
                 dy = numpy.zeros(0)
…
                         if data_conv_i is not None:
                             _y = data_conv_i(_y, units=output.y_unit)
                             _dy = data_conv_i(_dy, units=output.y_unit)
 
-                        x = numpy.append(x, _x)
-                        y  = numpy.append(y, _y)
-                        dx  = numpy.append(dx, _dx)
-                        dy = numpy.append(dy, _dy)
+                        x = numpy.append(x, _x)
+                        y = numpy.append(y, _y)
+                        dx = numpy.append(dx, _dx)
+                        dy = numpy.append(dy, _dy)
                     except:
                         # Couldn't parse this line, skip it
                         pass
 
                 # Sanity check
                 if not len(y) == len(dy):
…
                     output.yaxis("\\rm{Intensity}", output.y_unit)
                 else:
-                    output.yaxis("\\rm{Intensity}","cm^{-1}")
+                    output.yaxis("\\rm{Intensity}", "cm^{-1}")
 
                 # Store loading process information
                 output.meta_data['loader'] = self.type_name
                 return output
         else:
             raise RuntimeError, "%s is not a file" % path
         return None
-
-if __name__ == "__main__":
-    reader = Reader()
-    print reader.read("../test/S2-30dq.d1d")
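This reader parses four whitespace-separated columns per line into growing numpy arrays, silently skipping any line that fails to parse. A compact sketch of that per-line pattern; the column order (Q, I, dI, dQ) mirrors the companion abs reader's toks[0]..toks[3] usage and is an assumption here:

    import numpy

    x = numpy.zeros(0)
    y = numpy.zeros(0)
    dx = numpy.zeros(0)
    dy = numpy.zeros(0)

    for line in ["0.008  100.3  5.1  0.0002", "# header text"]:
        toks = line.split()
        try:
            # assumed column order: Q, I, dI, dQ
            _x, _y, _dy, _dx = [float(t) for t in toks[:4]]
        except (ValueError, IndexError):
            continue   # non-numeric header/footer lines are skipped
        x = numpy.append(x, _x)
        y = numpy.append(y, _y)
        dx = numpy.append(dx, _dx)
        dy = numpy.append(dy, _dy)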
sansdataloader/src/sans/dataloader/readers/red2d_reader.py
rad8034f r7d6351e 1 1 """ 2 TXT/IGOR 2D Q Map file reader 3 """ 2 4 ##################################################################### 3 5 #This software was developed by the University of Tennessee as part of the … … 7 9 #copyright 2008, University of Tennessee 8 10 ###################################################################### 9 10 """11 TXT/IGOR 2D Q Map file reader12 """13 14 15 11 import os 16 #import sys17 12 import numpy 18 13 import math 19 #import logging20 14 from sans.dataloader.data_info import Data2D, Detector 21 15 … … 26 20 except: 27 21 has_converter = False 22 28 23 29 24 def check_point(x_point): … … 36 31 except: 37 32 return 0 33 38 34 39 35 class Reader: 40 36 """ Simple data reader for Igor data files """ 41 37 ## File type 42 type_name = "IGOR/DAT 2D Q_map" 38 type_name = "IGOR/DAT 2D Q_map" 43 39 ## Wildcards 44 40 type = ["IGOR/DAT 2D file in Q_map (*.dat)|*.DAT"] 45 41 ## Extension 46 ext =['.DAT', '.dat']42 ext = ['.DAT', '.dat'] 47 43 48 44 def write(self, filename, data): … … 51 47 52 48 :param filename: file name to write 53 :param data: data2D 49 :param data: data2D 54 50 """ 55 51 import time 56 st = time.time()57 52 # Write the file 58 53 fd = open(filename, 'w') … … 66 61 # write qx qy I values 67 62 for i in range(len(data.data)): 68 fd.write("%g %g %g\n" % (data.qx_data[i], 63 fd.write("%g %g %g\n" % (data.qx_data[i], 69 64 data.qy_data[i], 70 65 data.data[i])) 71 66 # close 72 fd.close() 73 74 def read(self, filename=None):67 fd.close() 68 69 def read(self, filename=None): 75 70 """ Read file """ 76 71 if not os.path.isfile(filename): … … 79 74 80 75 # Read file 81 f = open(filename, 'r')76 f = open(filename, 'r') 82 77 buf = f.read() 83 f.close() 78 f.close() 84 79 # Instantiate data object 85 80 output = Data2D() 86 81 output.filename = os.path.basename(filename) 87 82 detector = Detector() 88 if len(output.detector)>0: print str(output.detector[0]) 83 if len(output.detector) > 0: 84 print str(output.detector[0]) 89 85 output.detector.append(detector) 90 86 … … 92 88 dataStarted = False 93 89 94 ## Defaults 90 ## Defaults 95 91 lines = buf.split('\n') 96 itot = 097 92 x = [] 98 93 y = [] 99 94 100 ncounts = 0 101 102 wavelength = None 103 distance = None 95 wavelength = None 96 distance = None 104 97 transmission = None 105 98 … … 107 100 pixel_y = None 108 101 109 i_x = 0 110 i_y = -1 111 pixels = 0 112 113 isInfo = False 102 isInfo = False 114 103 isCenter = False 115 104 … … 126 115 data_conv_i = Converter('1/cm') 127 116 # Test it 128 data_conv_i(1.0, output.I_unit) 117 data_conv_i(1.0, output.I_unit) 129 118 130 119 … … 132 121 # to calculate the exact number of data points 133 122 count = 0 134 while (len(lines[len(lines) -(count+1)].lstrip().rstrip()) < 1):135 del lines[len(lines) -(count+1)]123 while (len(lines[len(lines) - (count + 1)].lstrip().rstrip()) < 1): 124 del lines[len(lines) - (count + 1)] 136 125 count = count + 1 137 126 … … 140 129 # Old version NIST files: 0 141 130 ver = 0 142 for line in lines: 131 for line in lines: 143 132 line_num += 1 144 133 ## Reading the header applies only to IGOR/NIST 2D q_map data files … … 182 171 # Find center info line 183 172 if isCenter: 184 isCenter = False 173 isCenter = False 185 174 line_toks = line.split() 186 175 # Center in bin number … … 199 188 continue 200 189 201 ## Read and get data. 190 ## Read and get data. 
      if dataStarted == True:
  -   line_toks = line.split() 
  +   line_toks = line.split()
      if len(line_toks) == 0:
      #empty line
  …
      data_list = data_list.split()

  -   # Check if the size is consistent with data, otherwise 
  +   # Check if the size is consistent with data, otherwise
      #try the tab(\t) separator
  -   # (this may be removed once get the confidence 
  +   # (this may be removed once get the confidence
      #the former working all cases).
      if len(data_list) != (len(data_lines)) * col_num:
  …
      # Change it(string) into float
      #data_list = map(float,data_list)
  -   data_list1 = map(check_point, data_list) 
  +   data_list1 = map(check_point, data_list)

      # numpy array form
      data_array = numpy.array(data_list1)
  -   # Redimesion based on the row_num and col_num, 
  +   # Redimesion based on the row_num and col_num,
      #otherwise raise an error.
      try:
  …
      err_data = numpy.ones(row_num)
      qz_data = numpy.zeros(row_num)
  -   mask = numpy.ones(row_num, dtype=bool) 
  +   mask = numpy.ones(row_num, dtype=bool)
      # Get from the array
      qx_data = data_point[0]
  …
      data = data_point[2]
      if ver == 1:
  -   if col_num > (2 + ver): err_data = data_point[(2 + ver)] 
  -   if col_num > (3 + ver): qz_data = data_point[(3 + ver)] 
  -   if col_num > (4 + ver): dqx_data = data_point[(4 + ver)] 
  -   if col_num > (5 + ver): dqy_data = data_point[(5 + ver)] 
  +   if col_num > (2 + ver):
  +       err_data = data_point[(2 + ver)]
  +   if col_num > (3 + ver):
  +       qz_data = data_point[(3 + ver)]
  +   if col_num > (4 + ver):
  +       dqx_data = data_point[(4 + ver)]
  +   if col_num > (5 + ver):
  +       dqy_data = data_point[(5 + ver)]
      #if col_num > (6 + ver): mask[data_point[(6 + ver)] < 1] = False
      q_data = numpy.sqrt(qx_data*qx_data+qy_data*qy_data+qz_data*qz_data)
  …
      # Extra protection(it is needed for some data files):
      # If all mask elements are False, put all True
  -   if not mask.any(): mask[mask==False] = True
  +   if not mask.any():
  +       mask[mask == False] = True

      # Store limits of the image in q space
  -   xmin    = numpy.min(qx_data)
  -   xmax    = numpy.max(qx_data)
  -   ymin    = numpy.min(qy_data)
  -   ymax    = numpy.max(qy_data)
  +   xmin = numpy.min(qx_data)
  +   xmax = numpy.max(qx_data)
  +   ymin = numpy.min(qy_data)
  +   ymax = numpy.max(qy_data)

      # units
  …
      # calculate the number of pixels in the each axes
      npix_y = math.floor(math.sqrt(len(data)))
  -   npix_x = math.floor(len(data)/npix_y)
  +   npix_x = math.floor(len(data) / npix_y)

  -   # calculate the size of bins 
  +   # calculate the size of bins
  -   xstep = x_size/(npix_x-1)
  -   ystep = y_size/(npix_y-1)
  +   xstep = x_size / (npix_x - 1)
  +   ystep = y_size / (npix_y - 1)

      # store x and y axis bin centers in q space
  -   x_bins = numpy.arange(xmin,xmax+xstep,xstep)
  -   y_bins = numpy.arange(ymin,ymax+ystep,ystep)
  +   x_bins = numpy.arange(xmin, xmax + xstep, xstep)
  +   y_bins = numpy.arange(ymin, ymax + ystep, ystep)

      # get the limits of q values
  -   xmin = xmin - xstep/2
  -   xmax = xmax + xstep/2
  -   ymin = ymin - ystep/2
  -   ymax = ymax + ystep/2
  +   xmin = xmin - xstep / 2
  +   xmax = xmax + xstep / 2
  +   ymin = ymin - ystep / 2
  +   ymax = ymax + ystep / 2

  -   #Store data in outputs 
  -   #TODO: Check the lengths 
  -   output.data     = data
  +   #Store data in outputs
  +   #TODO: Check the lengths
  +   output.data = data
      if (err_data == 1).all():
      output.err_data = numpy.sqrt(numpy.abs(data))
  …
      output.err_data = err_data
  -   output.qx_data  = qx_data
  -   output.qy_data  = qy_data
  -   output.q_data   = q_data
  -   output.mask     = mask
  +   output.qx_data = qx_data
  +   output.qy_data = qy_data
  +   output.q_data = q_data
  +   output.mask = mask

      output.x_bins = x_bins
  …

      # optional data: if all of dq data == 0, do not pass to output
  -   if len(dqx_data) == len(qx_data) and dqx_data.any()!=0:
  +   if len(dqx_data) == len(qx_data) and dqx_data.any() != 0:
      # if no dqx_data, do not pass dqy_data.
      #(1 axis dq is not supported yet).
  -   if len(dqy_data) == len(qy_data) and dqy_data.any()!=0:
  +   if len(dqy_data) == len(qy_data) and dqy_data.any() != 0:
      # Currently we do not support dq parr, perp.
      # tranfer the comp. to cartesian coord. for newer version.
  …
      output.dqx_data = numpy.sqrt((dqx_data * cos_th) * \
      (dqx_data * cos_th) \
  -   + (dqy_data * sin_th) * \
  -   (dqy_data * sin_th))
  +   + (dqy_data * sin_th) * \
  +   (dqy_data * sin_th))
      output.dqy_data = numpy.sqrt((dqx_data * sin_th) * \
      (dqx_data * sin_th) \
  -   + (dqy_data * cos_th) * \
  -   (dqy_data * cos_th))
  +   + (dqy_data * cos_th) * \
  +   (dqy_data * cos_th))
      else:
      output.dqx_data = dqx_data
  …
      else:
      output.xaxis("\\rm{Q_{x}}", 'A^{-1}')
  -   output.yaxis("\\rm{Q_{y}}", 'A^{-1}') 
  +   output.yaxis("\\rm{Q_{y}}", 'A^{-1}')
      if data_conv_i is not None:
      output.zaxis("\\rm{Intensity}", output.I_unit)
      else:
  -   output.zaxis("\\rm{Intensity}", "cm^{-1}")
  +   output.zaxis("\\rm{Intensity}", "cm^{-1}")

      # Store loading process information
  …

      return output
  -
  -   if __name__ == "__main__":
  -   reader = Reader()
  -   print reader.read("../test/exp18_14_igor_2dqxqy.dat")
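The resolution handling near the end of `read()` transfers parallel/perpendicular dq components into Cartesian dqx/dqy by a quadrature sum. The definitions of `cos_th` and `sin_th` fall in an elided hunk, so the following is a sketch under the assumption that they are the direction cosines of each q vector (`qx/|q|`, `qy/|q|`); the function name is illustrative, not part of the reader.

import numpy

def dq_parr_perp_to_cartesian(qx, qy, dq_parr, dq_perp):
    # Direction cosines of each scattering vector (assumed definition;
    # q == 0 would need guarding in real use).
    q = numpy.sqrt(qx * qx + qy * qy)
    cos_th = qx / q
    sin_th = qy / q
    # Same quadrature sum as output.dqx_data / output.dqy_data above.
    dqx = numpy.sqrt((dq_parr * cos_th) ** 2 + (dq_perp * sin_th) ** 2)
    dqy = numpy.sqrt((dq_parr * sin_th) ** 2 + (dq_perp * cos_th) ** 2)
    return dqx, dqy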
- sansdataloader/src/sans/dataloader/readers/tiff_reader.py
rad8034f r7d6351e

  -
  -
      #####################################################################
      #This software was developed by the University of Tennessee as part of the
  …
      ######################################################################
      """
  -   Image reader. Untested. 
  +   Image reader. Untested.
      """
  -
  -
      #TODO: load and check data and orientation of the image (needs rendering)
  -
  -   import math, logging, os
  +   import math
  +   import logging
  +   import os
      import numpy
      from sans.dataloader.data_info import Data2D
  +

      class Reader:
  …
      """
      ## File type
  -   type_name = "TIF" 
  +   type_name = "TIF"
      ## Wildcards
      type = ["TIF files (*.tif)|*.tif",
  …
      ]
      ## Extension
  -   ext = ['.tif', '.tiff'] 
  +   ext = ['.tif', '.tiff']

      def read(self, filename=None):
  …
      try:
      im = Image.open(filename)
  -   except :
  -   raise RuntimeError, "cannot open %s"%(filename)
  +   except:
  +       raise RuntimeError, "cannot open %s"%(filename)
      data = im.getdata()

      # Initiazed the output data object
  -   output.data = numpy.zeros([im.size[0], im.size[1]])
  -   output.err_data = numpy.zeros([im.size[0], im.size[1]])
  +   output.data = numpy.zeros([im.size[0], im.size[1]])
  +   output.err_data = numpy.zeros([im.size[0], im.size[1]])

  -   # Initialize 
  +   # Initialize
      x_vals = []
  -   y_vals = [] 
  +   y_vals = []

      # x and y vectors
  …

      # Get bin number
  -   if math.fmod(itot, im.size[0])==0:
  +   if math.fmod(itot, im.size[0]) == 0:
      i_x = 0
      i_y += 1
      else:
      i_x += 1

  -   output.data[im.size[1]-1-i_y][i_x] = value
  +   output.data[im.size[1] - 1 - i_y][i_x] = value

      itot += 1

  -   output.xbins      = im.size[0]
  -   output.ybins      = im.size[1]
  -   output.x_bins     = x_vals
  -   output.y_bins     = y_vals
  -   output.xmin       = 0
  -   output.xmax       = im.size[0]-1
  -   output.ymin       = 0
  -   output.ymax       = im.size[0]-1
  +   output.xbins = im.size[0]
  +   output.ybins = im.size[1]
  +   output.x_bins = x_vals
  +   output.y_bins = y_vals
  +   output.xmin = 0
  +   output.xmax = im.size[0] - 1
  +   output.ymin = 0
  +   output.ymax = im.size[0] - 1

      # Store loading process information
  -   output.meta_data['loader'] = self.type_name 
  +   output.meta_data['loader'] = self.type_name
      return output
  -
  -
  -   if __name__ == "__main__":
  -   reader = Reader()
  -   print reader.read("../test/MP_New.sans")
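The pixel loop in this reader's `read()` walks the flat sequence returned by `im.getdata()`, recovers 2D indices, and flips rows so the first image row lands at the bottom of the data array. The same bookkeeping can be written with `divmod`; a minimal sketch, with the 4x3 size standing in for `im.size`:

# Sketch of the index arithmetic used by the loop above;
# width/height stand in for im.size[0]/im.size[1].
width, height = 4, 3
for itot in range(width * height):
    i_y, i_x = divmod(itot, width)   # row, column of the flat index
    # The reader stores each value flipped vertically:
    # output.data[height - 1 - i_y][i_x] = value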