Changeset a7a5886 in sasview for DataLoader/readers
- Timestamp: Nov 2, 2010 3:02:56 PM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.1.1, release-4.1.2, release-4.2.2, release_4.0.1, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: da9ac4e6
- Parents: 44148f0
- Location: DataLoader/readers
- Files: 9 edited
Legend:
- Unmodified lines are shown without a prefix, for context
- Added lines are prefixed with +
- Removed lines are prefixed with -
DataLoader/readers/IgorReader.py
(r0997158f → ra7a5886)

 """
 
-import os, sys
+import os
+#import sys
 import numpy
-import math, logging
-from DataLoader.data_info import Data2D, Detector
+import math
+#import logging
+from DataLoader.data_info import Data2D
+from DataLoader.data_info import Detector
 from DataLoader.manipulations import reader2D_converter
…
             wavelength = float(line_toks[1])
         except:
-            raise ValueError,"IgorReader: can't read this file, missing wavelength"
+            msg = "IgorReader: can't read this file, missing wavelength"
+            raise ValueError, msg
 
         #Find # of bins in a row assuming the detector is square.
…
                 wavelength = float(line_toks[1])
             except:
-                raise ValueError,"IgorReader: can't read this file, missing wavelength"
+                msg = "IgorReader: can't read this file, missing wavelength"
+                raise ValueError, msg
             # Distance in meters
             try:
                 distance = float(line_toks[3])
             except:
-                raise ValueError,"IgorReader: can't read this file, missing distance"
+                msg = "IgorReader: can't read this file, missing distance"
+                raise ValueError, msg
 
             # Distance in meters
             try:
                 transmission = float(line_toks[4])
             except:
-                raise ValueError,"IgorReader: can't read this file, missing transmission"
+                msg = "IgorReader: can't read this file, "
+                msg += "missing transmission"
+                raise ValueError, msg
 
         if line.count("LAMBDA")>0:
…
             line_toks = line.split()
 
-            # Center in bin number: Must substrate 1 because the index starts from 1
+            # Center in bin number: Must substrate 1 because
+            #the index starts from 1
             center_x = float(line_toks[0])-1
             center_y = float(line_toks[1])-1
…
            or center_x == None \
            or center_y == None:
-            raise ValueError, "IgorReader:Missing information in data file"
+            msg = "IgorReader:Missing information in data file"
+            raise ValueError, msg
 
         if dataStarted == True:
…
                 # Q = 4pi/lambda sin(theta/2)
                 # Bin size is 0.5 cm
-                #REmoved +1 from theta = (i_x-center_x+1)*0.5 / distance / 100.0 and
-                #REmoved +1 from theta = (i_y-center_y+1)*0.5 / distance / 100.0
-                #ToDo: Need complete check if the following covert process is consistent with fitting.py.
+                #REmoved +1 from theta = (i_x-center_x+1)*0.5 / distance
+                # / 100.0 and
+                #REmoved +1 from theta = (i_y-center_y+1)*0.5 /
+                # distance / 100.0
+                #ToDo: Need complete check if the following
+                # covert process is consistent with fitting.py.
                 theta = (i_x-center_x)*0.5 / distance / 100.0
                 qx = 4.0*math.pi/wavelength * math.sin(theta/2.0)
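The theta/qx lines in the last hunk implement the standard small-angle conversion from a detector bin index to a scattering vector, Q = (4 pi / lambda) sin(theta/2). A minimal, self-contained sketch of that mapping; the 0.5 cm bin size comes from the comment in the code, while the wavelength, distance, and beam center values here are illustrative assumptions only:

    import math

    wavelength = 6.0    # neutron wavelength in Angstrom (assumed)
    distance = 13.0     # sample-detector distance in meters (assumed)
    center_x = 63.5     # beam center in bin units (assumed)

    for i_x in (0, 64, 127):
        # bin offset (bins) * 0.5 cm/bin, then /100 to convert cm to m
        theta = (i_x - center_x) * 0.5 / distance / 100.0
        qx = 4.0 * math.pi / wavelength * math.sin(theta / 2.0)
        print i_x, qx   # Python 2 print statement, matching this codebase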
DataLoader/readers/abs_reader.py
(r0997158f → ra7a5886)

+"""
+"""
 #####################################################################
 #This software was developed by the University of Tennessee as part of the
…
 import numpy
 import os
-from DataLoader.data_info import Data1D, Detector
+from DataLoader.data_info import Data1D
+from DataLoader.data_info import Detector
 
 has_converter = True
…
     type = ["IGOR 1D files (*.abs)|*.abs"]
     ## List of allowed extensions
-    ext =['.abs', '.ABS']
+    ext = ['.abs', '.ABS']
 
     def read(self, path):
…
 
             # Information line 1
-            if is_info==True:
+            if is_info == True:
                 is_info = False
                 line_toks = line.split()
…
                 try:
                     value = float(line_toks[1])
-                    if has_converter==True and output.source.wavelength_unit != 'A':
+                    if has_converter == True and \
+                        output.source.wavelength_unit != 'A':
                         conv = Converter('A')
-                        output.source.wavelength = conv(value, units=output.source.wavelength_unit)
+                        output.source.wavelength = conv(value,
+                                        units=output.source.wavelength_unit)
                     else:
                         output.source.wavelength = value
                 except:
                     #goes to ASC reader
-                    raise RuntimeError, "abs_reader: cannot open %s" % path
-                    #raise ValueError,"IgorReader: can't read this file, missing wavelength"
+                    msg = "abs_reader: cannot open %s" % path
+                    raise RuntimeError, msg
+                    #raise ValueError,"IgorReader: can't read this file,
+                    # missing wavelength"
 
                 # Distance in meters
                 try:
                     value = float(line_toks[3])
-                    if has_converter==True and detector.distance_unit != 'm':
+                    if has_converter == True and \
+                        detector.distance_unit != 'm':
                         conv = Converter('m')
-                        detector.distance = conv(value, units=detector.distance_unit)
+                        detector.distance = conv(value,
+                                        units=detector.distance_unit)
                     else:
                         detector.distance = value
                 except:
                     #goes to ASC reader
-                    raise RuntimeError,"abs_reader: cannot open %s" % path
-
+                    msg = "abs_reader: cannot open %s" % path
+                    raise RuntimeError, msg
                 # Transmission
                 try:
…
                 try:
                     value = float(line_toks[5])
-                    if has_converter==True and output.sample.thickness_unit != 'cm':
+                    if has_converter == True and \
+                        output.sample.thickness_unit != 'cm':
                         conv = Converter('cm')
-                        output.sample.thickness = conv(value, units=output.sample.thickness_unit)
+                        output.sample.thickness = conv(value,
+                                        units=output.sample.thickness_unit)
                     else:
                         output.sample.thickness = value
…
                     pass
 
-            #MON CNT   LAMBDA   DET ANG   DET DIST   TRANS   THICK   AVE   STEP
-            if line.count("LAMBDA")>0:
+            #MON CNT   LAMBDA   DET ANG   DET DIST   TRANS   THICK
+            #  AVE   STEP
+            if line.count("LAMBDA") > 0:
                 is_info = True
 
             # Find center info line
-            if is_center==True:
+            if is_center == True:
                 is_center = False
                 line_toks = line.split()
…
 
                 # Bin size
-                if has_converter==True and detector.pixel_size_unit != 'mm':
+                if has_converter == True and \
+                    detector.pixel_size_unit != 'mm':
                     conv = Converter('mm')
-                    detector.pixel_size.x = conv(5.0, units=detector.pixel_size_unit)
-                    detector.pixel_size.y = conv(5.0, units=detector.pixel_size_unit)
+                    detector.pixel_size.x = conv(5.0,
+                                        units=detector.pixel_size_unit)
+                    detector.pixel_size.y = conv(5.0,
+                                        units=detector.pixel_size_unit)
                 else:
                     detector.pixel_size.x = 5.0
…
                 # Store beam center in distance units
                 # Det 640 x 640 mm
-                if has_converter==True and detector.beam_center_unit != 'mm':
+                if has_converter==True and \
+                    detector.beam_center_unit != 'mm':
                     conv = Converter('mm')
-                    detector.beam_center.x = conv(center_x*5.0, units=detector.beam_center_unit)
-                    detector.beam_center.y = conv(center_y*5.0, units=detector.beam_center_unit)
+                    detector.beam_center.x = conv(center_x * 5.0,
+                                        units=detector.beam_center_unit)
+                    detector.beam_center.y = conv(center_y * 5.0,
+                                        units=detector.beam_center_unit)
                 else:
-                    detector.beam_center.x = center_x*5.0
-                    detector.beam_center.y = center_y*5.0
+                    detector.beam_center.x = center_x * 5.0
+                    detector.beam_center.y = center_y * 5.0
 
                 # Detector type
…
                     pass
 
-            #BCENT(X,Y)   A1(mm)   A2(mm)   A1A2DIST(m)   DL/L   BSTOP(mm)   DET_TYP
-            if line.count("BCENT")>0:
+            #BCENT(X,Y)   A1(mm)   A2(mm)   A1A2DIST(m)   DL/L
+            #  BSTOP(mm)   DET_TYP
+            if line.count("BCENT") > 0:
                 is_center = True
 
             # Parse the data
-            if is_data_started==True:
+            if is_data_started == True:
                 toks = line.split()
…
                     pass
 
-            #The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev. I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
+            #The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
+            # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
             if line.count("The 6 columns")>0:
                 is_data_started = True
…
         # Sanity check
         if not len(y) == len(dy):
-            raise ValueError,"abs_reader: y and dy have different length"
+            msg = "abs_reader: y and dy have different length"
+            raise ValueError, msg
         # If the data length is zero, consider this as
         # though we were not able to read the file.
-        if len(x)==0:
+        if len(x) == 0:
             raise ValueError, "ascii_reader: could not load file"
-
-
         output.x = x
         output.y = y
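Every hunk above that rewraps a conv(...) call follows the same optional-dependency pattern: the units package may be absent, so conversion happens only when the import succeeded. A condensed sketch of that pattern, assuming the same data_util.nxsunit import used elsewhere in DataLoader; the convert() helper is illustrative, not part of the reader:

    has_converter = True
    try:
        from data_util.nxsunit import Converter
    except ImportError:
        has_converter = False

    def convert(value, from_unit, to_unit):
        # Convert only when the optional converter is available and
        # needed; otherwise store the raw value, as the reader does.
        if has_converter and from_unit != to_unit:
            return Converter(from_unit)(value, units=to_unit)
        return value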
DataLoader/readers/ascii_reader.py
(rfca90f82 → ra7a5886)

                  "ASCII files (*.abs)|*.abs"]
     ## List of allowed extensions
-    ext =['.txt', '.TXT', '.dat', '.DAT', '.abs', '.ABS']
+    ext = ['.txt', '.TXT', '.dat', '.DAT', '.abs', '.ABS']
 
     ## Flag to bypass extension check
…
         if os.path.isfile(path):
             basename = os.path.basename(path)
-            root, extension = os.path.splitext(basename)
+            _, extension = os.path.splitext(basename)
             if self.allow_all or extension.lower() in self.ext:
                 try:
…
                     lines = buff.split('\n')
 
-                    #Jae could not find python universal line spliter: keep the below for now
-                    # some ascii data has \r line separator, try it when the data is on only one long line
+                    #Jae could not find python universal line spliter:
+                    #keep the below for now
+                    # some ascii data has \r line separator,
+                    # try it when the data is on only one long line
                    if len(lines) < 2 :
                         lines = buff.split('\r')
…
                     #Initialize counters for data lines and header lines.
                     is_data = False #Has more than 5 lines
-                    mum_data_lines = 5 # More than "5" lines of data is considered as actual data unless that is the only data
-                    i=-1            # To count # of current data candidate lines
-                    i1=-1           # To count total # of previous data candidate lines
-                    j=-1            # To count # of header lines
-                    j1=-1           # Helps to count # of header lines
-                    lentoks = 2     # minimum required number of columns of data; ( <= 4).
-
+                    # More than "5" lines of data is considered as actual
+                    # data unless that is the only data
+                    mum_data_lines = 5
+                    # To count # of current data candidate lines
+                    i = -1
+                    # To count total # of previous data candidate lines
+                    i1 = -1
+                    # To count # of header lines
+                    j = -1
+                    # Helps to count # of header lines
+                    j1 = -1
+                    #minimum required number of columns of data; ( <= 4).
+                    lentoks = 2
                     for line in lines:
                         toks = line.split()
-
                         try:
                             #Make sure that all columns are numbers.
…
                             # third column.
                             _dy = None
-                            if len(toks)>2:
+                            if len(toks) > 2:
                                 try:
                                     _dy = float(toks[2])
…
                             #Check for dx
                             _dx = None
-                            if len(toks)>3:
+                            if len(toks) > 3:
                                 try:
                                     _dx = float(toks[3])
…
                             has_error_dx = False if _dx == None else True
 
-                            #After talked with PB, we decided to take care of only 4 columns of data for now.
+                            #After talked with PB, we decided to take care of only
+                            # 4 columns of data for now.
                             #number of columns in the current line
-                            #To remember the # of columns in the current line of data
+                            #To remember the # of columns in the current
+                            #line of data
                             new_lentoks = len(toks)
 
-                            #If the previous columns not equal to the current, mark the previous as non-data and reset the dependents.
+                            #If the previous columns not equal to the current,
+                            #mark the previous as non-data and reset the dependents.
                             if lentoks != new_lentoks :
                                 if is_data == True:
…
 
-                            #Delete the previously stored lines of data candidates if is not data.
-                            if i < 0 and -1< i1 < mum_data_lines and is_data == False:
+                            #Delete the previously stored lines of data candidates
+                            # if is not data.
+                            if i < 0 and -1 < i1 < mum_data_lines and \
+                                is_data == False:
                                 try:
-                                    x= numpy.zeros(0)
-                                    y= numpy.zeros(0)
-
+                                    x = numpy.zeros(0)
+                                    y = numpy.zeros(0)
                                 except:
                                     pass
…
 
                             if has_error_dy == True:
-                                #Delete the previously stored lines of data candidates if is not data.
-                                if i < 0 and -1< i1 < mum_data_lines and is_data== False:
+                                #Delete the previously stored lines of
+                                # data candidates if is not data.
+                                if i < 0 and -1 < i1 < mum_data_lines and \
+                                    is_data == False:
                                     try:
                                         dy = numpy.zeros(0)
…
 
                             if has_error_dx == True:
-                                #Delete the previously stored lines of data candidates if is not data.
-                                if i < 0 and -1< i1 < mum_data_lines and is_data== False:
+                                #Delete the previously stored lines of
+                                # data candidates if is not data.
+                                if i < 0 and -1 < i1 < mum_data_lines and \
+                                    is_data == False:
                                     try:
                                         dx = numpy.zeros(0)
…
 
                             #Same for temp.
-                            #Delete the previously stored lines of data candidates if is not data.
-                            if i < 0 and -1< i1 < mum_data_lines and is_data== False:
+                            #Delete the previously stored lines of data candidates
+                            # if is not data.
+                            if i < 0 and -1 < i1 < mum_data_lines and\
+                                is_data == False:
                                 try:
                                     tx = numpy.zeros(0)
                                     ty = numpy.zeros(0)
                                 except:
-                                    pass 
+                                    pass
 
                             tx = numpy.append(tx, _x)
…
 
                             if has_error_dy == True:
-                                #Delete the previously stored lines of data candidates if is not data.
-                                if i < 0 and -1<i1 < mum_data_lines and is_data== False:
+                                #Delete the previously stored lines of
+                                # data candidates if is not data.
+                                if i < 0 and -1 < i1 < mum_data_lines and \
+                                    is_data == False:
                                     try:
                                         tdy = numpy.zeros(0)
…
                                 tdy = numpy.append(tdy, _dy)
                             if has_error_dx == True:
-                                #Delete the previously stored lines of data candidates if is not data.
-                                if i < 0 and -1< i1 < mum_data_lines and is_data== False:
+                                #Delete the previously stored lines of
+                                # data candidates if is not data.
+                                if i < 0 and -1 < i1 < mum_data_lines and \
+                                    is_data == False:
                                     try:
                                         tdx = numpy.zeros(0)
                                     except:
-                                        pass 
+                                        pass
                                 tdx = numpy.append(tdx, _dx)
 
                             #reset i1 and flag lentoks for the next
-                            if lentoks < new_lentoks :
+                            if lentoks < new_lentoks:
                                 if is_data == False:
                                     i1 = -1
-                            #To remember the # of columns on the current line for the next line of data
+                            #To remember the # of columns on the current line
+                            # for the next line of data
                             lentoks = len(toks)
 
-                            #Reset # of header lines and counts # of data candidate lines
-                            if j == 0 and j1 ==0:
+                            #Reset # of header lines and counts #
+                            # of data candidate lines
+                            if j == 0 and j1 == 0:
                                 i1 = i + 1
-                            i+=1
-
+                            i += 1
                         except:
 
…
                             lentoks = 2
                             #Counting # of header lines
-                            j+=1
-                            if j == j1+1:
+                            j += 1
+                            if j == j1 + 1:
                                 j1 = j
                             else:
…
                             pass
 
-
                     input_f.close()
                     # Sanity check
                     if has_error_dy == True and not len(y) == len(dy):
-                        raise RuntimeError, "ascii_reader: y and dy have different length"
+                        msg = "ascii_reader: y and dy have different length"
+                        raise RuntimeError, msg
                     if has_error_dx == True and not len(x) == len(dx):
-                        raise RuntimeError,"ascii_reader: y and dy have different length"
-
+                        msg = "ascii_reader: y and dy have different length"
+                        raise RuntimeError, msg
                     # If the data length is zero, consider this as
                     # though we were not able to read the file.
-                    if len(x)==0:
+                    if len(x) == 0:
                         raise RuntimeError, "ascii_reader: could not load file"
 
-                    #Let's re-order the data to make cal. curve look better some cases
-                    ind = numpy.lexsort((ty,tx))
+                    #Let's re-order the data to make cal.
+                    # curve look better some cases
+                    ind = numpy.lexsort((ty, tx))
                     for i in ind:
                         x[i] = tx[ind[i]]
…
                         if has_error_dx == True:
                             dx[i] = tdx[ind[i]]
-
-
-
                     #Data
                     output.x = x
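The re-ordering at the end of this hunk relies on a numpy.lexsort subtlety worth spelling out: lexsort sorts by its last key first, so (ty, tx) sorts primarily on the Q values in tx and breaks ties with ty. Because ind is a permutation of all indices, the element-wise loop in the reader is equivalent to fancy indexing. A self-contained illustration with assumed values:

    import numpy

    tx = numpy.array([0.3, 0.1, 0.2])   # Q values, unsorted (assumed)
    ty = numpy.array([3.0, 1.0, 2.0])   # matching intensities (assumed)

    ind = numpy.lexsort((ty, tx))       # primary key is tx, the LAST key
    x = tx[ind]                         # same result as the reader's loop
    y = ty[ind]
    print x, y                          # [ 0.1  0.2  0.3] [ 1.  2.  3.]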
DataLoader/readers/associations.py
(r0997158f → ra7a5886)

 """
 
-import os, sys
+import os
+import sys
 import logging
 from lxml import etree
…
 
     # Read in the file extension associations
-    entry_list = root.xpath('/ns:SansLoader/ns:FileType', namespaces={'ns': VERSION})
+    entry_list = root.xpath('/ns:SansLoader/ns:FileType',
+                            namespaces={'ns': VERSION})
 
     # For each FileType entry, get the associated reader and extension
…
         if reader is not None and ext is not None:
             # Associate the extension with a particular reader
-            # TODO: Modify the Register code to be case-insensitive and remove the
-            #       extra line below.
+            # TODO: Modify the Register code to be case-insensitive
+            # and remove the extra line below.
             try:
                 exec "import %s" % reader
-                exec "loader.associate_file_type('%s', %s)" % (ext.lower(), reader)
-                exec "loader.associate_file_type('%s', %s)" % (ext.upper(), reader)
+                exec "loader.associate_file_type('%s', %s)" % (ext.lower(),
+                                                               reader)
+                exec "loader.associate_file_type('%s', %s)" % (ext.upper(),
+                                                               reader)
             except:
-                logging.error("read_associations: skipping association for %s\n  %s" % (attr['extension'], sys.exc_value))
+                msg = "read_associations: skipping association"
+                msg += " for %s\n  %s" % (attr['extension'], sys.exc_value)
+                logging.error(msg)
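The two exec statements wrapped here register one reader per file extension, in both case variants. A hedged sketch of the equivalent behavior with exec replaced by importlib for clarity; the registry dict and associate() helper are illustrative stand-ins, not the actual Register/Loader API:

    import importlib

    def associate(registry, ext, reader_name):
        # Import the reader module by name and map both case variants
        # of the extension to it, mirroring the lower()/upper() pair.
        module = importlib.import_module(reader_name)
        registry[ext.lower()] = module
        registry[ext.upper()] = module

    registry = {}
    associate(registry, '.json', 'json')   # stdlib module, just for the demo
    print sorted(registry.keys())          # ['.JSON', '.json']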
DataLoader/readers/cansas_reader.py
(r7406040 → ra7a5886)

 # within a single SASentry. Will raise a runtime error.
 
-#TODO: check that all vectors are written only if they have at least one non-empty value
-#TODO: Writing only allows one SASentry per file. Would be best to allow multiple entries.
+#TODO: check that all vectors are written only if they have at
+# least one non-empty value
+#TODO: Writing only allows one SASentry per file.
+# Would be best to allow multiple entries.
 #TODO: Store error list
 #TODO: Allow for additional meta data for each section
-#TODO: Notes need to be implemented. They can be any XML structure in version 1.0
+#TODO: Notes need to be implemented. They can be any XML
+# structure in version 1.0
 #      Process notes have the same problem.
 #TODO: Unit conversion is not complete (temperature units are missing)
…
 import logging
 import numpy
-import os, sys
-from DataLoader.data_info import Data1D, Collimation, Detector, Process, Aperture
+import os
+import sys
+from DataLoader.data_info import Data1D
+from DataLoader.data_info import Collimation
+from DataLoader.data_info import Detector
+from DataLoader.data_info import Process
+from DataLoader.data_info import Aperture
 from lxml import etree
 import xml.dom.minidom
…
     value = None
     attr = {}
-
-    if len(nodes)>0:
+    if len(nodes) > 0:
         try:
             value = float(nodes[0].text)
         except:
             # Could not pass, skip and return None
-            logging.error("cansas_reader.get_float: could not convert '%s' to float" % nodes[0].text)
-
+            msg = "cansas_reader.get_float: could not "
+            msg += " convert '%s' to float" % nodes[0].text
+            logging.error(msg)
         if nodes[0].get('unit') is not None:
             attr['unit'] = nodes[0].get('unit')
-
     return value, attr
 
 
-
 class Reader:
     """
…
 
     ## List of allowed extensions
-    ext =['.xml', '.XML','.avex', '.AVEx', '.absx', 'ABSx']
+    ext = ['.xml', '.XML','.avex', '.AVEx', '.absx', 'ABSx']
 
     def __init__(self):
…
             tree = etree.parse(path, parser=etree.ETCompatXMLParser())
             # Check the format version number
-            # Specifying the namespace will take care of the file format version
+            # Specifying the namespace will take care of the file
+            # format version
             root = tree.getroot()
 
-            entry_list = root.xpath('/ns:SASroot/ns:SASentry', namespaces={'ns': CANSAS_NS})
+            entry_list = root.xpath('/ns:SASroot/ns:SASentry',
+                                    namespaces={'ns': CANSAS_NS})
 
             for entry in entry_list:
…
 
         # Look up instrument name
-        self._store_content('ns:SASinstrument/ns:name', dom, 'instrument', data_info)
+        self._store_content('ns:SASinstrument/ns:name', dom, 'instrument',
+                            data_info)
 
         # Notes
…
                     data_info.notes.append(note_value)
             except:
-                err_mess = "cansas_reader.read: error processing entry notes\n  %s" % sys.exc_value
+                err_mess = "cansas_reader.read: error processing"
+                err_mess += " entry notes\n  %s" % sys.exc_value
                 self.errors.append(err_mess)
                 logging.error(err_mess)
…
                           dom, 'temperature', data_info.sample)
 
-        nodes = dom.xpath('ns:SASsample/ns:details', namespaces={'ns': CANSAS_NS})
+        nodes = dom.xpath('ns:SASsample/ns:details',
+                          namespaces={'ns': CANSAS_NS})
         for item in nodes:
             try:
…
                     data_info.sample.details.append(detail_value)
             except:
-                err_mess = "cansas_reader.read: error processing sample details\n  %s" % sys.exc_value
+                err_mess = "cansas_reader.read: error processing "
+                err_mess += " sample details\n  %s" % sys.exc_value
                 self.errors.append(err_mess)
                 logging.error(err_mess)
…
 
         # Collimation info ###################
-        nodes = dom.xpath('ns:SASinstrument/ns:SAScollimation', namespaces={'ns': CANSAS_NS})
+        nodes = dom.xpath('ns:SASinstrument/ns:SAScollimation',
+                          namespaces={'ns': CANSAS_NS})
         for item in nodes:
             collim = Collimation()
…
 
         # Detector info ######################
-        nodes = dom.xpath('ns:SASinstrument/ns:SASdetector', namespaces={'ns': CANSAS_NS})
+        nodes = dom.xpath('ns:SASinstrument/ns:SASdetector',
+                          namespaces={'ns': CANSAS_NS})
         for item in nodes:
 
…
 
             # Detector orientation (as a vector)
-            self._store_float('ns:orientation/ns:roll', item, 'orientation.x', detector)
-            self._store_float('ns:orientation/ns:pitch', item, 'orientation.y', detector)
-            self._store_float('ns:orientation/ns:yaw', item, 'orientation.z', detector)
+            self._store_float('ns:orientation/ns:roll', item, 'orientation.x',
+                              detector)
+            self._store_float('ns:orientation/ns:pitch', item, 'orientation.y',
+                              detector)
+            self._store_float('ns:orientation/ns:yaw', item, 'orientation.z',
+                              detector)
 
             # Beam center (as a vector)
-            self._store_float('ns:beam_center/ns:x', item, 'beam_center.x', detector)
-            self._store_float('ns:beam_center/ns:y', item, 'beam_center.y', detector)
-            self._store_float('ns:beam_center/ns:z', item, 'beam_center.z', detector)
+            self._store_float('ns:beam_center/ns:x', item, 'beam_center.x',
+                              detector)
+            self._store_float('ns:beam_center/ns:y', item, 'beam_center.y',
+                              detector)
+            self._store_float('ns:beam_center/ns:z', item, 'beam_center.z',
+                              detector)
 
             # Pixel size (as a vector)
-            self._store_float('ns:pixel_size/ns:x', item, 'pixel_size.x', detector)
-            self._store_float('ns:pixel_size/ns:y', item, 'pixel_size.y', detector)
-            self._store_float('ns:pixel_size/ns:z', item, 'pixel_size.z', detector)
+            self._store_float('ns:pixel_size/ns:x', item, 'pixel_size.x',
+                              detector)
+            self._store_float('ns:pixel_size/ns:y', item, 'pixel_size.y',
+                              detector)
+            self._store_float('ns:pixel_size/ns:z', item, 'pixel_size.z',
+                              detector)
 
             self._store_float('ns:slit_length', item, 'slit_length', detector)
…
                         process.term.append(term_attr)
                 except:
-                    err_mess = "cansas_reader.read: error processing process term\n  %s" % sys.exc_value
+                    err_mess = "cansas_reader.read: error processing "
+                    err_mess += " process term\n  %s" % sys.exc_value
                     self.errors.append(err_mess)
                     logging.error(err_mess)
 
-            note_list = item.xpath('ns:SASprocessnote', namespaces={'ns': CANSAS_NS})
+            note_list = item.xpath('ns:SASprocessnote',
+                                   namespaces={'ns': CANSAS_NS})
             for note in note_list:
                 if note.text is not None:
…
         # Data info ######################
         nodes = dom.xpath('ns:SASdata', namespaces={'ns': CANSAS_NS})
-        if len(nodes)>1:
-            raise RuntimeError, "CanSAS reader is not compatible with multiple SASdata entries"
+        if len(nodes) > 1:
+            msg = "CanSAS reader is not compatible with multiple"
+            msg += " SASdata entries"
+            raise RuntimeError, msg
 
         nodes = dom.xpath('ns:SASdata/ns:Idata', namespaces={'ns': CANSAS_NS})
…
             _dxw = 0.0
 
-            if attr.has_key('unit') and attr['unit'].lower() != data_info.x_unit.lower():
+            if attr.has_key('unit') and \
+                attr['unit'].lower() != data_info.x_unit.lower():
                 if has_converter==True:
                     try:
                         data_conv_q = Converter(attr['unit'])
                         _x = data_conv_q(_x, units=data_info.x_unit)
                     except:
-                        raise ValueError, "CanSAS reader: could not convert Q unit [%s]; expecting [%s]\n  %s" \
-                        % (attr['unit'], data_info.x_unit, sys.exc_value)
+                        msg = "CanSAS reader: could not convert "
+                        msg += "Q unit [%s]; "
+                        msg += "expecting [%s]\n  %s" % (attr['unit'],
+                                        data_info.x_unit, sys.exc_value)
+                        raise ValueError, msg
                 else:
-                    raise ValueError, "CanSAS reader: unrecognized Q unit [%s]; expecting [%s]" \
-                    % (attr['unit'], data_info.x_unit)
+                    msg = "CanSAS reader: unrecognized Q unit [%s]; "
+                    msg += "expecting [%s]" % (attr['unit'], data_info.x_unit)
+                    raise ValueError, msg
 
             # Error in Q
-            if attr_d.has_key('unit') and attr_d['unit'].lower() != data_info.x_unit.lower():
+            if attr_d.has_key('unit') and \
+                attr_d['unit'].lower() != data_info.x_unit.lower():
                 if has_converter==True:
                     try:
                         data_conv_q = Converter(attr_d['unit'])
                         _dx = data_conv_q(_dx, units=data_info.x_unit)
                     except:
-                        raise ValueError, "CanSAS reader: could not convert dQ unit [%s]; expecting [%s]\n  %s" \
-                        % (attr['unit'], data_info.x_unit, sys.exc_value)
+                        msg = "CanSAS reader: could not convert dQ unit [%s];"
+                        msg += " expecting "
+                        msg += "[%s]\n  %s" % (attr['unit'],
+                                        data_info.x_unit, sys.exc_value)
+                        raise ValueError, msg
                 else:
-                    raise ValueError, "CanSAS reader: unrecognized dQ unit [%s]; expecting [%s]" \
-                    % (attr['unit'], data_info.x_unit)
+                    msg = "CanSAS reader: unrecognized dQ unit [%s]; "
+                    msg += "expecting [%s]" % (attr['unit'], data_info.x_unit)
+                    raise ValueError, msg
 
             # Slit length
-            if attr_l.has_key('unit') and attr_l['unit'].lower() != data_info.x_unit.lower():
-                if has_converter==True:
+            if attr_l.has_key('unit') and \
+                attr_l['unit'].lower() != data_info.x_unit.lower():
+                if has_converter == True:
                     try:
                         data_conv_q = Converter(attr_l['unit'])
                         _dxl = data_conv_q(_dxl, units=data_info.x_unit)
                     except:
-                        raise ValueError, "CanSAS reader: could not convert dQl unit [%s]; expecting [%s]\n  %s" \
-                        % (attr['unit'], data_info.x_unit, sys.exc_value)
+                        msg = "CanSAS reader: could not convert dQl unit [%s];"
+                        msg += " expecting [%s]\n  %s" % (attr['unit'],
+                                        data_info.x_unit, sys.exc_value)
+                        raise ValueError, msg
                 else:
-                    raise ValueError, "CanSAS reader: unrecognized dQl unit [%s]; expecting [%s]" \
-                    % (attr['unit'], data_info.x_unit)
+                    msg = "CanSAS reader: unrecognized dQl unit [%s];"
+                    msg += " expecting [%s]" % (attr['unit'], data_info.x_unit)
+                    raise ValueError, msg
 
             # Slit width
-            if attr_w.has_key('unit') and attr_w['unit'].lower() != data_info.x_unit.lower():
-                if has_converter==True:
+            if attr_w.has_key('unit') and \
+                attr_w['unit'].lower() != data_info.x_unit.lower():
+                if has_converter == True:
                     try:
                         data_conv_q = Converter(attr_w['unit'])
                         _dxw = data_conv_q(_dxw, units=data_info.x_unit)
                     except:
-                        raise ValueError, "CanSAS reader: could not convert dQw unit [%s]; expecting [%s]\n  %s" \
-                        % (attr['unit'], data_info.x_unit, sys.exc_value)
+                        msg = "CanSAS reader: could not convert dQw unit [%s];"
+                        msg += " expecting [%s]\n  %s" % (attr['unit'],
+                                        data_info.x_unit, sys.exc_value)
+                        raise ValueError, msg
                 else:
-                    raise ValueError, "CanSAS reader: unrecognized dQw unit [%s]; expecting [%s]" \
-                    % (attr['unit'], data_info.x_unit)
+                    msg = "CanSAS reader: unrecognized dQw unit [%s];"
+                    msg += " expecting [%s]" % (attr['unit'], data_info.x_unit)
+                    raise ValueError, msg
 
             _y, attr = get_float('ns:I', item)
             _dy, attr_d = get_float('ns:Idev', item)
             if _dy == None:
                 _dy = 0.0
-            if attr.has_key('unit') and attr['unit'].lower() != data_info.y_unit.lower():
+            if attr.has_key('unit') and \
+                attr['unit'].lower() != data_info.y_unit.lower():
                 if has_converter==True:
                     try:
                         data_conv_i = Converter(attr['unit'])
                         _y = data_conv_i(_y, units=data_info.y_unit)
                     except:
-                        raise ValueError, "CanSAS reader: could not convert I(q) unit [%s]; expecting [%s]\n  %s" \
-                        % (attr['unit'], data_info.y_unit, sys.exc_value)
+                        msg = "CanSAS reader: could not convert I(q) unit [%s];"
+                        msg += " expecting [%s]\n  %s" % (attr['unit'],
+                                        data_info.y_unit, sys.exc_value)
+                        raise ValueError, msg
                 else:
-                    raise ValueError, "CanSAS reader: unrecognized I(q) unit [%s]; expecting [%s]" \
-                    % (attr['unit'], data_info.y_unit)
-            if attr_d.has_key('unit') and attr_d['unit'].lower() != data_info.y_unit.lower():
+                    msg = "CanSAS reader: unrecognized I(q) unit [%s];"
+                    msg += " expecting [%s]" % (attr['unit'], data_info.y_unit)
+                    raise ValueError, msg
+
+            if attr_d.has_key('unit') and \
+                attr_d['unit'].lower() != data_info.y_unit.lower():
                 if has_converter==True:
                     try:
                         data_conv_i = Converter(attr_d['unit'])
                         _dy = data_conv_i(_dy, units=data_info.y_unit)
                     except:
-                        raise ValueError, "CanSAS reader: could not convert dI(q) unit [%s]; expecting [%s]\n  %s" \
-                        % (attr_d['unit'], data_info.y_unit, sys.exc_value)
+                        msg = "CanSAS reader: could not convert dI(q) unit "
+                        msg += "[%s]; expecting [%s]\n  %s" % (attr_d['unit'],
+                                        data_info.y_unit, sys.exc_value)
+                        raise ValueError, msg
                 else:
-                    raise ValueError, "CanSAS reader: unrecognized dI(q) unit [%s]; expecting [%s]" \
-                    % (attr_d['unit'], data_info.y_unit)
+                    msg = "CanSAS reader: unrecognized dI(q) unit [%s]; "
+                    msg += "expecting [%s]" % (attr_d['unit'], data_info.y_unit)
+                    raise ValueError, msg
 
             if _x is not None and _y is not None:
…
                 dxw = numpy.append(dxw, _dxw)
 
-
         data_info.x = x
         data_info.y = y
…
         main_node.setAttribute("version", self.version)
         main_node.setAttribute("xmlns", "cansas1d/%s" % self.version)
-        main_node.setAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance")
-        main_node.setAttribute("xsi:schemaLocation", "cansas1d/%s http://svn.smallangles.net/svn/canSAS/1dwg/trunk/cansas1d.xsd" % self.version)
+        main_node.setAttribute("xmlns:xsi",
+                               "http://www.w3.org/2001/XMLSchema-instance")
+        main_node.setAttribute("xsi:schemaLocation",
+                               "cansas1d/%s http://svn.smallangles.net/svn/canSAS/1dwg/trunk/cansas1d.xsd" % self.version)
 
         doc.appendChild(main_node)
…
         for item in datainfo.run:
             runname = {}
-            if datainfo.run_name.has_key(item) and len(str(datainfo.run_name[item]))>1:
+            if datainfo.run_name.has_key(item) and \
+                len(str(datainfo.run_name[item]))>1:
                 runname = {'name': datainfo.run_name[item] }
             write_node(doc, entry_node, "Run", item, runname)
…
             write_node(doc, pt, "Q", datainfo.x[i], {'unit':datainfo.x_unit})
             if len(datainfo.y)>=i:
-                write_node(doc, pt, "I", datainfo.y[i], {'unit':datainfo.y_unit})
+                write_node(doc, pt, "I", datainfo.y[i],
+                           {'unit':datainfo.y_unit})
             if datainfo.dx !=None and len(datainfo.dx)>=i:
-                write_node(doc, pt, "Qdev", datainfo.dx[i], {'unit':datainfo.x_unit})
+                write_node(doc, pt, "Qdev", datainfo.dx[i],
+                           {'unit':datainfo.x_unit})
             if datainfo.dy !=None and len(datainfo.dy)>=i:
-                write_node(doc, pt, "Idev", datainfo.dy[i], {'unit':datainfo.y_unit})
+                write_node(doc, pt, "Idev", datainfo.dy[i],
+                           {'unit':datainfo.y_unit})
 
…
         entry_node.appendChild(sample)
         write_node(doc, sample, "ID", str(datainfo.sample.ID))
-        write_node(doc, sample, "thickness", datainfo.sample.thickness, {"unit":datainfo.sample.thickness_unit})
+        write_node(doc, sample, "thickness", datainfo.sample.thickness,
+                   {"unit":datainfo.sample.thickness_unit})
         write_node(doc, sample, "transmission", datainfo.sample.transmission)
-        write_node(doc, sample, "temperature", datainfo.sample.temperature, {"unit":datainfo.sample.temperature_unit})
+        write_node(doc, sample, "temperature", datainfo.sample.temperature,
+                   {"unit":datainfo.sample.temperature_unit})
 
         for item in datainfo.sample.details:
…
         pos = doc.createElement("position")
-        written = write_node(doc, pos, "x", datainfo.sample.position.x, {"unit":datainfo.sample.position_unit})
-        written = written | write_node(doc, pos, "y", datainfo.sample.position.y, {"unit":datainfo.sample.position_unit})
-        written = written | write_node(doc, pos, "z", datainfo.sample.position.z, {"unit":datainfo.sample.position_unit})
+        written = write_node(doc, pos, "x", datainfo.sample.position.x,
+                             {"unit":datainfo.sample.position_unit})
+        written = written | write_node(doc, pos, "y",
+                                       datainfo.sample.position.y,
+                                       {"unit":datainfo.sample.position_unit})
+        written = written | write_node(doc, pos, "z",
+                                       datainfo.sample.position.z,
+                                       {"unit":datainfo.sample.position_unit})
         if written == True:
             sample.appendChild(pos)
 
         ori = doc.createElement("orientation")
-        written = write_node(doc, ori, "roll", datainfo.sample.orientation.x, {"unit":datainfo.sample.orientation_unit})
-        written = written | write_node(doc, ori, "pitch", datainfo.sample.orientation.y, {"unit":datainfo.sample.orientation_unit})
-        written = written | write_node(doc, ori, "yaw", datainfo.sample.orientation.z, {"unit":datainfo.sample.orientation_unit})
+        written = write_node(doc, ori, "roll",
+                             datainfo.sample.orientation.x,
+                             {"unit":datainfo.sample.orientation_unit})
+        written = written | write_node(doc, ori, "pitch",
+                                       datainfo.sample.orientation.y,
+                                       {"unit":datainfo.sample.orientation_unit})
+        written = written | write_node(doc, ori, "yaw",
+                                       datainfo.sample.orientation.z,
+                                       {"unit":datainfo.sample.orientation_unit})
         if written == True:
             sample.appendChild(ori)
…
         if datainfo.source.beam_size_name is not None:
             size.setAttribute("name", str(datainfo.source.beam_size_name))
-        written = write_node(doc, size, "x", datainfo.source.beam_size.x, {"unit":datainfo.source.beam_size_unit})
-        written = written | write_node(doc, size, "y", datainfo.source.beam_size.y, {"unit":datainfo.source.beam_size_unit})
-        written = written | write_node(doc, size, "z", datainfo.source.beam_size.z, {"unit":datainfo.source.beam_size_unit})
+        written = write_node(doc, size, "x", datainfo.source.beam_size.x,
+                             {"unit":datainfo.source.beam_size_unit})
+        written = written | write_node(doc, size, "y",
+                                       datainfo.source.beam_size.y,
+                                       {"unit":datainfo.source.beam_size_unit})
+        written = written | write_node(doc, size, "z",
+                                       datainfo.source.beam_size.z,
+                                       {"unit":datainfo.source.beam_size_unit})
         if written == True:
             source.appendChild(size)
 
-        write_node(doc, source, "wavelength", datainfo.source.wavelength, {"unit":datainfo.source.wavelength_unit})
-        write_node(doc, source, "wavelength_min", datainfo.source.wavelength_min, {"unit":datainfo.source.wavelength_min_unit})
-        write_node(doc, source, "wavelength_max", datainfo.source.wavelength_max, {"unit":datainfo.source.wavelength_max_unit})
-        write_node(doc, source, "wavelength_spread", datainfo.source.wavelength_spread, {"unit":datainfo.source.wavelength_spread_unit})
+        write_node(doc, source, "wavelength",
+                   datainfo.source.wavelength,
+                   {"unit":datainfo.source.wavelength_unit})
+        write_node(doc, source, "wavelength_min",
+                   datainfo.source.wavelength_min,
+                   {"unit":datainfo.source.wavelength_min_unit})
+        write_node(doc, source, "wavelength_max",
+                   datainfo.source.wavelength_max,
+                   {"unit":datainfo.source.wavelength_max_unit})
+        write_node(doc, source, "wavelength_spread",
+                   datainfo.source.wavelength_spread,
+                   {"unit":datainfo.source.wavelength_spread_unit})
 
         # Collimation
…
             instr.appendChild(coll)
 
-            write_node(doc, coll, "length", item.length, {"unit":item.length_unit})
+            write_node(doc, coll, "length", item.length,
+                       {"unit":item.length_unit})
 
             for apert in item.aperture:
…
                 coll.appendChild(ap)
 
-                write_node(doc, ap, "distance", apert.distance, {"unit":apert.distance_unit})
+                write_node(doc, ap, "distance", apert.distance,
+                           {"unit":apert.distance_unit})
 
                 size = doc.createElement("size")
                 if apert.size_name is not None:
                     size.setAttribute("name", str(apert.size_name))
-                written = write_node(doc, size, "x", apert.size.x, {"unit":apert.size_unit})
-                written = written | write_node(doc, size, "y", apert.size.y, {"unit":apert.size_unit})
-                written = written | write_node(doc, size, "z", apert.size.z, {"unit":apert.size_unit})
+                written = write_node(doc, size, "x", apert.size.x,
+                                     {"unit":apert.size_unit})
+                written = written | write_node(doc, size, "y", apert.size.y,
+                                               {"unit":apert.size_unit})
+                written = written | write_node(doc, size, "z", apert.size.z,
+                                               {"unit":apert.size_unit})
                 if written == True:
                     ap.appendChild(size)
…
             det = doc.createElement("SASdetector")
             written = write_node(doc, det, "name", item.name)
-            written = written | write_node(doc, det, "SDD", item.distance, {"unit":item.distance_unit})
-            written = written | write_node(doc, det, "slit_length", item.slit_length, {"unit":item.slit_length_unit})
+            written = written | write_node(doc, det, "SDD", item.distance,
+                                           {"unit":item.distance_unit})
+            written = written | write_node(doc, det, "slit_length",
+                                           item.slit_length,
+                                           {"unit":item.slit_length_unit})
             if written == True:
                 instr.appendChild(det)
 
             off = doc.createElement("offset")
-            written = write_node(doc, off, "x", item.offset.x, {"unit":item.offset_unit})
-            written = written | write_node(doc, off, "y", item.offset.y, {"unit":item.offset_unit})
-            written = written | write_node(doc, off, "z", item.offset.z, {"unit":item.offset_unit})
+            written = write_node(doc, off, "x", item.offset.x,
+                                 {"unit":item.offset_unit})
+            written = written | write_node(doc, off, "y", item.offset.y,
+                                           {"unit":item.offset_unit})
+            written = written | write_node(doc, off, "z", item.offset.z,
+                                           {"unit":item.offset_unit})
             if written == True:
                 det.appendChild(off)
 
             center = doc.createElement("beam_center")
-            written = write_node(doc, center, "x", item.beam_center.x, {"unit":item.beam_center_unit})
-            written = written | write_node(doc, center, "y", item.beam_center.y, {"unit":item.beam_center_unit})
-            written = written | write_node(doc, center, "z", item.beam_center.z, {"unit":item.beam_center_unit})
+            written = write_node(doc, center, "x", item.beam_center.x,
+                                 {"unit":item.beam_center_unit})
+            written = written | write_node(doc, center, "y",
+                                           item.beam_center.y,
+                                           {"unit":item.beam_center_unit})
+            written = written | write_node(doc, center, "z",
+                                           item.beam_center.z,
+                                           {"unit":item.beam_center_unit})
             if written == True:
                 det.appendChild(center)
 
             pix = doc.createElement("pixel_size")
-            written = write_node(doc, pix, "x", item.pixel_size.x, {"unit":item.pixel_size_unit})
-            written = written | write_node(doc, pix, "y", item.pixel_size.y, {"unit":item.pixel_size_unit})
-            written = written | write_node(doc, pix, "z", item.pixel_size.z, {"unit":item.pixel_size_unit})
+            written = write_node(doc, pix, "x", item.pixel_size.x,
+                                 {"unit":item.pixel_size_unit})
+            written = written | write_node(doc, pix, "y", item.pixel_size.y,
+                                           {"unit":item.pixel_size_unit})
+            written = written | write_node(doc, pix, "z", item.pixel_size.z,
+                                           {"unit":item.pixel_size_unit})
             if written == True:
                 det.appendChild(pix)
 
             ori = doc.createElement("orientation")
-            written = write_node(doc, ori, "roll", item.orientation.x, {"unit":item.orientation_unit})
-            written = written | write_node(doc, ori, "pitch", item.orientation.y, {"unit":item.orientation_unit})
-            written = written | write_node(doc, ori, "yaw", item.orientation.z, {"unit":item.orientation_unit})
+            written = write_node(doc, ori, "roll", item.orientation.x,
+                                 {"unit":item.orientation_unit})
+            written = written | write_node(doc, ori, "pitch",
+                                           item.orientation.y,
+                                           {"unit":item.orientation_unit})
+            written = written | write_node(doc, ori, "yaw",
+                                           item.orientation.z,
+                                           {"unit":item.orientation_unit})
             if written == True:
                 det.appendChild(ori)
…
         :param variable: name of the data member to store it in [string]
         :param storage: data object that has the 'variable' data member
-        :param optional: if True, no exception will be raised if unit conversion can't be done
+        :param optional: if True, no exception will be raised
+            if unit conversion can't be done
 
         :raise ValueError: raised when the units are not recognized
…
                 try:
                     conv = Converter(units)
-                    exec "storage.%s = %g" % (variable, conv(value, units=local_unit))
+                    exec "storage.%s = %g" % (variable,
+                                              conv(value, units=local_unit))
                 except:
-                    err_mess = "CanSAS reader: could not convert %s unit [%s]; expecting [%s]\n  %s" \
+                    err_mess = "CanSAS reader: could not convert"
+                    err_mess += " %s unit [%s]; expecting [%s]\n  %s" \
                         % (variable, units, local_unit, sys.exc_value)
                     self.errors.append(err_mess)
…
                     raise ValueError, err_mess
             else:
-                err_mess = "CanSAS reader: unrecognized %s unit [%s]; expecting [%s]" \
-                    % (variable, units, local_unit)
+                err_mess = "CanSAS reader: unrecognized %s unit [%s];"
+                err_mess += " expecting [%s]" % (variable,
+                                                 units, local_unit)
                 self.errors.append(err_mess)
                 if optional:
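All of the xpath calls rewrapped above need the namespaces= mapping because canSAS 1.0 files place every element in a default XML namespace; without a prefix map, lxml's xpath finds nothing. A minimal, self-contained demonstration, assuming CANSAS_NS is the cansas1d/1.0 namespace this reader targets:

    from lxml import etree

    CANSAS_NS = "cansas1d/1.0"
    xml = '<SASroot xmlns="cansas1d/1.0"><SASentry><Title>demo</Title>' \
          '</SASentry></SASroot>'
    root = etree.fromstring(xml)

    # 'SASentry' alone would match nothing; the ns: prefix is required.
    entry_list = root.xpath('ns:SASentry', namespaces={'ns': CANSAS_NS})
    title = entry_list[0].xpath('ns:Title', namespaces={'ns': CANSAS_NS})[0]
    print title.text   # demo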
DataLoader/readers/danse_reader.py
(r0997158f → ra7a5886)

 import math
 import os
-import copy
+#import copy
 import numpy
 import logging
…
         read_it = False
         for item in self.ext:
-            if filename.lower().find(item)>=0:
+            if filename.lower().find(item) >= 0:
                 read_it = True
 
         if read_it:
             try:
                 datafile = open(filename, 'r')
             except :
-                raise RuntimeError,"danse_reader cannot open %s"%(filename)
-
+                raise RuntimeError,"danse_reader cannot open %s" % (filename)
 
         # defaults
         # wavelength in Angstrom
…
 
         output.data = numpy.zeros([size_x,size_y])
-        output.err_data = numpy.zeros([size_x,size_y])
+        output.err_data = numpy.zeros([size_x, size_y])
 
         data_conv_q = None
…
         while read_on:
             line = datafile.readline()
-            if line.find("DATA:")>=0:
+            if line.find("DATA:") >= 0:
                 read_on = False
                 break
             toks = line.split(':')
-            if toks[0]=="FORMATVERSION":
+            if toks[0] == "FORMATVERSION":
                 fversion = float(toks[1])
-            if toks[0]=="WAVELENGTH":
+            if toks[0] == "WAVELENGTH":
                 wavelength = float(toks[1])
-            elif toks[0]=="DISTANCE":
+            elif toks[0] == "DISTANCE":
                 distance = float(toks[1])
-            elif toks[0]=="CENTER_X":
+            elif toks[0] == "CENTER_X":
                 center_x = float(toks[1])
-            elif toks[0]=="CENTER_Y":
+            elif toks[0] == "CENTER_Y":
                 center_y = float(toks[1])
-            elif toks[0]=="PIXELSIZE":
+            elif toks[0] == "PIXELSIZE":
                 pixel = float(toks[1])
-            elif toks[0]=="SIZE_X":
+            elif toks[0] == "SIZE_X":
                 size_x = int(toks[1])
-            elif toks[0]=="SIZE_Y":
+            elif toks[0] == "SIZE_Y":
                 size_y = int(toks[1])
 
…
         data = []
         error = []
-        if fversion==1.0:
+        if fversion == 1.0:
             data_str = datafile.readline()
             data = data_str.split(' ')
…
             while read_on:
                 data_str = datafile.readline()
-                if len(data_str)==0:
+                if len(data_str) == 0:
                     read_on = False
                 else:
…
                         error.append(err)
                     except:
-                        logging.info("Skipping line:%s,%s" %( data_str,sys.exc_value))
+                        logging.info("Skipping line:%s,%s" %( data_str,
+                                                            sys.exc_value))
 
         # Initialize
…
         # Qx and Qy vectors
         theta = pixel / distance / 100.0
-        stepq = 4.0*math.pi/wavelength * math.sin(theta/2.0)
+        stepq = 4.0 * math.pi/wavelength * math.sin(theta/2.0)
         for i_x in range(size_x):
-            theta = (i_x-center_x+1)*pixel / distance / 100.0
-            qx = 4.0*math.pi/wavelength * math.sin(theta/2.0)
+            theta = (i_x - center_x + 1) * pixel / distance / 100.0
+            qx = 4.0 * math.pi / wavelength * math.sin(theta/2.0)
 
             if has_converter == True and output.Q_unit != '1/A':
…
 
             x_vals.append(qx)
-            if xmin==None or qx<xmin:
+            if xmin == None or qx < xmin:
                 xmin = qx
-            if xmax==None or qx>xmax:
+            if xmax == None or qx > xmax:
                 xmax = qx
 
…
         ymax = None
         for i_y in range(size_y):
-            theta = (i_y-center_y+1)*pixel / distance / 100.0
-            qy = 4.0*math.pi/wavelength * math.sin(theta/2.0)
+            theta = (i_y - center_y + 1) * pixel / distance / 100.0
+            qy = 4.0 * math.pi/wavelength * math.sin(theta/2.0)
 
             if has_converter == True and output.Q_unit != '1/A':
…
 
             y_vals.append(qy)
-            if ymin==None or qy<ymin:
+            if ymin == None or qy < ymin:
                 ymin = qy
-            if ymax==None or qy>ymax:
+            if ymax == None or qy > ymax:
                 ymax = qy
 
…
                 # For version 1.0, the data were still
                 # stored as strings at this point.
-                logging.info("Skipping entry (v1.0):%s,%s" %(str(data[i_pt]), sys.exc_value))
+                msg = "Skipping entry (v1.0):%s,%s" %(str(data[i_pt]),
+                                                      sys.exc_value)
+                logging.info(msg)
 
             # Get bin number
-            if math.fmod(i_pt, size_x)==0:
+            if math.fmod(i_pt, size_x) == 0:
                 i_x = 0
                 i_y += 1
…
             #output.err_data[size_y-1-i_y][i_x] = error[i_pt]
 
 
         # Store all data
         # Store wavelength
-        if has_converter==True and output.source.wavelength_unit != 'A':
+        if has_converter == True and output.source.wavelength_unit != 'A':
             conv = Converter('A')
-            wavelength = conv(wavelength, units=output.source.wavelength_unit)
+            wavelength = conv(wavelength,
+                              units=output.source.wavelength_unit)
         output.source.wavelength = wavelength
 
         # Store distance
-        if has_converter==True and detector.distance_unit != 'm':
+        if has_converter == True and detector.distance_unit != 'm':
             conv = Converter('m')
             distance = conv(distance, units=detector.distance_unit)
…
 
         # Store pixel size
-        if has_converter==True and detector.pixel_size_unit != 'mm':
+        if has_converter == True and detector.pixel_size_unit != 'mm':
             conv = Converter('mm')
             pixel = conv(pixel, units=detector.pixel_size_unit)
…
 
         # Store limits of the image (2D array)
-        xmin    =xmin-stepq/2.0
-        xmax    =xmax+stepq/2.0
-        ymin    =ymin-stepq/2.0
-        ymax    =ymax+stepq/2.0
+        xmin = xmin - stepq / 2.0
+        xmax = xmax + stepq / 2.0
+        ymin = ymin - stepq /2.0
+        ymax = ymax + stepq / 2.0
 
         if has_converter == True and output.Q_unit != '1/A':
…
         output.zaxis("\\rm{Intensity}","cm^{-1}")
 
-        if not fversion>=1.0:
-            raise ValueError,"Danse_reader can't read this file %s"%filename
-        else:
-            logging.info("Danse_reader Reading %s \n"%filename)
+        if not fversion >= 1.0:
+            msg = "Danse_reader can't read this file %s" % filename
+            raise ValueError, msg
+        else:
+            logging.info("Danse_reader Reading %s \n" % filename)
 
         # Store loading process information
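The xmin/xmax/ymin/ymax lines reformatted above encode a small but easy-to-miss detail: the Q values computed per pixel are bin centers, so the image limits are padded by half a Q step to describe bin edges. A sketch with assumed numbers:

    # centers of the Q bins along one axis (assumed values)
    centers = [0.00, 0.01, 0.02, 0.03]
    stepq = 0.01                          # Q step between adjacent pixels

    xmin = min(centers) - stepq / 2.0     # -0.005: left edge of first bin
    xmax = max(centers) + stepq / 2.0     #  0.035: right edge of last bin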
DataLoader/readers/hfir1d_reader.py
(r0997158f → ra7a5886)

     type = ["HFIR 1D files (*.d1d)|*.d1d"]
     ## List of allowed extensions
-    ext =['.d1d']
+    ext = ['.d1d']
 
     def read(self, path):
…
                         _dy = data_conv_i(_dy, units=output.y_unit)
 
-
                     x = numpy.append(x, _x)
                     y = numpy.append(y, _y)
…
             # Sanity check
             if not len(y) == len(dy):
-                raise RuntimeError, "hfir1d_reader: y and dy have different length"
+                msg = "hfir1d_reader: y and dy have different length"
+                raise RuntimeError, msg
             if not len(x) == len(dx):
-                raise RuntimeError, "hfir1d_reader: x and dx have different length"
+                msg = "hfir1d_reader: x and dx have different length"
+                raise RuntimeError, msg
 
             # If the data length is zero, consider this as
             # though we were not able to read the file.
-            if len(x)==0:
+            if len(x) == 0:
                 raise RuntimeError, "hfir1d_reader: could not load file"
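The sanity checks made two-line here guard the reader's accumulate-then-validate structure: every parsed row appends to parallel numpy arrays, and mismatched lengths mean a malformed row slipped through. A compact sketch, with assumed parsed values and the Python 2 raise syntax this codebase uses:

    import numpy

    y = numpy.zeros(0)
    dy = numpy.zeros(0)
    for _y, _dy in [(5.0, 0.1), (6.0, 0.2)]:   # parsed (I, dI) pairs (assumed)
        y = numpy.append(y, _y)
        dy = numpy.append(dy, _dy)

    if not len(y) == len(dy):
        msg = "hfir1d_reader: y and dy have different length"
        raise RuntimeError, msg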
DataLoader/readers/red2d_reader.py
(rd2539aa → ra7a5886)

 
-import os, sys
+import os
+#import sys
 import numpy
-import math, logging
+import math
+#import logging
 from DataLoader.data_info import Data2D, Detector
 
…
                     wavelength = float(line_toks[1])
                     # Units
-                    if has_converter==True and output.source.wavelength_unit != 'A':
+                    if has_converter == True and \
+                        output.source.wavelength_unit != 'A':
                         conv = Converter('A')
-                        wavelength = conv(wavelength, units=output.source.wavelength_unit)
+                        wavelength = conv(wavelength,
+                                          units=output.source.wavelength_unit)
                 except:
                     #Not required
…
                     distance = float(line_toks[3])
                     # Units
-                    if has_converter==True and detector.distance_unit != 'm':
+                    if has_converter == True and detector.distance_unit != 'm':
                         conv = Converter('m')
                         distance = conv(distance, units=detector.distance_unit)
…
                     pass
 
-            if line.count("LAMBDA")>0:
+            if line.count("LAMBDA") > 0:
                 isInfo = True
 
…
                 center_y = float(line_toks[1])
 
-            if line.count("BCENT")>0:
+            if line.count("BCENT") > 0:
                 isCenter = True
 
             # Find data start
-            if line.count("Data columns") or line.count("ASCII data")>0:
+            if line.count("Data columns") or line.count("ASCII data") > 0:
                 dataStarted = True
                 continue
…
                 col_num = len(line_toks)
                 break
-
-
         # Make numpy array to remove header lines using index
         lines_array = numpy.array(lines)
…
 
         # get the data lines
-        data_lines = lines_array[lines_index>=(line_num-1)]
+        data_lines = lines_array[lines_index >= (line_num - 1)]
         # Now we get the total number of rows (i.e., # of data points)
         row_num = len(data_lines)
…
         data_list = data_list.split()
 
-        # Check if the size is consistent with data, otherwise try the tab(\t) separator
-        # (this may be removed once get the confidence the former working all cases).
+        # Check if the size is consistent with data, otherwise
+        #try the tab(\t) separator
+        # (this may be removed once get the confidence
+        #the former working all cases).
         if len(data_list) != (len(data_lines)) * col_num:
             data_list = "\t".join(data_lines.tolist())
…
         # numpy array form
         data_array = numpy.array(data_list1)
-        # Redimesion based on the row_num and col_num, otherwise raise an error.
+        # Redimesion based on the row_num and col_num,
+        #otherwise raise an error.
         try:
-            data_point = data_array.reshape(row_num,col_num).transpose()
+            data_point = data_array.reshape(row_num, col_num).transpose()
         except:
-            raise ValueError, "red2d_reader: Can't read this file: Not a proper file format"
+            msg = "red2d_reader: Can't read this file: Not a proper file format"
+            raise ValueError, msg
 
         ## Get the all data: Let's HARDcoding; Todo find better way
…
         qy_data = data_point[1]
         data = data_point[2]
-        if col_num >3: qz_data = data_point[3]
-        if col_num >4: dqx_data = data_point[4]
-        if col_num >5: dqy_data = data_point[5]
-        if col_num >6: mask[data_point[6]<1] = False
+        if col_num > 3: qz_data = data_point[3]
+        if col_num > 4: dqx_data = data_point[4]
+        if col_num > 5: dqy_data = data_point[5]
+        if col_num > 6: mask[data_point[6] < 1] = False
         q_data = numpy.sqrt(qx_data*qx_data+qy_data*qy_data+qz_data*qz_data)
 
…
         # optional data: if all of dq data == 0, do not pass to output
         if len(dqx_data) == len(qx_data) and dqx_data.any()!=0:
-            # if no dqx_data, do not pass dqy_data.(1 axis dq is not supported yet).
+            # if no dqx_data, do not pass dqy_data.
+            #(1 axis dq is not supported yet).
             if len(dqy_data) == len(qy_data) and dqy_data.any()!=0:
                 output.dqx_data = dqx_data
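The reshape/transpose guarded by the try block above is the heart of this reader: the whitespace-split file body is one flat value list, and reshaping it to (row_num, col_num) then transposing gives one array per column. A self-contained illustration with an assumed 3-column (qx, qy, I) layout:

    import numpy

    col_num, row_num = 3, 2
    # flat qx, qy, I values for two detector pixels (assumed)
    data_list1 = [0.1, 0.2, 5.0, 0.3, 0.4, 6.0]

    data_array = numpy.array(data_list1)
    data_point = data_array.reshape(row_num, col_num).transpose()

    qx_data, qy_data, data = data_point[0], data_point[1], data_point[2]
    print qx_data   # [ 0.1  0.3]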
DataLoader/readers/tiff_reader.py
(r0997158f → ra7a5886)

             import Image
         except:
-            raise RuntimeError, "tiff_reader: could not load file. Missing Image module."
+            msg = "tiff_reader: could not load file. Missing Image module."
+            raise RuntimeError, msg
 
         # Instantiate data object
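The only change here rewraps the reader's guarded-import idiom to fit the line limit. The idiom itself is worth noting: defer the PIL import to read time and re-raise as a readable RuntimeError rather than a bare ImportError. A minimal sketch, using the Python 2 raise syntax found throughout these readers:

    try:
        import Image   # PIL's Image module, needed only when reading
    except ImportError:
        msg = "tiff_reader: could not load file. Missing Image module."
        raise RuntimeError, msg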