Changeset 6a455cd3 in sasview for src/sas/sascalc/dataloader/readers
- Timestamp:
- Jul 24, 2017 10:27:05 AM
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- 146c669
- Parents:
- b61bd57 (diff), bc04647 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Location:
- src/sas/sascalc/dataloader/readers
- Files:
- 13 edited
Legend:
- unchanged lines are shown with a leading space
- added lines with a leading +
- removed lines with a leading -
src/sas/sascalc/dataloader/readers/IgorReader.py
rb699768 → ra1b8fee

 #copyright 2008, University of Tennessee
 #############################################################################
+from __future__ import print_function
+
 import os
-import numpy
-import math
-#import logging
+
 from sas.sascalc.dataloader.data_info import Data2D
 from sas.sascalc.dataloader.data_info import Detector
 from sas.sascalc.dataloader.manipulations import reader2D_converter
+import numpy as np
 
 # Look for unit converter
…
         """ Read file """
         if not os.path.isfile(filename):
-            raise ValueError, \
-            "Specified file %s is not a regular file" % filename
-
-        # Read file
-        f = open(filename, 'r')
-        buf = f.read()
-
-        # Instantiate data object
+            raise ValueError("Specified file %s is not a regular "
+                             "file" % filename)
+
         output = Data2D()
+
         output.filename = os.path.basename(filename)
         detector = Detector()
-        if len(output.detector) > 0:
-            print str(output.detector[0])
+        if len(output.detector):
+            print(str(output.detector[0]))
         output.detector.append(detector)
-
-        # Get content
-        dataStarted = False
-
-        lines = buf.split('\n')
-        itot = 0
-        x = []
-        y = []
-
-        ncounts = 0
-
-        xmin = None
-        xmax = None
-        ymin = None
-        ymax = None
-
-        i_x = 0
-        i_y = -1
-        i_tot_row = 0
-
-        isInfo = False
-        isCenter = False
-
-        data_conv_q = None
-        data_conv_i = None
-
-        if has_converter == True and output.Q_unit != '1/A':
+
+        data_conv_q = data_conv_i = None
+
+        if has_converter and output.Q_unit != '1/A':
             data_conv_q = Converter('1/A')
             # Test it
             data_conv_q(1.0, output.Q_unit)
 
-        if has_converter == True and output.I_unit != '1/cm':
+        if has_converter and output.I_unit != '1/cm':
             data_conv_i = Converter('1/cm')
             # Test it
             data_conv_i(1.0, output.I_unit)
-
-        for line in lines:
-
-            # Find setup info line
-            if isInfo:
-                isInfo = False
-                line_toks = line.split()
-                # Wavelength in Angstrom
-                try:
-                    wavelength = float(line_toks[1])
-                except:
-                    msg = "IgorReader: can't read this file, missing wavelength"
-                    raise ValueError, msg
-
-            #Find # of bins in a row assuming the detector is square.
-            if dataStarted == True:
-                try:
-                    value = float(line)
-                except:
-                    # Found a non-float entry, skip it
-                    continue
-
-                # Get total bin number
-
-                i_tot_row += 1
-        i_tot_row = math.ceil(math.sqrt(i_tot_row)) - 1
-        #print "i_tot", i_tot_row
-        size_x = i_tot_row  # 192#128
-        size_y = i_tot_row  # 192#128
-        output.data = numpy.zeros([size_x, size_y])
-        output.err_data = numpy.zeros([size_x, size_y])
-
-        #Read Header and 2D data
-        for line in lines:
-            # Find setup info line
-            if isInfo:
-                isInfo = False
-                line_toks = line.split()
-                # Wavelength in Angstrom
-                try:
-                    wavelength = float(line_toks[1])
-                except:
-                    msg = "IgorReader: can't read this file, missing wavelength"
-                    raise ValueError, msg
-                # Distance in meters
-                try:
-                    distance = float(line_toks[3])
-                except:
-                    msg = "IgorReader: can't read this file, missing distance"
-                    raise ValueError, msg
-
-                # Distance in meters
-                try:
-                    transmission = float(line_toks[4])
-                except:
-                    msg = "IgorReader: can't read this file, "
-                    msg += "missing transmission"
-                    raise ValueError, msg
-
-            if line.count("LAMBDA") > 0:
-                isInfo = True
-
-            # Find center info line
-            if isCenter:
-                isCenter = False
-                line_toks = line.split()
-
-                # Center in bin number: Must substrate 1 because
-                #the index starts from 1
-                center_x = float(line_toks[0]) - 1
-                center_y = float(line_toks[1]) - 1
-
-            if line.count("BCENT") > 0:
-                isCenter = True
-
-            # Find data start
-            if line.count("***")>0:
-                dataStarted = True
-
-                # Check that we have all the info
-                if wavelength == None \
-                    or distance == None \
-                    or center_x == None \
-                    or center_y == None:
-                    msg = "IgorReader:Missing information in data file"
-                    raise ValueError, msg
-
-            if dataStarted == True:
-                try:
-                    value = float(line)
-                except:
-                    # Found a non-float entry, skip it
-                    continue
-
-                # Get bin number
-                if math.fmod(itot, i_tot_row) == 0:
-                    i_x = 0
-                    i_y += 1
-                else:
-                    i_x += 1
-
-                output.data[i_y][i_x] = value
-                ncounts += 1
-
-                # Det 640 x 640 mm
-                # Q = 4pi/lambda sin(theta/2)
-                # Bin size is 0.5 cm
-                #REmoved +1 from theta = (i_x-center_x+1)*0.5 / distance
-                # / 100.0 and
-                #REmoved +1 from theta = (i_y-center_y+1)*0.5 /
-                # distance / 100.0
-                #ToDo: Need complete check if the following
-                # covert process is consistent with fitting.py.
-                theta = (i_x - center_x) * 0.5 / distance / 100.0
-                qx = 4.0 * math.pi / wavelength * math.sin(theta/2.0)
-
-                if has_converter == True and output.Q_unit != '1/A':
-                    qx = data_conv_q(qx, units=output.Q_unit)
-
-                if xmin == None or qx < xmin:
-                    xmin = qx
-                if xmax == None or qx > xmax:
-                    xmax = qx
-
-                theta = (i_y - center_y) * 0.5 / distance / 100.0
-                qy = 4.0 * math.pi / wavelength * math.sin(theta / 2.0)
-
-                if has_converter == True and output.Q_unit != '1/A':
-                    qy = data_conv_q(qy, units=output.Q_unit)
-
-                if ymin == None or qy < ymin:
-                    ymin = qy
-                if ymax == None or qy > ymax:
-                    ymax = qy
-
-                if not qx in x:
-                    x.append(qx)
-                if not qy in y:
-                    y.append(qy)
-
-                itot += 1
-
-
+
+        data_row = 0
+        wavelength = distance = center_x = center_y = None
+        dataStarted = isInfo = isCenter = False
+
+        with open(filename, 'r') as f:
+            for line in f:
+                data_row += 1
+                # Find setup info line
+                if isInfo:
+                    isInfo = False
+                    line_toks = line.split()
+                    # Wavelength in Angstrom
+                    try:
+                        wavelength = float(line_toks[1])
+                    except ValueError:
+                        msg = "IgorReader: can't read this file, missing wavelength"
+                        raise ValueError(msg)
+                    # Distance in meters
+                    try:
+                        distance = float(line_toks[3])
+                    except ValueError:
+                        msg = "IgorReader: can't read this file, missing distance"
+                        raise ValueError(msg)
+
+                    # Distance in meters
+                    try:
+                        transmission = float(line_toks[4])
+                    except:
+                        msg = "IgorReader: can't read this file, "
+                        msg += "missing transmission"
+                        raise ValueError(msg)
+
+                if line.count("LAMBDA"):
+                    isInfo = True
+
+                # Find center info line
+                if isCenter:
+                    isCenter = False
+                    line_toks = line.split()
+
+                    # Center in bin number: Must subtract 1 because
+                    # the index starts from 1
+                    center_x = float(line_toks[0]) - 1
+                    center_y = float(line_toks[1]) - 1
+
+                if line.count("BCENT"):
+                    isCenter = True
+
+                # Find data start
+                if line.count("***"):
+                    # now have to continue to blank line
+                    dataStarted = True
+
+                    # Check that we have all the info
+                    if (wavelength is None
+                            or distance is None
+                            or center_x is None
+                            or center_y is None):
+                        msg = "IgorReader:Missing information in data file"
+                        raise ValueError(msg)
+
+                if dataStarted:
+                    if len(line.rstrip()):
+                        continue
+                    else:
+                        break
+
+        # The data is loaded in row major order (last index changing most
+        # rapidly). However, the original data is in column major order (first
+        # index changing most rapidly). The swap to column major order is done
+        # in reader2D_converter at the end of this method.
+        data = np.loadtxt(filename, skiprows=data_row)
+        size_x = size_y = int(np.rint(np.sqrt(data.size)))
+        output.data = np.reshape(data, (size_x, size_y))
+        output.err_data = np.zeros_like(output.data)
+
+        # Det 640 x 640 mm
+        # Q = 4 * pi/lambda * sin(theta/2)
+        # Bin size is 0.5 cm
+        # Removed +1 from theta = (i_x - center_x + 1)*0.5 / distance
+        # / 100.0 and
+        # Removed +1 from theta = (i_y - center_y + 1)*0.5 /
+        # distance / 100.0
+        # ToDo: Need complete check if the following
+        # convert process is consistent with fitting.py.
+
+        # calculate qx, qy bin centers of each pixel in the image
+        theta = (np.arange(size_x) - center_x) * 0.5 / distance / 100.
+        qx = 4 * np.pi / wavelength * np.sin(theta/2)
+
+        theta = (np.arange(size_y) - center_y) * 0.5 / distance / 100.
+        qy = 4 * np.pi / wavelength * np.sin(theta/2)
+
+        if has_converter and output.Q_unit != '1/A':
+            qx = data_conv_q(qx, units=output.Q_unit)
+            qy = data_conv_q(qx, units=output.Q_unit)
+
+        xmax = np.max(qx)
+        xmin = np.min(qx)
+        ymax = np.max(qy)
+        ymin = np.min(qy)
+
+        # calculate edge offset in q.
         theta = 0.25 / distance / 100.0
-        xstep = 4.0 * math.pi / wavelength * math.sin(theta / 2.0)
+        xstep = 4.0 * np.pi / wavelength * np.sin(theta / 2.0)
 
         theta = 0.25 / distance / 100.0
-        ystep = 4.0 * math.pi / wavelength * math.sin(theta / 2.0)
+        ystep = 4.0 * np.pi / wavelength * np.sin(theta / 2.0)
 
         # Store all data ######################################
         # Store wavelength
-        if has_converter == True and output.source.wavelength_unit != 'A':
+        if has_converter and output.source.wavelength_unit != 'A':
            conv = Converter('A')
            wavelength = conv(wavelength, units=output.source.wavelength_unit)
…
 
         # Store distance
-        if has_converter == True and detector.distance_unit != 'm':
+        if has_converter and detector.distance_unit != 'm':
             conv = Converter('m')
             distance = conv(distance, units=detector.distance_unit)
…
         output.sample.transmission = transmission
 
-        # Store pixel size
+        # Store pixel size (mm)
         pixel = 5.0
-        if has_converter == True and detector.pixel_size_unit != 'mm':
+        if has_converter and detector.pixel_size_unit != 'mm':
             conv = Converter('mm')
             pixel = conv(pixel, units=detector.pixel_size_unit)
…
 
         # Store limits of the image (2D array)
-        xmin = xmin - xstep / 2.0
-        xmax = xmax + xstep / 2.0
-        ymin = ymin - ystep / 2.0
-        ymax = ymax + ystep / 2.0
-        if has_converter == True and output.Q_unit != '1/A':
+        xmin -= xstep / 2.0
+        xmax += xstep / 2.0
+        ymin -= ystep / 2.0
+        ymax += ystep / 2.0
+        if has_converter and output.Q_unit != '1/A':
             xmin = data_conv_q(xmin, units=output.Q_unit)
             xmax = data_conv_q(xmax, units=output.Q_unit)
…
 
         # Store x and y axis bin centers
-        output.x_bins = x
-        output.y_bins = y
+        output.x_bins = qx.tolist()
+        output.y_bins = qy.tolist()
 
         # Units
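The rewrite replaces the two hand-written accumulation loops with a single `np.loadtxt` call (skipping the header rows it counted) followed by a square reshape. A minimal, self-contained sketch of that load-and-reshape step, using a hypothetical in-memory 16-value block in place of a real IGOR file:

import io
import numpy as np

# Hypothetical 4x4 detector block, flattened one value per line as in an
# IGOR ASCII file; a real call would be np.loadtxt(filename, skiprows=...).
raw = "\n".join(str(v) for v in range(16))
data = np.loadtxt(io.StringIO(raw))

# Infer a square detector, as the reader does with np.rint(np.sqrt(...)).
size = int(np.rint(np.sqrt(data.size)))
image = data.reshape((size, size))   # row-major (C order) by default

# The file stores the first index as the fastest-varying one, so the
# column-major view the comment above describes is just the transpose.
assert np.array_equal(image.T, data.reshape((size, size), order='F'))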
src/sas/sascalc/dataloader/readers/abs_reader.py
rb699768 → r959eb01

 ######################################################################
 
-import numpy
+import numpy as np
 import os
 from sas.sascalc.dataloader.data_info import Data1D
…
                 buff = input_f.read()
                 lines = buff.split('\n')
-                x = numpy.zeros(0)
-                y = numpy.zeros(0)
-                dy = numpy.zeros(0)
-                dx = numpy.zeros(0)
+                x = np.zeros(0)
+                y = np.zeros(0)
+                dy = np.zeros(0)
+                dx = np.zeros(0)
                 output = Data1D(x, y, dy=dy, dx=dx)
                 detector = Detector()
…
                             _dy = data_conv_i(_dy, units=output.y_unit)
 
-                        x = numpy.append(x, _x)
-                        y = numpy.append(y, _y)
-                        dy = numpy.append(dy, _dy)
-                        dx = numpy.append(dx, _dx)
+                        x = np.append(x, _x)
+                        y = np.append(y, _y)
+                        dy = np.append(dy, _dy)
+                        dx = np.append(dx, _dx)
 
                 except:
src/sas/sascalc/dataloader/readers/ascii_reader.py
rd2471870 → r235f514

 
 
-import numpy
+import numpy as np
 import os
 from sas.sascalc.dataloader.data_info import Data1D
…
 
                 # Arrays for data storage
-                tx = numpy.zeros(0)
-                ty = numpy.zeros(0)
-                tdy = numpy.zeros(0)
-                tdx = numpy.zeros(0)
+                tx = np.zeros(0)
+                ty = np.zeros(0)
+                tdy = np.zeros(0)
+                tdx = np.zeros(0)
 
                 # The first good line of data will define whether
…
                         if new_lentoks > 2:
                             _dy = float(toks[2])
-                            has_error_dy = False if _dy == None else True
+                            has_error_dy = False if _dy is None else True
 
                         # If a 4th row is present, consider it dx
                         if new_lentoks > 3:
                             _dx = float(toks[3])
-                            has_error_dx = False if _dx == None else True
+                            has_error_dx = False if _dx is None else True
 
                         # Delete the previously stored lines of data candidates if
…
                             is_data == False:
                             try:
-                                tx = numpy.zeros(0)
-                                ty = numpy.zeros(0)
-                                tdy = numpy.zeros(0)
-                                tdx = numpy.zeros(0)
+                                tx = np.zeros(0)
+                                ty = np.zeros(0)
+                                tdy = np.zeros(0)
+                                tdx = np.zeros(0)
                             except:
                                 pass
 
                         if has_error_dy == True:
-                            tdy = numpy.append(tdy, _dy)
+                            tdy = np.append(tdy, _dy)
                         if has_error_dx == True:
-                            tdx = numpy.append(tdx, _dx)
-                        tx = numpy.append(tx, _x)
-                        ty = numpy.append(ty, _y)
+                            tdx = np.append(tdx, _dx)
+                        tx = np.append(tx, _x)
+                        ty = np.append(ty, _y)
 
                         #To remember the # of columns on the current line
…
                 #Let's re-order the data to make cal.
                 # curve look better some cases
-                ind = numpy.lexsort((ty, tx))
-                x = numpy.zeros(len(tx))
-                y = numpy.zeros(len(ty))
-                dy = numpy.zeros(len(tdy))
-                dx = numpy.zeros(len(tdx))
+                ind = np.lexsort((ty, tx))
+                x = np.zeros(len(tx))
+                y = np.zeros(len(ty))
+                dy = np.zeros(len(tdy))
+                dx = np.zeros(len(tdx))
                 output = Data1D(x, y, dy=dy, dx=dx)
                 self.filename = output.filename = basename
…
                 output.y = y[x != 0]
                 output.dy = dy[x != 0] if has_error_dy == True\
-                    else numpy.zeros(len(output.y))
+                    else np.zeros(len(output.y))
                 output.dx = dx[x != 0] if has_error_dx == True\
-                    else numpy.zeros(len(output.x))
+                    else np.zeros(len(output.x))
 
                 output.xaxis("\\rm{Q}", 'A^{-1}')
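This reader orders the accumulated points with `np.lexsort` before building the `Data1D`. A short illustration of the key order (the last key in the tuple is the primary sort key), with made-up `tx`/`ty` values:

import numpy as np

# Hypothetical unsorted Q/I columns standing in for tx/ty above.
tx = np.array([0.3, 0.1, 0.2])
ty = np.array([9.0, 1.0, 4.0])

# lexsort sorts by the LAST key first, so (ty, tx) orders points by Q
# and only falls back to intensity to break ties.
ind = np.lexsort((ty, tx))
print(tx[ind])  # [0.1 0.2 0.3]
print(ty[ind])  # [1. 4. 9.]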
src/sas/sascalc/dataloader/readers/associations.py
re5c09cf → ra1b8fee

 #copyright 2009, University of Tennessee
 #############################################################################
+from __future__ import print_function
+
 import os
 import sys
 import logging
 import json
+
+logger = logging.getLogger(__name__)
 
 FILE_NAME = 'defaults.json'
…
             msg = "read_associations: skipping association"
             msg += " for %s\n  %s" % (ext.lower(), sys.exc_value)
-            logging.error(msg)
+            logger.error(msg)
     else:
-        print "Could not find reader association settings\n  %s [%s]" % (__file__, os.getcwd())
+        print("Could not find reader association settings\n  %s [%s]" % (__file__, os.getcwd()))
 
 
…
     :param registry_function: function to be called to register each reader
     """
-    logging.info("register_readers is now obsolete: use read_associations()")
+    logger.info("register_readers is now obsolete: use read_associations()")
     import abs_reader
     import ascii_reader
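Several files in this changeset make the same swap from module-level `logging.*` calls to a named logger. A minimal sketch of the idiom (the `load` helper is hypothetical, not part of this module):

import logging

# __name__ stamps records with the module's dotted path (for example
# sas.sascalc.dataloader.readers.associations) instead of attributing
# everything to the root logger.
logger = logging.getLogger(__name__)

def load(path):
    try:
        with open(path) as handle:
            return handle.read()
    except IOError:
        # Handlers and levels stay the application's job, not the module's.
        logger.error("could not read %s", path)
        raise

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    logger.info("demo module imported")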
src/sas/sascalc/dataloader/readers/cansas_constants.py
rad4632c → r63d773c

                     "Sesans": {"storeas": "content"},
                     "zacceptance": {"storeas": "float"},
+                    "yacceptance": {"storeas": "float"},
                     "<any>" : ANY
src/sas/sascalc/dataloader/readers/cansas_reader.py
r527a190 → r6a455cd3

 import xml.dom.minidom
 from xml.dom.minidom import parseString
+
+logger = logging.getLogger(__name__)
 
 PREPROCESS = "xmlpreprocess"
…
                 self.current_dataset.yaxis(attr.get('y_axis'),
                                            attr.get('y_unit'))
+            elif tagname == 'yacceptance':
+                self.current_datainfo.sample.yacceptance = (data_point, unit)
             elif tagname == 'zacceptance':
                 self.current_datainfo.sample.zacceptance = (data_point, unit)
…
         :param data1d: presumably a Data1D object
         """
-        if self.current_dataset == None:
+        if self.current_dataset is None:
             x_vals = np.empty(0)
             y_vals = np.empty(0)
…
         # Write the file
         file_ref = open(filename, 'w')
-        if self.encoding == None:
+        if self.encoding is None:
             self.encoding = "UTF-8"
         doc.write(file_ref, encoding=self.encoding,
…
         :param entry_node: lxml node ElementTree object to be appended to
         """
-        if datainfo.run == None or datainfo.run == []:
+        if datainfo.run is None or datainfo.run == []:
             datainfo.run.append(RUN_NAME_DEFAULT)
             datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT
…
         sesans.text = str(datainfo.isSesans)
         entry_node.append(sesans)
+        self.write_node(entry_node, "yacceptance", datainfo.sample.yacceptance[0],
+                        {'unit': datainfo.sample.yacceptance[1]})
         self.write_node(entry_node, "zacceptance", datainfo.sample.zacceptance[0],
                         {'unit': datainfo.sample.zacceptance[1]})
…
                 self.write_node(point, "T", spectrum.transmission[i],
                                 {'unit': spectrum.transmission_unit})
-                if spectrum.transmission_deviation != None \
+                if spectrum.transmission_deviation is not None \
                     and len(spectrum.transmission_deviation) >= i:
                     self.write_node(point, "Tdev",
…
                             str(datainfo.source.name))
         self.append(source, instr)
-        if datainfo.source.radiation == None or datainfo.source.radiation == '':
+        if datainfo.source.radiation is None or datainfo.source.radiation == '':
             datainfo.source.radiation = "neutron"
         self.write_node(source, "radiation", datainfo.source.radiation)
…
         :param instr: lxml node ElementTree object to be appended to
         """
-        if datainfo.collimation == [] or datainfo.collimation == None:
+        if datainfo.collimation == [] or datainfo.collimation is None:
             coll = Collimation()
             datainfo.collimation.append(coll)
…
         :param inst: lxml instrument node to be appended to
         """
-        if datainfo.detector == None or datainfo.detector == []:
+        if datainfo.detector is None or datainfo.detector == []:
             det = Detector()
             det.name = ""
…
                 local_unit = None
                 exec "local_unit = storage.%s_unit" % toks[0]
-                if local_unit != None and units.lower() != local_unit.lower():
+                if local_unit is not None and units.lower() != local_unit.lower():
                     if HAS_CONVERTER == True:
                         try:
…
                             self.errors.add(err_mess)
                             if optional:
-                                logging.info(err_mess)
+                                logger.info(err_mess)
                             else:
                                 raise ValueError, err_mess
…
                         self.errors.add(err_mess)
                         if optional:
-                            logging.info(err_mess)
+                            logger.info(err_mess)
                         else:
                             raise ValueError, err_mess
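Most of the mechanical changes in this file replace `== None` with `is None`. A short sketch of why the identity check is safer, especially once numpy arrays are in play (the `AlwaysEqual` class is a contrived illustration):

import numpy as np

class AlwaysEqual(object):
    # __eq__ can be overloaded to return anything, which is what makes
    # "== None" unreliable for sentinel checks.
    def __eq__(self, other):
        return True

print(AlwaysEqual() == None)   # True, even though the object is not None
print(AlwaysEqual() is None)   # False: identity cannot be faked

# numpy arrays are the practical case in these readers: == broadcasts,
# so comparing an array against None is not a single boolean at all.
arr = np.zeros(3)
print(arr == None)             # element-wise: [False False False]
print(arr is None)             # False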
src/sas/sascalc/dataloader/readers/danse_reader.py
rb699768 → r235f514

 import os
 import sys
-import numpy
+import numpy as np
 import logging
 from sas.sascalc.dataloader.data_info import Data2D, Detector
 from sas.sascalc.dataloader.manipulations import reader2D_converter
+
+logger = logging.getLogger(__name__)
 
 # Look for unit converter
…
         output.detector.append(detector)
 
-        output.data = numpy.zeros([size_x,size_y])
-        output.err_data = numpy.zeros([size_x, size_y])
+        output.data = np.zeros([size_x,size_y])
+        output.err_data = np.zeros([size_x, size_y])
 
         data_conv_q = None
…
                     error.append(err)
                 except:
-                    logging.info("Skipping line:%s,%s" %(data_str,
+                    logger.info("Skipping line:%s,%s" %(data_str,
                                                          sys.exc_value))
 
…
 
                 x_vals.append(qx)
-                if xmin == None or qx < xmin:
+                if xmin is None or qx < xmin:
                     xmin = qx
-                if xmax == None or qx > xmax:
+                if xmax is None or qx > xmax:
                     xmax = qx
 
…
 
                 y_vals.append(qy)
-                if ymin == None or qy < ymin:
+                if ymin is None or qy < ymin:
                     ymin = qy
-                if ymax == None or qy > ymax:
+                if ymax is None or qy > ymax:
                     ymax = qy
 
…
                     msg = "Skipping entry (v1.0):%s,%s" % (str(data[i_pt]),
                                                            sys.exc_value)
-                    logging.info(msg)
+                    logger.info(msg)
 
                 # Get bin number
…
             raise ValueError, msg
         else:
-            logging.info("Danse_reader Reading %s \n" % filename)
+            logger.info("Danse_reader Reading %s \n" % filename)
 
         # Store loading process information
src/sas/sascalc/dataloader/readers/hfir1d_reader.py
rb699768 → r959eb01

 #copyright 2008, University of Tennessee
 ######################################################################
-import numpy
+import numpy as np
 import os
 from sas.sascalc.dataloader.data_info import Data1D
…
                 buff = input_f.read()
                 lines = buff.split('\n')
-                x = numpy.zeros(0)
-                y = numpy.zeros(0)
-                dx = numpy.zeros(0)
-                dy = numpy.zeros(0)
+                x = np.zeros(0)
+                y = np.zeros(0)
+                dx = np.zeros(0)
+                dy = np.zeros(0)
                 output = Data1D(x, y, dx=dx, dy=dy)
                 self.filename = output.filename = basename
…
                             _dy = data_conv_i(_dy, units=output.y_unit)
 
-                        x = numpy.append(x, _x)
-                        y = numpy.append(y, _y)
-                        dx = numpy.append(dx, _dx)
-                        dy = numpy.append(dy, _dy)
+                        x = np.append(x, _x)
+                        y = np.append(y, _y)
+                        dx = np.append(dx, _dx)
+                        dy = np.append(dy, _dy)
                     except:
                         # Couldn't parse this line, skip it
src/sas/sascalc/dataloader/readers/red2d_reader.py
rb699768 → ra1b8fee

 #copyright 2008, University of Tennessee
 ######################################################################
+from __future__ import print_function
+
 import os
-import numpy
+import numpy as np
 import math
 from sas.sascalc.dataloader.data_info import Data2D, Detector
…
         detector = Detector()
         if len(output.detector) > 0:
-            print str(output.detector[0])
+            print(str(output.detector[0]))
         output.detector.append(detector)
 
…
                 break
         # Make numpy array to remove header lines using index
-        lines_array = numpy.array(lines)
+        lines_array = np.array(lines)
 
         # index for lines_array
-        lines_index = numpy.arange(len(lines))
+        lines_index = np.arange(len(lines))
 
         # get the data lines
…
 
         # numpy array form
-        data_array = numpy.array(data_list1)
+        data_array = np.array(data_list1)
         # Redimesion based on the row_num and col_num,
         #otherwise raise an error.
…
         ## Get the all data: Let's HARDcoding; Todo find better way
         # Defaults
-        dqx_data = numpy.zeros(0)
-        dqy_data = numpy.zeros(0)
-        err_data = numpy.ones(row_num)
-        qz_data = numpy.zeros(row_num)
-        mask = numpy.ones(row_num, dtype=bool)
+        dqx_data = np.zeros(0)
+        dqy_data = np.zeros(0)
+        err_data = np.ones(row_num)
+        qz_data = np.zeros(row_num)
+        mask = np.ones(row_num, dtype=bool)
         # Get from the array
         qx_data = data_point[0]
…
             dqy_data = data_point[(5 + ver)]
         #if col_num > (6 + ver): mask[data_point[(6 + ver)] < 1] = False
-        q_data = numpy.sqrt(qx_data*qx_data+qy_data*qy_data+qz_data*qz_data)
+        q_data = np.sqrt(qx_data*qx_data+qy_data*qy_data+qz_data*qz_data)
 
         # Extra protection(it is needed for some data files):
…
 
         # Store limits of the image in q space
-        xmin = numpy.min(qx_data)
-        xmax = numpy.max(qx_data)
-        ymin = numpy.min(qy_data)
-        ymax = numpy.max(qy_data)
+        xmin = np.min(qx_data)
+        xmax = np.max(qx_data)
+        ymin = np.min(qy_data)
+        ymax = np.max(qy_data)
 
         # units
…
 
         # store x and y axis bin centers in q space
-        x_bins = numpy.arange(xmin, xmax + xstep, xstep)
-        y_bins = numpy.arange(ymin, ymax + ystep, ystep)
+        x_bins = np.arange(xmin, xmax + xstep, xstep)
+        y_bins = np.arange(ymin, ymax + ystep, ystep)
 
         # get the limits of q values
…
         output.data = data
         if (err_data == 1).all():
-            output.err_data = numpy.sqrt(numpy.abs(data))
+            output.err_data = np.sqrt(np.abs(data))
             output.err_data[output.err_data == 0.0] = 1.0
         else:
…
         # tranfer the comp. to cartesian coord. for newer version.
         if ver != 1:
-            diag = numpy.sqrt(qx_data * qx_data + qy_data * qy_data)
+            diag = np.sqrt(qx_data * qx_data + qy_data * qy_data)
             cos_th = qx_data / diag
             sin_th = qy_data / diag
-            output.dqx_data = numpy.sqrt((dqx_data * cos_th) * \
+            output.dqx_data = np.sqrt((dqx_data * cos_th) * \
                                          (dqx_data * cos_th) \
                                          + (dqy_data * sin_th) * \
                                          (dqy_data * sin_th))
-            output.dqy_data = numpy.sqrt((dqx_data * sin_th) * \
+            output.dqy_data = np.sqrt((dqx_data * sin_th) * \
                                          (dqx_data * sin_th) \
                                          + (dqy_data * cos_th) * \
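The version-2 branch at the end of this file rotates the resolution components onto the detector axes using the angle of each pixel's Q vector before storing `dqx_data`/`dqy_data`. A sketch of the same arithmetic on made-up values (all arrays here are hypothetical stand-ins for the reader's columns):

import numpy as np

# Hypothetical Q coordinates and polar resolution components for three pixels.
qx_data = np.array([0.01, 0.02, 0.03])
qy_data = np.array([0.03, 0.02, 0.01])
dq_para = np.array([1e-4, 2e-4, 3e-4])   # component parallel to Q
dq_perp = np.array([5e-5, 5e-5, 5e-5])   # component perpendicular to Q

# Rotate into cartesian components:
# dqx = sqrt((dq_para*cos)^2 + (dq_perp*sin)^2), and the mirror for dqy.
diag = np.sqrt(qx_data**2 + qy_data**2)
cos_th, sin_th = qx_data / diag, qy_data / diag
dqx = np.sqrt((dq_para * cos_th)**2 + (dq_perp * sin_th)**2)
dqy = np.sqrt((dq_para * sin_th)**2 + (dq_perp * cos_th)**2)
print(dqx, dqy)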
src/sas/sascalc/dataloader/readers/sesans_reader.py
r7caf3e5 → r149b8f6

 """
     SESANS reader (based on ASCII reader)
 
     Reader for .ses or .sesans file format
 
     Jurrian Bakker
 """
-import numpy
+import numpy as np
 import os
 from sas.sascalc.dataloader.data_info import Data1D
…
 _ZERO = 1e-16
 
+
 class Reader:
     """
     Class to load sesans files (6 columns).
     """
-    ## File type
+    # File type
     type_name = "SESANS"
 
-    ## Wildcards
+    # Wildcards
     type = ["SESANS files (*.ses)|*.ses",
             "SESANS files (*..sesans)|*.sesans"]
-    ## List of allowed extensions
+
+    # List of allowed extensions
     ext = ['.ses', '.SES', '.sesans', '.SESANS']
 
-    ## Flag to bypass extension check
+    # Flag to bypass extension check
     allow_all = True
 
     def read(self, path):
-        # print "reader triggered"
-
         """
         Load data file
 
         :param path: file path
 
         :return: SESANSData1D object, or None
 
         :raise RuntimeError: when the file can't be opened
         :raise ValueError: when the length of the data vectors are inconsistent
         """
         if os.path.isfile(path):
             basename = os.path.basename(path)
             _, extension = os.path.splitext(basename)
-        if self.allow_all or extension.lower() in self.ext:
-            try:
-                # Read in binary mode since GRASP frequently has no-ascii
-                # characters that brakes the open operation
-                input_f = open(path,'rb')
-            except:
-                raise RuntimeError, "sesans_reader: cannot open %s" % path
-            buff = input_f.read()
-            lines = buff.splitlines()
-            x = numpy.zeros(0)
-            y = numpy.zeros(0)
-            dy = numpy.zeros(0)
-            lam = numpy.zeros(0)
-            dlam = numpy.zeros(0)
-            dx = numpy.zeros(0)
-
-            #temp. space to sort data
-            tx = numpy.zeros(0)
-            ty = numpy.zeros(0)
-            tdy = numpy.zeros(0)
-            tlam = numpy.zeros(0)
-            tdlam = numpy.zeros(0)
-            tdx = numpy.zeros(0)
-            output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True)
-            self.filename = output.filename = basename
-
-            paramnames=[]
-            paramvals=[]
-            zvals=[]
-            dzvals=[]
-            lamvals=[]
-            dlamvals=[]
-            Pvals=[]
-            dPvals=[]
-
-            for line in lines:
-                # Initial try for CSV (split on ,)
-                line=line.strip()
-                toks = line.split('\t')
-                if len(toks)==2:
-                    paramnames.append(toks[0])
-                    paramvals.append(toks[1])
-                if len(toks)>5:
-                    zvals.append(toks[0])
-                    dzvals.append(toks[3])
-                    lamvals.append(toks[4])
-                    dlamvals.append(toks[5])
-                    Pvals.append(toks[1])
-                    dPvals.append(toks[2])
-                else:
-                    continue
-
-            x=[]
-            y=[]
-            lam=[]
-            dx=[]
-            dy=[]
-            dlam=[]
-            lam_header = lamvals[0].split()
-            data_conv_z = None
-            default_z_unit = "A"
-            data_conv_P = None
-            default_p_unit = " " # Adjust unit for axis (L^-3)
-            lam_unit = lam_header[1].replace("[","").replace("]","")
-            if lam_unit == 'AA':
-                lam_unit = 'A'
-            varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
-            valrange=range(1, len(zvals))
-            for i in valrange:
-                x.append(float(zvals[i]))
-                y.append(float(Pvals[i]))
-                lam.append(float(lamvals[i]))
-                dy.append(float(dPvals[i]))
-                dx.append(float(dzvals[i]))
-                dlam.append(float(dlamvals[i]))
-
-            x,y,lam,dy,dx,dlam = [
-                numpy.asarray(v, 'double')
-                for v in (x,y,lam,dy,dx,dlam)
-            ]
-
-            input_f.close()
-
-            output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit)
-            output.y = y
-            output.y_unit = r'\AA^{-2} cm^{-1}'  # output y_unit added
-            output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit)
-            output.dy = dy
-            output.lam, output.lam_unit = self._unit_conversion(lam, lam_unit, default_z_unit)
-            output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit)
-
-            output.xaxis(r"\rm{z}", output.x_unit)
-            output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit)  # Adjust label to ln P/(lam^2 t), remove lam column refs
-
-            # Store loading process information
-            output.meta_data['loader'] = self.type_name
-            #output.sample.thickness = float(paramvals[6])
-            output.sample.name = paramvals[1]
-            output.sample.ID = paramvals[0]
-            zaccept_unit_split = paramnames[7].split("[")
-            zaccept_unit = zaccept_unit_split[1].replace("]","")
-            if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1':
-                zaccept_unit = "1/A"
-            output.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
-            output.vars = varheader
-
-            if len(output.x) < 1:
-                raise RuntimeError, "%s is empty" % path
-            return output
-
+            if not (self.allow_all or extension.lower() in self.ext):
+                raise RuntimeError(
+                    "{} has an unrecognized file extension".format(path))
         else:
-            raise RuntimeError, "%s is not a file" % path
-        return None
+            raise RuntimeError("{} is not a file".format(path))
+        with open(path, 'r') as input_f:
+            line = input_f.readline()
+            params = {}
+            while not line.startswith("BEGIN_DATA"):
+                terms = line.split()
+                if len(terms) >= 2:
+                    params[terms[0]] = " ".join(terms[1:])
+                line = input_f.readline()
+            self.params = params
+
+            if "FileFormatVersion" not in self.params:
+                raise RuntimeError("SES file missing FileFormatVersion")
+            if float(self.params["FileFormatVersion"]) >= 2.0:
+                raise RuntimeError("SASView only supports SES version 1")
+
+            if "SpinEchoLength_unit" not in self.params:
+                raise RuntimeError("SpinEchoLength has no units")
+            if "Wavelength_unit" not in self.params:
+                raise RuntimeError("Wavelength has no units")
+            if params["SpinEchoLength_unit"] != params["Wavelength_unit"]:
+                raise RuntimeError("The spin echo data has rudely used "
+                                   "different units for the spin echo length "
+                                   "and the wavelength.  While sasview could "
+                                   "handle this instance, it is a violation "
+                                   "of the file format and will not be "
+                                   "handled by other software.")
+
+            headers = input_f.readline().split()
+
+            self._insist_header(headers, "SpinEchoLength")
+            self._insist_header(headers, "Depolarisation")
+            self._insist_header(headers, "Depolarisation_error")
+            self._insist_header(headers, "Wavelength")
+
+            data = np.loadtxt(input_f)
+
+            if data.shape[1] != len(headers):
+                raise RuntimeError(
+                    "File has {} headers, but {} columns".format(
+                        len(headers),
+                        data.shape[1]))
+
+            if not data.size:
+                raise RuntimeError("{} is empty".format(path))
+            x = data[:, headers.index("SpinEchoLength")]
+            if "SpinEchoLength_error" in headers:
+                dx = data[:, headers.index("SpinEchoLength_error")]
+            else:
+                dx = x * 0.05
+            lam = data[:, headers.index("Wavelength")]
+            if "Wavelength_error" in headers:
+                dlam = data[:, headers.index("Wavelength_error")]
+            else:
+                dlam = lam * 0.05
+            y = data[:, headers.index("Depolarisation")]
+            dy = data[:, headers.index("Depolarisation_error")]
+
+            lam_unit = self._unit_fetch("Wavelength")
+            x, x_unit = self._unit_conversion(x, "A",
+                                              self._unit_fetch(
+                                                  "SpinEchoLength"))
+            dx, dx_unit = self._unit_conversion(
+                dx, lam_unit,
+                self._unit_fetch("SpinEchoLength"))
+            dlam, dlam_unit = self._unit_conversion(
+                dlam, lam_unit,
+                self._unit_fetch("Wavelength"))
+            y_unit = self._unit_fetch("Depolarisation")
+
+            output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam,
+                            isSesans=True)
+
+            output.y_unit = y_unit
+            output.x_unit = x_unit
+            output.source.wavelength_unit = lam_unit
+            output.source.wavelength = lam
+            self.filename = output.filename = basename
+            output.xaxis(r"\rm{z}", x_unit)
+            # Adjust label to ln P/(lam^2 t), remove lam column refs
+            output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", y_unit)
+            # Store loading process information
+            output.meta_data['loader'] = self.type_name
+            output.sample.name = params["Sample"]
+            output.sample.ID = params["DataFileTitle"]
+            output.sample.thickness = self._unit_conversion(
+                float(params["Thickness"]), "cm",
+                self._unit_fetch("Thickness"))[0]
+
+            output.sample.zacceptance = (
+                float(params["Theta_zmax"]),
+                self._unit_fetch("Theta_zmax"))
+
+            output.sample.yacceptance = (
+                float(params["Theta_ymax"]),
+                self._unit_fetch("Theta_ymax"))
+            return output
 
-    def _unit_conversion(self, value, value_unit, default_unit):
-        if has_converter == True and value_unit != default_unit:
-            data_conv_q = Converter(value_unit)
-            value = data_conv_q(value, units=default_unit)
+    @staticmethod
+    def _insist_header(headers, name):
+        if name not in headers:
+            raise RuntimeError(
+                "Missing {} column in spin echo data".format(name))
+
+    @staticmethod
+    def _unit_conversion(value, value_unit, default_unit):
+        """
+        Performs unit conversion on a measurement.
+
+        :param value: The magnitude of the measurement
+        :param value_unit: a string containing the final desired unit
+        :param default_unit: string with the units of the original measurement
+        :return: The magnitude of the measurement in the new units
+        """
+        # (float, string, string) -> float
+        if has_converter and value_unit != default_unit:
+            data_conv_q = Converter(default_unit)
+            value = data_conv_q(value, units=value_unit)
             new_unit = default_unit
         else:
             new_unit = value_unit
         return value, new_unit
+
+    def _unit_fetch(self, unit):
+        return self.params[unit + "_unit"]
src/sas/sascalc/dataloader/readers/tiff_reader.py
rb699768 → r959eb01

 import logging
 import os
-import numpy
+import numpy as np
 from sas.sascalc.dataloader.data_info import Data2D
 from sas.sascalc.dataloader.manipulations import reader2D_converter
 
+logger = logging.getLogger(__name__)
+
 class Reader:
     """
…
 
         # Initiazed the output data object
-        output.data = numpy.zeros([im.size[0], im.size[1]])
-        output.err_data = numpy.zeros([im.size[0], im.size[1]])
-        output.mask = numpy.ones([im.size[0], im.size[1]], dtype=bool)
+        output.data = np.zeros([im.size[0], im.size[1]])
+        output.err_data = np.zeros([im.size[0], im.size[1]])
+        output.mask = np.ones([im.size[0], im.size[1]], dtype=bool)
 
         # Initialize
…
                 value = float(val)
             except:
-                logging.error("tiff_reader: had to skip a non-float point")
+                logger.error("tiff_reader: had to skip a non-float point")
                 continue
 
…
         output.x_bins = x_vals
         output.y_bins = y_vals
-        output.qx_data = numpy.array(x_vals)
-        output.qy_data = numpy.array(y_vals)
+        output.qx_data = np.array(x_vals)
+        output.qy_data = np.array(y_vals)
         output.xmin = 0
         output.xmax = im.size[0] - 1
src/sas/sascalc/dataloader/readers/xml_reader.py
r527a190 → r6a455cd3

 from lxml import etree
 from lxml.builder import E
+
+logger = logging.getLogger(__name__)
 
 PARSER = etree.ETCompatXMLParser(remove_comments=True, remove_pis=False)
…
             self.xmlroot = self.xmldoc.getroot()
         except etree.XMLSyntaxError as xml_error:
-            logging.info(xml_error)
+            logger.info(xml_error)
         except Exception:
             self.xml = None
…
             self.xmlroot = etree.fromstring(tag_soup)
         except etree.XMLSyntaxError as xml_error:
-            logging.info(xml_error)
+            logger.info(xml_error)
         except Exception:
             self.xml = None
…
             self.schemadoc = etree.parse(self.schema, parser=PARSER)
         except etree.XMLSyntaxError as xml_error:
-            logging.info(xml_error)
+            logger.info(xml_error)
         except Exception:
             self.schema = None
…
         :param name: The name of the element to be created
         """
-        if attrib == None:
+        if attrib is None:
             attrib = {}
         return etree.Element(name, attrib, nsmap)
…
         """
         text = str(text)
-        if attrib == None:
+        if attrib is None:
             attrib = {}
         elem = E(elementname, attrib, text)
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rc94280c → r7c24685

 
             if isinstance(value, h5py.Group):
+                parent_class = class_name
                 self.parent_class = class_name
                 parent_list.append(key)
…
                 # Recursion step to access data within the group
                 self.read_children(value, parent_list)
+                self.parent_class = parent_class
                 self.add_intermediate()
                 parent_list.remove(key)
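The fix saves `self.parent_class` in a local variable before recursing and restores it afterwards, so the instance attribute behaves like a per-frame variable and siblings of a nested group see their own parent again. A minimal sketch of the pattern, with plain dicts standing in for `h5py` groups (the `Walker` class and its data are hypothetical):

# Save/restore an instance attribute around recursion.
class Walker(object):
    def __init__(self):
        self.parent_class = None
        self.visited = []

    def read_children(self, node):
        for key, value in sorted(node.items()):
            if isinstance(value, dict):           # stands in for h5py.Group
                parent_class = self.parent_class  # save the caller's context
                self.parent_class = key
                self.read_children(value)
                self.parent_class = parent_class  # restore after recursion
            else:
                self.visited.append((self.parent_class, key))

w = Walker()
w.read_children({"SASdata": {"I": 1, "Q": 2}, "title": "demo"})
print(w.visited)  # [('SASdata', 'I'), ('SASdata', 'Q'), (None, 'title')]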