Changeset ad92c5a in sasview for src/sas/sascalc/dataloader
- Timestamp: Apr 18, 2017 2:56:03 PM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 080d88e
- Parents: da8bb53
- git-author: Jeff Krzywon <krzywon@…> (04/18/17 14:56:03)
- git-committer: krzywon <krzywon@…> (04/18/17 14:56:03)
- Location: src/sas/sascalc/dataloader
- Files: 3 edited
Legend: unmodified lines have no prefix, added lines are prefixed with +, and removed lines are prefixed with -.
src/sas/sascalc/dataloader/file_reader_base_class.py
--- src/sas/sascalc/dataloader/file_reader_base_class.py (rda8bb53)
+++ src/sas/sascalc/dataloader/file_reader_base_class.py (rad92c5a)
@@ -52,5 +52,4 @@
         # Try to load the file, but raise an error if unable to.
         try:
-            self.load_unit_converter()
             self.f_open = open(filepath, 'rb')
             self.get_file_contents()
@@ -94,15 +93,4 @@
             self.output.append(data_obj)

-    def load_unit_converter(self):
-        """
-        Generic unit conversion import
-        """
-        # Check whether we have a converter available
-        self.has_converter = True
-        try:
-            from sas.sascalc.data_util.nxsunit import Converter
-        except:
-            self.has_converter = False
-
     def sort_one_d_data(self):
         """
@@ -138,4 +126,27 @@
         self.output = []

+    def remove_empty_q_values(self, has_error_dx=False, has_error_dy=False):
+        """
+        Remove any point where Q == 0
+        """
+        x = self.current_dataset.x
+        self.current_dataset.x = self.current_dataset.x[x != 0]
+        self.current_dataset.y = self.current_dataset.y[x != 0]
+        self.current_dataset.dy = self.current_dataset.dy[x != 0] if \
+            has_error_dy else np.zeros(len(self.current_dataset.y))
+        self.current_dataset.dx = self.current_dataset.dx[x != 0] if \
+            has_error_dx else np.zeros(len(self.current_dataset.x))
+
+    def reset_data_list(self, no_lines=0):
+        """
+        Reset the plottable_1D object
+        """
+        # Initialize data sets with arrays the maximum possible size
+        x = np.zeros(no_lines)
+        y = np.zeros(no_lines)
+        dy = np.zeros(no_lines)
+        dx = np.zeros(no_lines)
+        self.current_dataset = plottable_1D(x, y, dx, dy)
+
     @staticmethod
     def splitline(line):
@@ -158,4 +169,5 @@
     def get_file_contents(self):
         """
+        Reader specific class to access the contents of the file
         All reader classes that inherit from FileReader must implement
         """
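The helper promoted into FileReader here, remove_empty_q_values, is essentially a NumPy boolean mask over Q. The following standalone sketch (plain NumPy, not the SasView class itself; the free function and the sample arrays are illustrative only) restates the same trimming logic:

```python
import numpy as np

# Illustrative re-statement of FileReader.remove_empty_q_values as a free
# function: drop every point where Q == 0 and zero-fill missing error columns.
def remove_empty_q_values(x, y, dx=None, dy=None):
    mask = x != 0                       # keep only points with non-zero Q
    x, y = x[mask], y[mask]
    dy = dy[mask] if dy is not None else np.zeros(len(y))
    dx = dx[mask] if dx is not None else np.zeros(len(x))
    return x, y, dx, dy

# Arrays pre-allocated with np.zeros (as reset_data_list does) keep unused
# slots at Q == 0, so the same mask also strips the unused padding.
q = np.array([0.0, 0.01, 0.02, 0.0])
i = np.array([0.0, 5.1, 4.8, 0.0])
print(remove_empty_q_values(q, i))      # -> two surviving points, zeroed dx/dy
```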
src/sas/sascalc/dataloader/readers/abs_reader.py
--- src/sas/sascalc/dataloader/readers/abs_reader.py (r959eb01)
+++ src/sas/sascalc/dataloader/readers/abs_reader.py (rad92c5a)
@@ -1,249 +1,228 @@
 """
+IGOR 1D data reader
 """
 #####################################################################
-#This software was developed by the University of Tennessee as part of the
-#Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
-#project funded by the US National Science Foundation.
-#See the license text in license.txt
-#copyright 2008, University of Tennessee
+# This software was developed by the University of Tennessee as part of the
+# Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
+# project funded by the US National Science Foundation.
+# See the license text in license.txt
+# copyright 2008, University of Tennessee
 ######################################################################

+import logging
 import numpy as np
-import os
-from sas.sascalc.dataloader.data_info import Data1D
-from sas.sascalc.dataloader.data_info import Detector
-
-has_converter = True
-try:
-    from sas.sascalc.data_util.nxsunit import Converter
-except:
-    has_converter = False
-
-
-class Reader:
+from sas.sascalc.dataloader.file_reader_base_class import FileReader
+from sas.sascalc.dataloader.data_info import DataInfo, plottable_1D, Data1D,\
+    Detector
+from sas.sascalc.dataloader.loader_exceptions import FileContentsException,\
+    DefaultReaderException
+
+logger = logging.getLogger(__name__)
+
+
+class Reader(FileReader):
     """
     Class to load IGOR reduced .ABS files
     """
-    ## File type
+    # File type
     type_name = "IGOR 1D"
-    ## Wildcards
+    # Wildcards
     type = ["IGOR 1D files (*.abs)|*.abs"]
-    ## List of allowed extensions
-    ext = ['.abs', '.ABS']
+    # List of allowed extensions
+    ext = ['.abs']

-    def read(self, path):
+    def get_file_contents(self):
         """
-        Load data file.
-
-        :param path: file path
-
-        :return: Data1D object, or None
+        Get the contents of the file

         :raise RuntimeError: when the file can't be opened
         :raise ValueError: when the length of the data vectors are inconsistent
         """
-        if os.path.isfile(path):
-            basename = os.path.basename(path)
-            root, extension = os.path.splitext(basename)
-            if extension.lower() in self.ext:
-                try:
-                    input_f = open(path,'r')
+        buff = self.f_open.read()
+        filepath = self.f_open.name
+        lines = buff.splitlines()
+        self.has_converter = True
+        try:
+            from sas.sascalc.data_util.nxsunit import Converter
+        except:
+            self.has_converter = False
+        self.output = []
+        self.current_datainfo = DataInfo()
+        self.current_datainfo.filename = filepath
+        self.reset_data_list(len(lines))
+        detector = Detector()
+        data_line = 0
+        self.reset_data_list(len(lines))
+        self.current_datainfo.detector.append(detector)
+        self.current_datainfo.filename = filepath
+
+        is_info = False
+        is_center = False
+        is_data_started = False
+
+        base_q_unit = '1/A'
+        base_i_unit = '1/cm'
+        data_conv_q = Converter(base_q_unit)
+        data_conv_i = Converter(base_i_unit)
+
+        for line in lines:
+            # Information line 1
+            if is_info:
+                is_info = False
+                line_toks = line.split()
+
+                # Wavelength in Angstrom
+                try:
+                    value = float(line_toks[1])
+                    if self.has_converter and \
+                            self.current_datainfo.source.wavelength_unit != 'A':
+                        conv = Converter('A')
+                        self.current_datainfo.source.wavelength = conv(value,
+                            units=self.current_datainfo.source.wavelength_unit)
+                    else:
+                        self.current_datainfo.source.wavelength = value
+                except KeyError:
+                    msg = "ABSReader cannot read wavelength from %s" % filepath
+                    self.current_datainfo.errors.append(msg)
+
+                # Detector distance in meters
+                try:
+                    value = float(line_toks[3])
+                    if self.has_converter and detector.distance_unit != 'm':
+                        conv = Converter('m')
+                        detector.distance = conv(value,
+                            units=detector.distance_unit)
+                    else:
+                        detector.distance = value
                 except:
-                    raise RuntimeError, "abs_reader: cannot open %s" % path
-                buff = input_f.read()
-                lines = buff.split('\n')
-                x = np.zeros(0)
-                y = np.zeros(0)
-                dy = np.zeros(0)
-                dx = np.zeros(0)
-                output = Data1D(x, y, dy=dy, dx=dx)
-                detector = Detector()
-                output.detector.append(detector)
-                output.filename = basename
-
-                is_info = False
+                    msg = "ABSReader cannot read SDD from %s" % filepath
+                    self.current_datainfo.errors.append(msg)
+
+                # Transmission
+                try:
+                    self.current_datainfo.sample.transmission = \
+                        float(line_toks[4])
+                except ValueError:
+                    # Transmission isn't always in the header
+                    pass
+
+                # Sample thickness in mm
+                try:
+                    value = float(line_toks[5])
+                    if self.has_converter and \
+                            self.current_datainfo.sample.thickness_unit != 'cm':
+                        conv = Converter('cm')
+                        self.current_datainfo.sample.thickness = conv(value,
+                            units=self.current_datainfo.sample.thickness_unit)
+                    else:
+                        self.current_datainfo.sample.thickness = value
+                except ValueError:
+                    # Thickness is not a mandatory entry
+                    pass
+
+            # MON CNT LAMBDA DET ANG DET DIST TRANS THICK AVE STEP
+            if line.count("LAMBDA") > 0:
+                is_info = True
+
+            # Find center info line
+            if is_center:
                 is_center = False
-                is_data_started = False
-
-                data_conv_q = None
-                data_conv_i = None
-
-                if has_converter == True and output.x_unit != '1/A':
-                    data_conv_q = Converter('1/A')
-                    # Test it
-                    data_conv_q(1.0, output.x_unit)
-
-                if has_converter == True and output.y_unit != '1/cm':
-                    data_conv_i = Converter('1/cm')
-                    # Test it
-                    data_conv_i(1.0, output.y_unit)
-
-                for line in lines:
-
-                    # Information line 1
-                    if is_info == True:
-                        is_info = False
-                        line_toks = line.split()
-
-                        # Wavelength in Angstrom
-                        try:
-                            value = float(line_toks[1])
-                            if has_converter == True and \
-                               output.source.wavelength_unit != 'A':
-                                conv = Converter('A')
-                                output.source.wavelength = conv(value,
-                                    units=output.source.wavelength_unit)
-                            else:
-                                output.source.wavelength = value
-                        except:
-                            #goes to ASC reader
-                            msg = "abs_reader: cannot open %s" % path
-                            raise RuntimeError, msg
-
-                        # Distance in meters
-                        try:
-                            value = float(line_toks[3])
-                            if has_converter == True and \
-                               detector.distance_unit != 'm':
-                                conv = Converter('m')
-                                detector.distance = conv(value,
-                                    units=detector.distance_unit)
-                            else:
-                                detector.distance = value
-                        except:
-                            #goes to ASC reader
-                            msg = "abs_reader: cannot open %s" % path
-                            raise RuntimeError, msg
-                        # Transmission
-                        try:
-                            output.sample.transmission = float(line_toks[4])
-                        except:
-                            # Transmission is not a mandatory entry
-                            pass
-
-                        # Thickness in mm
-                        try:
-                            value = float(line_toks[5])
-                            if has_converter == True and \
-                               output.sample.thickness_unit != 'cm':
-                                conv = Converter('cm')
-                                output.sample.thickness = conv(value,
-                                    units=output.sample.thickness_unit)
-                            else:
-                                output.sample.thickness = value
-                        except:
-                            # Thickness is not a mandatory entry
-                            pass
-
-                    #MON CNT LAMBDA DET ANG DET DIST TRANS THICK
-                    # AVE STEP
-                    if line.count("LAMBDA") > 0:
-                        is_info = True
-
-                    # Find center info line
-                    if is_center == True:
-                        is_center = False
-                        line_toks = line.split()
-                        # Center in bin number
-                        center_x = float(line_toks[0])
-                        center_y = float(line_toks[1])
-
-                        # Bin size
-                        if has_converter == True and \
-                           detector.pixel_size_unit != 'mm':
-                            conv = Converter('mm')
-                            detector.pixel_size.x = conv(5.0,
-                                units=detector.pixel_size_unit)
-                            detector.pixel_size.y = conv(5.0,
-                                units=detector.pixel_size_unit)
-                        else:
-                            detector.pixel_size.x = 5.0
-                            detector.pixel_size.y = 5.0
-
-                        # Store beam center in distance units
-                        # Det 640 x 640 mm
-                        if has_converter == True and \
-                           detector.beam_center_unit != 'mm':
-                            conv = Converter('mm')
-                            detector.beam_center.x = conv(center_x * 5.0,
-                                units=detector.beam_center_unit)
-                            detector.beam_center.y = conv(center_y * 5.0,
-                                units=detector.beam_center_unit)
-                        else:
-                            detector.beam_center.x = center_x * 5.0
-                            detector.beam_center.y = center_y * 5.0
-
-                        # Detector type
-                        try:
-                            detector.name = line_toks[7]
-                        except:
-                            # Detector name is not a mandatory entry
-                            pass
-
-                    #BCENT(X,Y) A1(mm) A2(mm) A1A2DIST(m) DL/L
-                    # BSTOP(mm) DET_TYP
-                    if line.count("BCENT") > 0:
-                        is_center = True
-
-                    # Parse the data
-                    if is_data_started == True:
-                        toks = line.split()
-
-                        try:
-                            _x = float(toks[0])
-                            _y = float(toks[1])
-                            _dy = float(toks[2])
-                            _dx = float(toks[3])
-
-                            if data_conv_q is not None:
-                                _x = data_conv_q(_x, units=output.x_unit)
-                                _dx = data_conv_i(_dx, units=output.x_unit)
-
-                            if data_conv_i is not None:
-                                _y = data_conv_i(_y, units=output.y_unit)
-                                _dy = data_conv_i(_dy, units=output.y_unit)
-
-                            x = np.append(x, _x)
-                            y = np.append(y, _y)
-                            dy = np.append(dy, _dy)
-                            dx = np.append(dx, _dx)
-
-                        except:
-                            # Could not read this data line. If we are here
-                            # it is because we are in the data section. Just
-                            # skip it.
-                            pass
-
-                    #The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
-                    # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
-                    if line.count("The 6 columns") > 0:
-                        is_data_started = True
-
-                # Sanity check
-                if not len(y) == len(dy):
-                    msg = "abs_reader: y and dy have different length"
-                    raise ValueError, msg
-                # If the data length is zero, consider this as
-                # though we were not able to read the file.
-                if len(x) == 0:
-                    raise ValueError, "ascii_reader: could not load file"
-
-                output.x = x[x != 0]
-                output.y = y[x != 0]
-                output.dy = dy[x != 0]
-                output.dx = dx[x != 0]
-                if data_conv_q is not None:
-                    output.xaxis("\rm{Q}", output.x_unit)
+                line_toks = line.split()
+                # Center in bin number
+                center_x = float(line_toks[0])
+                center_y = float(line_toks[1])
+
+                # Bin size
+                if self.has_converter and detector.pixel_size_unit != 'mm':
+                    conv = Converter('mm')
+                    detector.pixel_size.x = conv(5.08,
+                        units=detector.pixel_size_unit)
+                    detector.pixel_size.y = conv(5.08,
+                        units=detector.pixel_size_unit)
                 else:
-                    output.xaxis("\rm{Q}", 'A^{-1}')
-                if data_conv_i is not None:
-                    output.yaxis("\rm{Intensity}", output.y_unit)
+                    detector.pixel_size.x = 5.08
+                    detector.pixel_size.y = 5.08
+
+                # Store beam center in distance units
+                # Det 640 x 640 mm
+                if self.has_converter and detector.beam_center_unit != 'mm':
+                    conv = Converter('mm')
+                    detector.beam_center.x = conv(center_x * 5.08,
+                        units=detector.beam_center_unit)
+                    detector.beam_center.y = conv(center_y * 5.08,
+                        units=detector.beam_center_unit)
                 else:
-                    output.yaxis("\rm{Intensity}", "cm^{-1}")
-
-                # Store loading process information
-                output.meta_data['loader'] = self.type_name
-                return output
+                    detector.beam_center.x = center_x * 5.08
+                    detector.beam_center.y = center_y * 5.08
+
+                # Detector type
+                try:
+                    detector.name = line_toks[7]
+                except:
+                    # Detector name is not a mandatory entry
+                    pass
+
+            # BCENT(X,Y) A1(mm) A2(mm) A1A2DIST(m) DL/L BSTOP(mm) DET_TYP
+            if line.count("BCENT") > 0:
+                is_center = True
+
+            # Parse the data
+            if is_data_started:
+                toks = line.split()
+
+                try:
+                    _x = float(toks[0])
+                    _y = float(toks[1])
+                    _dy = float(toks[2])
+                    _dx = float(toks[3])
+
+                    if data_conv_q is not None:
+                        _x = data_conv_q(_x, units=base_q_unit)
+                        _dx = data_conv_q(_dx, units=base_q_unit)
+
+                    if data_conv_i is not None:
+                        _y = data_conv_i(_y, units=base_i_unit)
+                        _dy = data_conv_i(_dy, units=base_i_unit)
+
+                    self.current_dataset.x[data_line] = _x
+                    self.current_dataset.y[data_line] = _y
+                    self.current_dataset.dy[data_line] = _dy
+                    self.current_dataset.dx[data_line] = _dx
+                    data_line += 1
+
+                except ValueError:
+                    # Could not read this data line. If we are here
+                    # it is because we are in the data section. Just
+                    # skip it.
+                    pass
+
+            # The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
+            # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
+            if line.count("The 6 columns") > 0:
+                is_data_started = True
+
+        self.remove_empty_q_values(True, True)
+
+        # Sanity check
+        if not len(self.current_dataset.y) == len(self.current_dataset.dy):
+            self.set_all_to_none()
+            msg = "abs_reader: y and dy have different length"
+            raise ValueError(msg)
+        # If the data length is zero, consider this as
+        # though we were not able to read the file.
+        if len(self.current_dataset.x) == 0:
+            self.set_all_to_none()
+            raise ValueError("ascii_reader: could not load file")
+
+        if data_conv_q is not None:
+            self.current_dataset.xaxis("\rm{Q}", base_q_unit)
         else:
-            raise RuntimeError, "%s is not a file" % path
-        return None
+            self.current_dataset.xaxis("\rm{Q}", 'A^{-1}')
+        if data_conv_i is not None:
+            self.current_dataset.yaxis("\rm{Intensity}", base_i_unit)
+        else:
+            self.current_dataset.yaxis("\rm{Intensity}", "cm^{-1}")
+
+        # Store loading process information
+        self.current_datainfo.meta_data['loader'] = self.type_name
+        self.send_to_output()
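Throughout the rewritten get_file_contents(), unit handling follows the same guarded pattern: import nxsunit's Converter when it is available and fall back to the raw value otherwise. The sketch below restates that pattern outside the reader; the helper name and the example wavelength are invented for illustration, while the call form conv(value, units=...) is the one used in the diff above.

```python
# Guarded import of the optional unit converter, as in the reader above.
try:
    from sas.sascalc.data_util.nxsunit import Converter
    has_converter = True
except ImportError:
    has_converter = False

def convert(value, file_unit, target_unit):
    """Convert value from the file's unit to target_unit when possible."""
    if has_converter and target_unit != file_unit:
        conv = Converter(file_unit)      # converter anchored to the file's unit
        return conv(value, units=target_unit)
    return value                         # no converter, or nothing to convert

# e.g. a wavelength read from an .ABS header, assumed to be in Angstrom:
wavelength = convert(6.0, 'A', 'A')      # no-op when the units already match
```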
src/sas/sascalc/dataloader/readers/ascii_reader.py
--- src/sas/sascalc/dataloader/readers/ascii_reader.py (rda8bb53)
+++ src/sas/sascalc/dataloader/readers/ascii_reader.py (rad92c5a)
@@ -150,13 +150,5 @@
             raise FileContentsException(msg)

-        # Remove any point where Q == 0
-        x = self.current_dataset.x
-        self.current_dataset.x = self.current_dataset.x[x != 0]
-        self.current_dataset.y = self.current_dataset.y[x != 0]
-        self.current_dataset.dy = self.current_dataset.dy[x != 0] if \
-            has_error_dy else np.zeros(len(self.current_dataset.y))
-        self.current_dataset.dx = self.current_dataset.dx[x != 0] if \
-            has_error_dx else np.zeros(len(self.current_dataset.x))
-
+        self.remove_empty_q_values(has_error_dx, has_error_dy)
         self.current_dataset.xaxis("\rm{Q}", 'A^{-1}')
         self.current_dataset.yaxis("\rm{Intensity}", "cm^{-1}")
@@ -165,13 +157,2 @@
         self.current_datainfo.meta_data['loader'] = self.type_name
         self.send_to_output()
-
-    def reset_data_list(self, no_lines):
-        """
-        Reset the plottable_1D object
-        """
-        # Initialize data sets with arrays the maximum possible size
-        x = np.zeros(no_lines)
-        y = np.zeros(no_lines)
-        dy = np.zeros(no_lines)
-        dx = np.zeros(no_lines)
-        self.current_dataset = plottable_1D(x, y, dx, dy)
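With remove_empty_q_values and reset_data_list now living on FileReader, a derived reader only needs to pre-allocate, fill by index, trim, and hand the result off. A hypothetical minimal subclass is sketched below; it assumes the SasView dataloader package is importable, and MinimalColumnReader and its two-column format are not part of this changeset.

```python
from sas.sascalc.dataloader.file_reader_base_class import FileReader
from sas.sascalc.dataloader.data_info import DataInfo


class MinimalColumnReader(FileReader):
    """Hypothetical two-column (Q, I) reader built on the new base-class helpers."""
    type_name = "Minimal 2-column"
    ext = ['.txt']

    def get_file_contents(self):
        # self.f_open is opened by FileReader.load() before this is called
        lines = self.f_open.read().splitlines()
        self.output = []
        self.current_datainfo = DataInfo()
        self.current_datainfo.filename = self.f_open.name
        # Pre-allocate x/y/dx/dy to the maximum possible size (base-class helper)
        self.reset_data_list(len(lines))
        data_line = 0
        for line in lines:
            toks = line.split()
            try:
                self.current_dataset.x[data_line] = float(toks[0])
                self.current_dataset.y[data_line] = float(toks[1])
                data_line += 1
            except (IndexError, ValueError):
                continue  # skip headers and malformed rows
        # Trim the unused Q == 0 padding; no dx/dy columns were read
        self.remove_empty_q_values(has_error_dx=False, has_error_dy=False)
        self.current_dataset.xaxis("\rm{Q}", 'A^{-1}')
        self.current_dataset.yaxis("\rm{Intensity}", "cm^{-1}")
        self.current_datainfo.meta_data['loader'] = self.type_name
        self.send_to_output()
```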