1 | """ |
---|
2 | IGOR 1D data reader |
---|
3 | """ |
---|
4 | ##################################################################### |
---|
5 | # This software was developed by the University of Tennessee as part of the |
---|
6 | # Distributed Data Analysis of Neutron Scattering Experiments (DANSE) |
---|
7 | # project funded by the US National Science Foundation. |
---|
8 | # See the license text in license.txt |
---|
9 | # copyright 2008, University of Tennessee |
---|
10 | ###################################################################### |
---|
11 | |
---|
12 | import logging |
---|
13 | import numpy as np |
---|
14 | from sas.sascalc.dataloader.file_reader_base_class import FileReader |
---|
15 | from sas.sascalc.dataloader.data_info import DataInfo, plottable_1D, Data1D,\ |
---|
16 | Detector |
---|
17 | from sas.sascalc.dataloader.loader_exceptions import FileContentsException,\ |
---|
18 | DefaultReaderException |
---|
19 | |
---|
20 | logger = logging.getLogger(__name__) |
---|
21 | |
---|
22 | |
---|
23 | class Reader(FileReader): |
---|
24 | """ |
---|
25 | Class to load IGOR reduced .ABS files |
---|
26 | """ |
---|
27 | # File type |
---|
28 | type_name = "IGOR 1D" |
---|
29 | # Wildcards |
---|
30 | type = ["IGOR 1D files (*.abs)|*.abs"] |
---|
31 | # List of allowed extensions |
---|
32 | ext = ['.abs'] |
---|
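
    # Minimal usage sketch (not part of the reader itself): .abs files are
    # normally loaded through the generic loader facade rather than by
    # instantiating this class directly. The file name below is a placeholder.
    #
    #     from sas.sascalc.dataloader.loader import Loader
    #     data = Loader().load("example.abs")   # Data1D object(s)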

    def get_file_contents(self):
        """
        Get the contents of the file

        :raise RuntimeError: when the file can't be opened
        :raise ValueError: when the lengths of the data vectors are inconsistent
        """
        buff = self.f_open.read()
        filepath = self.f_open.name
        lines = buff.splitlines()
        self.has_converter = True
        try:
            from sas.sascalc.data_util.nxsunit import Converter
        except ImportError:
            self.has_converter = False
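        # Without nxsunit the has_converter flag stays False and all values
        # below are stored in the file's native units (A, m, cm) unconverted.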
        self.output = []
        self.current_datainfo = DataInfo()
        self.current_datainfo.filename = filepath
        self.reset_data_list(len(lines))
        detector = Detector()
        data_line = 0
        self.current_datainfo.detector.append(detector)

        is_info = False
        is_center = False
        is_data_started = False
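        # The loop below is a small line-oriented state machine: a "LAMBDA"
        # header line flags the *next* line for wavelength/SDD/transmission/
        # thickness, a "BCENT" line flags the next line for beam-center info,
        # and every line after the "The 6 columns" marker is treated as data.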

        base_q_unit = '1/A'
        base_i_unit = '1/cm'
        data_conv_q = Converter(base_q_unit) if self.has_converter else None
        data_conv_i = Converter(base_i_unit) if self.has_converter else None
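        # The base units match the native ABS units (Q in 1/A, I(Q) in 1/cm),
        # so the conversions below are effectively pass-throughs; when nxsunit
        # is unavailable the converters stay None and raw values are kept.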

        for line in lines:
            # Information line 1
            if is_info:
                is_info = False
                line_toks = line.split()

                # Wavelength in Angstrom
                try:
                    value = float(line_toks[1])
                    if self.has_converter and \
                            self.current_datainfo.source.wavelength_unit != 'A':
                        conv = Converter('A')
                        self.current_datainfo.source.wavelength = conv(value,
                            units=self.current_datainfo.source.wavelength_unit)
                    else:
                        self.current_datainfo.source.wavelength = value
                except (IndexError, ValueError):
                    msg = "ABSReader cannot read wavelength from %s" % filepath
                    self.current_datainfo.errors.append(msg)

                # Detector distance in meters
                try:
                    value = float(line_toks[3])
                    if self.has_converter and detector.distance_unit != 'm':
                        conv = Converter('m')
                        detector.distance = conv(value,
                                                 units=detector.distance_unit)
                    else:
                        detector.distance = value
                except (IndexError, ValueError):
                    msg = "ABSReader cannot read SDD from %s" % filepath
                    self.current_datainfo.errors.append(msg)

                # Transmission
                try:
                    self.current_datainfo.sample.transmission = \
                        float(line_toks[4])
                except (IndexError, ValueError):
                    # Transmission isn't always in the header
                    pass

                # Sample thickness in cm
                try:
                    value = float(line_toks[5][:-1])
                    if self.has_converter and \
                            self.current_datainfo.sample.thickness_unit != 'cm':
                        conv = Converter('cm')
                        self.current_datainfo.sample.thickness = conv(value,
                            units=self.current_datainfo.sample.thickness_unit)
                    else:
                        self.current_datainfo.sample.thickness = value
                except (IndexError, ValueError):
                    # Thickness is not a mandatory entry
                    pass

            # MON CNT LAMBDA DET ANG DET DIST TRANS THICK AVE STEP
            if line.count("LAMBDA") > 0:
                is_info = True

            # Find center info line
            if is_center:
                is_center = False
                line_toks = line.split()
                # Center in bin number
                center_x = float(line_toks[0])
                center_y = float(line_toks[1])

                # Bin size
                if self.has_converter and detector.pixel_size_unit != 'mm':
                    conv = Converter('mm')
                    detector.pixel_size.x = conv(5.08,
                                                 units=detector.pixel_size_unit)
                    detector.pixel_size.y = conv(5.08,
                                                 units=detector.pixel_size_unit)
                else:
                    detector.pixel_size.x = 5.08
                    detector.pixel_size.y = 5.08

                # Store beam center in distance units
                # Det 640 x 640 mm
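                # e.g. a fitted center at bin 64.5 corresponds to
                # 64.5 * 5.08 mm = 327.66 mm across the 640 mm face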
                if self.has_converter and detector.beam_center_unit != 'mm':
                    conv = Converter('mm')
                    detector.beam_center.x = conv(center_x * 5.08,
                        units=detector.beam_center_unit)
                    detector.beam_center.y = conv(center_y * 5.08,
                        units=detector.beam_center_unit)
                else:
                    detector.beam_center.x = center_x * 5.08
                    detector.beam_center.y = center_y * 5.08

                # Detector type
                try:
                    detector.name = line_toks[7]
                except IndexError:
                    # Detector name is not a mandatory entry
                    pass

            # BCENT(X,Y) A1(mm) A2(mm) A1A2DIST(m) DL/L BSTOP(mm) DET_TYP
            if line.count("BCENT") > 0:
                is_center = True

            # Parse the data
            if is_data_started:
                toks = line.split()

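                # Column layout, per the "6 columns" header handled below:
                # toks[0] = Q, toks[1] = I(Q), toks[2] = std. dev. I(Q),
                # toks[3] = sigmaQ (stored as dx)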
                try:
                    _x = float(toks[0])
                    _y = float(toks[1])
                    _dy = float(toks[2])
                    _dx = float(toks[3])

                    if data_conv_q is not None:
                        _x = data_conv_q(_x, units=base_q_unit)
                        _dx = data_conv_q(_dx, units=base_q_unit)

                    if data_conv_i is not None:
                        _y = data_conv_i(_y, units=base_i_unit)
                        _dy = data_conv_i(_dy, units=base_i_unit)

                    self.current_dataset.x[data_line] = _x
                    self.current_dataset.y[data_line] = _y
                    self.current_dataset.dy[data_line] = _dy
                    self.current_dataset.dx[data_line] = _dx
                    data_line += 1

                except (IndexError, ValueError):
                    # Could not read this data line. If we are here
                    # it is because we are in the data section. Just
                    # skip it.
                    pass

            # The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
            # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
            if line.count("The 6 columns") > 0:
                is_data_started = True

        self.remove_empty_q_values()
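        # reset_data_list() pre-allocated the arrays to one slot per file
        # line, so the unused (empty-Q) slots are trimmed before the checks.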

        # Sanity check
        if len(self.current_dataset.y) != len(self.current_dataset.dy):
            self.set_all_to_none()
            msg = "abs_reader: y and dy have different lengths"
            raise ValueError(msg)
        # If the data length is zero, consider this as
        # though we were not able to read the file.
        if len(self.current_dataset.x) == 0:
            self.set_all_to_none()
            raise ValueError("abs_reader: could not load file")

        if data_conv_q is not None:
            self.current_dataset.xaxis("\\rm{Q}", base_q_unit)
        else:
            self.current_dataset.xaxis("\\rm{Q}", 'A^{-1}')
        if data_conv_i is not None:
            self.current_dataset.yaxis("\\rm{Intensity}", base_i_unit)
        else:
            self.current_dataset.yaxis("\\rm{Intensity}", "cm^{-1}")

        # Store loading process information
        self.current_datainfo.meta_data['loader'] = self.type_name
        self.send_to_output()