1 | """ |
---|
2 | TXT/IGOR 2D Q Map file reader |
---|
3 | """ |
---|
4 | ##################################################################### |
---|
5 | #This software was developed by the University of Tennessee as part of the |
---|
6 | #Distributed Data Analysis of Neutron Scattering Experiments (DANSE) |
---|
7 | #project funded by the US National Science Foundation. |
---|
8 | #See the license text in license.txt |
---|
9 | #copyright 2008, University of Tennessee |
---|
10 | ###################################################################### |
---|
11 | import os |
---|
12 | import math |
---|
13 | import time |
---|
14 | |
---|
15 | import numpy as np |
---|
16 | |
---|
17 | from sas.sascalc.data_util.nxsunit import Converter |
---|
18 | |
---|
19 | from ..data_info import plottable_2D, DataInfo, Detector |
---|
20 | from ..file_reader_base_class import FileReader |
---|
21 | from ..loader_exceptions import FileContentsException |
---|
22 | |
---|
23 | |
---|
def check_point(x_point):
    """
    Check point validity: return the value as a float, or zero if it
    cannot be converted.
    """
    # set zero for non-float entries
    try:
        return float(x_point)
    except Exception:
        return 0


class Reader(FileReader):
    """ Simple data reader for Igor data files """
    ## File type
    type_name = "IGOR/DAT 2D Q_map"
    ## Wildcards
    type = ["IGOR/DAT 2D file in Q_map (*.dat)|*.DAT"]
    ## Extension
    ext = ['.DAT', '.dat']

    def write(self, filename, data):
        """
        Write to .dat

        :param filename: file name to write
        :param data: data2D
        """
        # Write the file
        with open(filename, 'w') as fd:
            t = time.localtime()
            time_str = time.strftime("%H:%M on %b %d %y", t)

            header_str = "Data columns are Qx - Qy - I(Qx,Qy)\n\nASCII data"
            header_str += " created at %s \n\n" % time_str
            # simple 2D header
            fd.write(header_str)
            # write qx qy I values
            for i in range(len(data.data)):
                fd.write("%g %g %g\n" % (data.qx_data[i],
                                         data.qy_data[i],
                                         data.data[i]))

    def get_file_contents(self):
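        """
        Read an IGOR/NIST 2D q-map ASCII file: parse the instrument
        header and the Qx, Qy, I(Qx,Qy) data columns, then populate
        ``self.current_dataset`` and ``self.current_datainfo`` and send
        them to the output.
        """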
        # Read file
        buf = self.readall()
        self.f_open.close()
        # Instantiate data object
        self.current_dataset = plottable_2D()
        self.current_datainfo = DataInfo()
        self.current_datainfo.filename = os.path.basename(self.f_open.name)
        self.current_datainfo.detector.append(Detector())

        # Get content
        data_started = False

        ## Defaults
        lines = buf.split('\n')
        x = []
        y = []

        wavelength = None
        distance = None
        transmission = None

        pixel_x = None
        pixel_y = None

        is_info = False
        is_center = False

        # Remove trailing empty lines so that the exact number of data
        # points can be determined
        while lines and not lines[-1].strip():
            del lines[-1]

        #Read Header and find the dimensions of 2D data
        line_num = 0
        # Old version NIST files: 0
        ver = 0
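        # ver == 1 marks the newer format whose "Data columns" header line
        # lists an err(I) column; that extra column shifts the optional
        # qz/dqx/dqy columns by one position in the parsing below.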
        for line in lines:
            line_num += 1
            ## Reading the header applies only to IGOR/NIST 2D q_map data files
            # Find setup info line
            if is_info:
                is_info = False
                line_toks = line.split()
                # Wavelength in Angstrom
                try:
                    wavelength = float(line_toks[1])
                    # Wavelength is stored in angstroms; convert if necessary
                    if self.current_datainfo.source.wavelength_unit != 'A':
                        conv = Converter('A')
                        wavelength = conv(wavelength,
                                          units=self.current_datainfo.source.wavelength_unit)
                except Exception:
                    pass # Not required
                try:
                    distance = float(line_toks[3])
                    # Distance is stored in meters; convert if necessary
                    if self.current_datainfo.detector[0].distance_unit != 'm':
                        conv = Converter('m')
                        distance = conv(distance,
                                        units=self.current_datainfo.detector[0].distance_unit)
                except Exception:
                    pass # Not required

                try:
                    transmission = float(line_toks[4])
                except Exception:
                    pass # Not required

            if line.count("LAMBDA") > 0:
                is_info = True

            # Find center info line
            if is_center:
                is_center = False
                line_toks = line.split()
                # Center in bin number
                center_x = float(line_toks[0])
                center_y = float(line_toks[1])
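                # Note: the beam center read here is not stored in
                # current_datainfo by this reader.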

            if line.count("BCENT") > 0:
                is_center = True
            # Check version
            if line.count("Data columns") > 0:
                if line.count("err(I)") > 0:
                    ver = 1
            # Find data start
            if line.count("ASCII data") > 0:
                data_started = True
                continue

            ## Read and get data.
            if data_started:
                line_toks = line.split()
                if len(line_toks) == 0:
                    # empty line
                    continue
                # the number of columns must stay the same
                col_num = len(line_toks)
                break

        # Make a numpy array so the header lines can be removed by index
        lines_array = np.array(lines)

        # index for lines_array
        lines_index = np.arange(len(lines))

        # get the data lines
        data_lines = lines_array[lines_index >= (line_num - 1)]
        # Now we get the total number of rows (i.e., # of data points)
        row_num = len(data_lines)
        # join the data lines into a single string so the separator is controlled
        data_list = " ".join(data_lines.tolist())
        # split all data into one big list using " " as the separator
        data_list = data_list.split()

        # If the size is not consistent with the data, try the tab (\t)
        # separator instead (this fallback may be removed once the
        # whitespace split is confirmed to work in all cases).
        if len(data_list) != (len(data_lines)) * col_num:
            data_list = "\t".join(data_lines.tolist())
            data_list = data_list.split()

        # Convert the strings to floats (non-numeric entries become zero)
        #data_list = map(float,data_list)
        data_list1 = list(map(check_point, data_list))

        # numpy array form
        data_array = np.array(data_list1)
        # Reshape based on row_num and col_num;
        # otherwise raise an error.
        try:
            data_point = data_array.reshape(row_num, col_num).transpose()
        except Exception:
            msg = "red2d_reader can't read this file: Incorrect number of data points provided."
            raise FileContentsException(msg)
        ## Extract the data columns (column order is hard-coded; TODO: find a better way)
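        # Expected column order: Qx, Qy, I(Qx,Qy), then err(I) when ver == 1,
        # optionally followed by Qz, dQx and dQy (shifted by one column when
        # err(I) is present).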
        # Defaults
        dqx_data = np.zeros(0)
        dqy_data = np.zeros(0)
        err_data = np.ones(row_num)
        qz_data = np.zeros(row_num)
        mask = np.ones(row_num, dtype=bool)
        # Get from the array
        qx_data = data_point[0]
        qy_data = data_point[1]
        data = data_point[2]
        if ver == 1:
            if col_num > (2 + ver):
                err_data = data_point[(2 + ver)]
        if col_num > (3 + ver):
            qz_data = data_point[(3 + ver)]
        if col_num > (4 + ver):
            dqx_data = data_point[(4 + ver)]
        if col_num > (5 + ver):
            dqy_data = data_point[(5 + ver)]
        #if col_num > (6 + ver): mask[data_point[(6 + ver)] < 1] = False
        q_data = np.sqrt(qx_data*qx_data+qy_data*qy_data+qz_data*qz_data)

        # Extra protection (needed for some data files):
        # If all mask elements are False, set them all to True
        if not mask.any():
            mask[mask == False] = True

        # Store limits of the image in q space
        xmin = np.min(qx_data)
        xmax = np.max(qx_data)
        ymin = np.min(qy_data)
        ymax = np.max(qy_data)

        ## calculate the range of the qx and qy data
        x_size = math.fabs(xmax - xmin)
        y_size = math.fabs(ymax - ymin)

        # calculate the number of pixels along each axis
        # (assumes the points lie on a regular, roughly square grid)
        npix_y = math.floor(math.sqrt(len(data)))
        npix_x = math.floor(len(data) / npix_y)

        # calculate the size of bins
        xstep = x_size / (npix_x - 1)
        ystep = y_size / (npix_y - 1)

        # store x and y axis bin centers in q space
        x_bins = np.arange(xmin, xmax + xstep, xstep)
        y_bins = np.arange(ymin, ymax + ystep, ystep)

        # get the limits of q values
        xmin = xmin - xstep / 2
        xmax = xmax + xstep / 2
        ymin = ymin - ystep / 2
        ymax = ymax + ystep / 2

        #Store data in outputs
        #TODO: Check the lengths
        self.current_dataset.data = data
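        # If no err(I) column was provided, estimate the error as sqrt(|I|),
        # replacing zeros with 1.0.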
        if (err_data == 1).all():
            self.current_dataset.err_data = np.sqrt(np.abs(data))
            self.current_dataset.err_data[self.current_dataset.err_data == 0.0] = 1.0
        else:
            self.current_dataset.err_data = err_data

        self.current_dataset.qx_data = qx_data
        self.current_dataset.qy_data = qy_data
        self.current_dataset.q_data = q_data
        self.current_dataset.mask = mask

        self.current_dataset.x_bins = x_bins
        self.current_dataset.y_bins = y_bins

        self.current_dataset.xmin = xmin
        self.current_dataset.xmax = xmax
        self.current_dataset.ymin = ymin
        self.current_dataset.ymax = ymax

        self.current_datainfo.source.wavelength = wavelength

        # Store pixel size in mm
        self.current_datainfo.detector[0].pixel_size.x = pixel_x
        self.current_datainfo.detector[0].pixel_size.y = pixel_y
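        # Note: pixel_x and pixel_y are not parsed from the header by this
        # reader, so the stored pixel size is None unless set elsewhere.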

        # Store the sample to detector distance
        self.current_datainfo.detector[0].distance = distance

        # Optional resolution data: if all dq values are zero, do not pass
        # them to the output.
        if len(dqx_data) == len(qx_data) and dqx_data.any() != 0:
            # If there is no dqx_data, do not pass dqy_data either
            # (one-axis dq is not supported yet).
            if len(dqy_data) == len(qy_data) and dqy_data.any() != 0:
                # dq given parallel/perpendicular to Q is not supported
                # directly; for the older format, project the components
                # onto Cartesian (x, y) coordinates.
                if ver != 1:
                    diag = np.sqrt(qx_data * qx_data + qy_data * qy_data)
                    cos_th = qx_data / diag
                    sin_th = qy_data / diag
                    self.current_dataset.dqx_data = np.sqrt(
                        (dqx_data * cos_th) * (dqx_data * cos_th)
                        + (dqy_data * sin_th) * (dqy_data * sin_th))
                    self.current_dataset.dqy_data = np.sqrt(
                        (dqx_data * sin_th) * (dqx_data * sin_th)
                        + (dqy_data * cos_th) * (dqy_data * cos_th))
                else:
                    self.current_dataset.dqx_data = dqx_data
                    self.current_dataset.dqy_data = dqy_data

        # Units of axes
        self.current_dataset = self.set_default_2d_units(self.current_dataset)

        # Store loading process information
        self.current_datainfo.meta_data['loader'] = self.type_name

        self.send_to_output()