"""
    TXT/IGOR 2D Q Map file reader
"""
#####################################################################
# This software was developed by the University of Tennessee as part of the
# Distributed Data Analysis of Neutron Scattering Experiments (DANSE)
# project funded by the US National Science Foundation.
# See the license text in license.txt
# copyright 2008, University of Tennessee
######################################################################
from __future__ import print_function

import os
import numpy as np
import math
from sas.sascalc.dataloader.data_info import Data2D, Detector

# Look for unit converter
has_converter = True
try:
    from sas.sascalc.data_util.nxsunit import Converter
except:
    has_converter = False


def check_point(x_point):
    """
    Check point validity: return the value as a float, or 0 if the token
    is not numeric, so that a stray entry cannot abort the bulk parse.
    """
    # set zero for non_floats
    try:
        return float(x_point)
    except:
        return 0


class Reader:
    """ Simple data reader for Igor data files """
    ## File type
    type_name = "IGOR/DAT 2D Q_map"
    ## Wildcards
    type = ["IGOR/DAT 2D file in Q_map (*.dat)|*.DAT"]
    ## Extension
    ext = ['.DAT', '.dat']
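
    # Note: type_name, type and ext are presumably what the surrounding
    # dataloader framework uses to match files to this reader; within this
    # module they are only read back for the meta_data['loader'] entry.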

    def write(self, filename, data):
        """
        Write to .dat

        :param filename: file name to write
        :param data: data2D
        """
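        # The layout written here mirrors what read() expects: a short header
        # containing the "ASCII data" marker, followed by whitespace-separated
        # "qx qy I(qx,qy)" rows, one detector pixel per row.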
        import time
        # Write the file
        fd = open(filename, 'w')
        t = time.localtime()
        time_str = time.strftime("%H:%M on %b %d %y", t)

        header_str = "Data columns are Qx - Qy - I(Qx,Qy)\n\nASCII data"
        header_str += " created at %s \n\n" % time_str
        # simple 2D header
        fd.write(header_str)
        # write qx qy I values
        for i in range(len(data.data)):
            fd.write("%g %g %g\n" % (data.qx_data[i],
                                     data.qy_data[i],
                                     data.data[i]))
        # close
        fd.close()

    def read(self, filename=None):
        """ Read file """
        if not os.path.isfile(filename):
            raise ValueError("Specified file %s is not a regular file"
                             % filename)

        # Read file
        f = open(filename, 'r')
        buf = f.read()
        f.close()
        # Instantiate data object
        output = Data2D()
        output.filename = os.path.basename(filename)
        detector = Detector()
        if len(output.detector) > 0:
            print(str(output.detector[0]))
        output.detector.append(detector)

        # Get content
        dataStarted = False

        ## Defaults
        lines = buf.split('\n')
        x = []
        y = []

        wavelength = None
        distance = None
        transmission = None

        pixel_x = None
        pixel_y = None

        isInfo = False
        isCenter = False

        data_conv_q = None
        data_conv_i = None

        # Set units: This is the unit assumed for Q and I in the data file.
        if has_converter == True and output.Q_unit != '1/A':
            data_conv_q = Converter('1/A')
            # Test it
            data_conv_q(1.0, output.Q_unit)

        if has_converter == True and output.I_unit != '1/cm':
            data_conv_i = Converter('1/cm')
            # Test it
            data_conv_i(1.0, output.I_unit)
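        # data_conv_q / data_conv_i convert from the units assumed in the
        # file (1/A for Q, 1/cm for I) into whatever units the Data2D object
        # declares; they remain None when the units already match or when no
        # converter is available.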

        # Strip trailing empty lines so that the number of data points
        # can be counted exactly.
        while lines and len(lines[-1].strip()) < 1:
            del lines[-1]

        # Read the header and find the dimensions of the 2D data
        line_num = 0
        # Old version NIST files: 0
        ver = 0
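        # The header is scanned line by line: the line after a "LAMBDA"
        # keyword carries wavelength, detector distance and transmission,
        # the line after "BCENT" carries the beam centre, a "Data columns"
        # line mentioning err(I) flags the newer file version, and the
        # "ASCII data" marker announces the first numeric row.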
        for line in lines:
            line_num += 1
            ## Reading the header applies only to IGOR/NIST 2D q_map data files
            # Find setup info line
            if isInfo:
                isInfo = False
                line_toks = line.split()
                # Wavelength in Angstrom
                try:
                    wavelength = float(line_toks[1])
                    # Units
                    if has_converter == True and \
                            output.source.wavelength_unit != 'A':
                        conv = Converter('A')
                        wavelength = conv(wavelength,
                                          units=output.source.wavelength_unit)
                except:
                    # Not required
                    pass
                # Distance in mm
                try:
                    distance = float(line_toks[3])
                    # Units
                    if has_converter == True and detector.distance_unit != 'm':
                        conv = Converter('m')
                        distance = conv(distance, units=detector.distance_unit)
                except:
                    # Not required
                    pass

                # Transmission
                try:
                    transmission = float(line_toks[4])
                except:
                    # Not required
                    pass

            if line.count("LAMBDA") > 0:
                isInfo = True

            # Find center info line
            if isCenter:
                isCenter = False
                line_toks = line.split()
                # Center in bin number
                center_x = float(line_toks[0])
                center_y = float(line_toks[1])

            if line.count("BCENT") > 0:
                isCenter = True
            # Check version
            if line.count("Data columns") > 0:
                if line.count("err(I)") > 0:
                    ver = 1
            # Find data start
            if line.count("ASCII data") > 0:
                dataStarted = True
                continue

            ## Read and get data.
            if dataStarted == True:
                line_toks = line.split()
                if len(line_toks) == 0:
                    # empty line
                    continue
                # The number of columns must stay the same for every data row.
                col_num = len(line_toks)
                break
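
        # Everything from the first data row onward is parsed in bulk below:
        # the remaining lines are joined, split into individual tokens and
        # reshaped into a (row_num, col_num) array, which assumes the data
        # block is rectangular.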
        # Make numpy array to remove header lines using index
        lines_array = np.array(lines)

        # index for lines_array
        lines_index = np.arange(len(lines))

        # get the data lines
        data_lines = lines_array[lines_index >= (line_num - 1)]
        # Now we get the total number of rows (i.e., # of data points)
        row_num = len(data_lines)
        # make it a list again to control the separators
        data_list = " ".join(data_lines.tolist())
        # split all data into one big list with " " as the separator
        data_list = data_list.split()

        # Check that the token count is consistent with the data; otherwise
        # retry with the tab (\t) separator.
        # (This fallback may be removed once the space separator is confirmed
        # to work in all cases.)
        if len(data_list) != (len(data_lines)) * col_num:
            data_list = "\t".join(data_lines.tolist())
            data_list = data_list.split()

        # Convert the strings into floats
        #data_list = map(float, data_list)
        data_list1 = list(map(check_point, data_list))

        # numpy array form
        data_array = np.array(data_list1)
        # Redimension based on row_num and col_num,
        # otherwise raise an error.
        try:
            data_point = data_array.reshape(row_num, col_num).transpose()
        except:
            msg = "red2d_reader: Can't read this file: Not a proper file format"
            raise ValueError(msg)
        ## Get all the data: the column positions are hard-coded for now;
        ## TODO: find a better way
        # Defaults
        dqx_data = np.zeros(0)
        dqy_data = np.zeros(0)
        err_data = np.ones(row_num)
        qz_data = np.zeros(row_num)
        mask = np.ones(row_num, dtype=bool)
        # Get from the array
        qx_data = data_point[0]
        qy_data = data_point[1]
        data = data_point[2]
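        # Columns 0-2 are always qx, qy and I(qx, qy).  For version-1 files
        # column 3 is err(I); any further columns are read, in order, as qz,
        # dqx and dqy (for version-0 files the same optional columns start
        # one position earlier).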
        if ver == 1:
            if col_num > (2 + ver):
                err_data = data_point[(2 + ver)]
        if col_num > (3 + ver):
            qz_data = data_point[(3 + ver)]
        if col_num > (4 + ver):
            dqx_data = data_point[(4 + ver)]
        if col_num > (5 + ver):
            dqy_data = data_point[(5 + ver)]
        #if col_num > (6 + ver): mask[data_point[(6 + ver)] < 1] = False
        q_data = np.sqrt(qx_data*qx_data + qy_data*qy_data + qz_data*qz_data)

        # Extra protection (needed for some data files):
        # if all mask elements are False, set them all to True
        if not mask.any():
            mask[mask == False] = True

        # Store limits of the image in q space
        xmin = np.min(qx_data)
        xmax = np.max(qx_data)
        ymin = np.min(qy_data)
        ymax = np.max(qy_data)

        # units
        if has_converter == True and output.Q_unit != '1/A':
            xmin = data_conv_q(xmin, units=output.Q_unit)
            xmax = data_conv_q(xmax, units=output.Q_unit)
            ymin = data_conv_q(ymin, units=output.Q_unit)
            ymax = data_conv_q(ymax, units=output.Q_unit)

        ## calculate the range of the qx and qy data
        x_size = math.fabs(xmax - xmin)
        y_size = math.fabs(ymax - ymin)

        # calculate the number of pixels along each axis
        npix_y = math.floor(math.sqrt(len(data)))
        npix_x = math.floor(len(data) / npix_y)
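        # The points are assumed to lie on a regular, roughly square grid:
        # npix_y is estimated from sqrt(N) and npix_x from what remains,
        # and the bin centres below are spaced evenly across the q range.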

        # calculate the size of the bins
        xstep = x_size / (npix_x - 1)
        ystep = y_size / (npix_y - 1)

        # store x and y axis bin centers in q space
        x_bins = np.arange(xmin, xmax + xstep, xstep)
        y_bins = np.arange(ymin, ymax + ystep, ystep)

        # get the limits of q values
        xmin = xmin - xstep / 2
        xmax = xmax + xstep / 2
        ymin = ymin - ystep / 2
        ymax = ymax + ystep / 2

        # Store data in outputs
        # TODO: Check the lengths
        output.data = data
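        # If the file provided no uncertainty column, err_data is still all
        # ones; fall back to sqrt(|I|) and replace zero errors with 1.0 so
        # that no point carries a zero uncertainty.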
        if (err_data == 1).all():
            output.err_data = np.sqrt(np.abs(data))
            output.err_data[output.err_data == 0.0] = 1.0
        else:
            output.err_data = err_data

        output.qx_data = qx_data
        output.qy_data = qy_data
        output.q_data = q_data
        output.mask = mask

        output.x_bins = x_bins
        output.y_bins = y_bins

        output.xmin = xmin
        output.xmax = xmax
        output.ymin = ymin
        output.ymax = ymax

        output.source.wavelength = wavelength

        # Store pixel size in mm
        detector.pixel_size.x = pixel_x
        detector.pixel_size.y = pixel_y

        # Store the sample to detector distance
        detector.distance = distance

        # optional data: if all of the dq data are 0, do not pass them on
        if len(dqx_data) == len(qx_data) and dqx_data.any() != 0:
            # if there is no dqx_data, do not pass dqy_data either
            # (1-axis dq is not supported yet).
            if len(dqy_data) == len(qy_data) and dqy_data.any() != 0:
                # dq parallel/perpendicular is not supported as such; for the
                # older format (ver != 1) the two dq columns are treated as
                # components along and across Q and are combined in quadrature
                # after projection onto the x and y axes.
                if ver != 1:
                    diag = np.sqrt(qx_data * qx_data + qy_data * qy_data)
                    cos_th = qx_data / diag
                    sin_th = qy_data / diag
                    output.dqx_data = np.sqrt((dqx_data * cos_th) *
                                              (dqx_data * cos_th)
                                              + (dqy_data * sin_th) *
                                              (dqy_data * sin_th))
                    output.dqy_data = np.sqrt((dqx_data * sin_th) *
                                              (dqx_data * sin_th)
                                              + (dqy_data * cos_th) *
                                              (dqy_data * cos_th))
                else:
                    output.dqx_data = dqx_data
                    output.dqy_data = dqy_data

        # Units of axes
        if data_conv_q is not None:
            output.xaxis("\\rm{Q_{x}}", output.Q_unit)
            output.yaxis("\\rm{Q_{y}}", output.Q_unit)
        else:
            output.xaxis("\\rm{Q_{x}}", 'A^{-1}')
            output.yaxis("\\rm{Q_{y}}", 'A^{-1}')
        if data_conv_i is not None:
            output.zaxis("\\rm{Intensity}", output.I_unit)
        else:
            output.zaxis("\\rm{Intensity}", "cm^{-1}")

        # Store loading process information
        output.meta_data['loader'] = self.type_name

        return output
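

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the reader API).
# It assumes a hypothetical Q-map file named "example_2d.DAT" in the working
# directory; any IGOR/NIST 2D q_map .DAT file should behave the same way.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    reader = Reader()
    data2d = reader.read("example_2d.DAT")  # hypothetical input file
    print("Loaded %d points, Qx in [%g, %g], Qy in [%g, %g]"
          % (len(data2d.data), data2d.xmin, data2d.xmax,
             data2d.ymin, data2d.ymax))
    # Round-trip: write the qx, qy, I columns back out in the same format.
    reader.write("example_2d_copy.DAT", data2d)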