source: sasview/src/sas/sascalc/dataloader/file_reader_base_class.py @ 248ff73

Branches/tags containing this file: ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
Last change on this file since 248ff73 was 248ff73, checked in by lewis, 7 years ago

Ensure unit tests pass

utest_averaging still fails as it relies on loading in an Igor 2D file,
which is no longer possible

  • Property mode set to 100644
File size: 8.4 KB
Line 
1"""
2This is the base file reader class most file readers should inherit from.
3All generic functionality required for a file loader/reader is built into this
4class
5"""
6
7import os
8import logging
9import numpy as np
10from abc import abstractmethod
11from loader_exceptions import NoKnownLoaderException, FileContentsException,\
12    DataReaderException, DefaultReaderException
13from data_info import Data1D, Data2D, DataInfo, plottable_1D, plottable_2D,\
14    combine_data_info_with_plottable
15
16logger = logging.getLogger(__name__)
17
18
class FileReader(object):
    """
    Base class most file readers should inherit from.

    Subclasses implement :meth:`get_file_contents` to parse the open file
    handle ``self.f_open``, populate ``self.current_dataset`` /
    ``self.current_datainfo``, and call :meth:`send_to_output` for each
    complete data set found.
    """
    # List of Data1D and Data2D objects to be sent back to data_loader
    output = []
    # Current plottable_(1D/2D) object being loaded in
    current_dataset = None
    # Current DataInfo object being loaded in
    current_datainfo = None
    # String to describe the type of data this reader can load
    type_name = "ASCII"
    # Wildcards to display
    type = ["Text files (*.txt|*.TXT)"]
    # List of allowed extensions
    ext = ['.txt']
    # Bypass extension check and try to load anyway
    allow_all = False
    # Able to import the unit converter
    has_converter = True
    # Open file handle
    f_open = None
    # Default value of zero
    _ZERO = 1e-16

    def __init__(self):
        # Shadow the class-level defaults with per-instance state so two
        # reader instances (or two successive loads) never share the same
        # mutable `output` list.
        self.output = []
        self.current_dataset = None
        self.current_datainfo = None
        self.f_open = None

    def read(self, filepath):
        """
        Basic file reader

        :param filepath: The full or relative path to a file to be loaded
        :return: List of Data1D/Data2D objects parsed from the file
        """
        if os.path.isfile(filepath):
            basename, extension = os.path.splitext(os.path.basename(filepath))
            self.extension = extension.lower()
            # If the file type is not allowed, return nothing
            if self.extension in self.ext or self.allow_all:
                # Try to load the file, but raise an error if unable to.
                try:
                    self.f_open = open(filepath, 'rb')
                    self.get_file_contents()

                except DataReaderException as e:
                    self.handle_error_message(e.message)
                # IOError is a distinct class in Python 2 (and an alias of
                # OSError in Python 3), so catch both to cover open() failures
                except (OSError, IOError) as e:
                    # If the file cannot be opened
                    msg = "Unable to open file: {}\n".format(filepath)
                    msg += str(e)
                    self.handle_error_message(msg)
                finally:
                    # f_open is still None if open() itself raised, so guard
                    # before touching it
                    if self.f_open is not None and not self.f_open.closed:
                        self.f_open.close()
                    if len(self.output) > 0:
                        # Sort the data that's been loaded
                        self.sort_one_d_data()
                        self.sort_two_d_data()
        else:
            msg = "Unable to find file at: {}\n".format(filepath)
            msg += "Please check your file path and try again."
            self.handle_error_message(msg)

        # Return a list of parsed entries that data_loader can manage
        return self.output

    def handle_error_message(self, msg):
        """
        Generic error handler to add an error to the current datainfo to
        propagate the error up the error chain.

        :param msg: Error message
        """
        if isinstance(self.current_datainfo, DataInfo):
            self.current_datainfo.errors.append(msg)
        else:
            logger.warning(msg)

    def send_to_output(self):
        """
        Helper that automatically combines the info and set and then appends
        the combined object to output
        """
        data_obj = combine_data_info_with_plottable(self.current_dataset,
                                                    self.current_datainfo)
        self.output.append(data_obj)

    def sort_one_d_data(self):
        """
        Sort 1D data along the X axis for consistency
        """
        # Per-point columns that must all be reordered with the same
        # permutation as x/y
        point_attrs = ('x', 'y', 'dx', 'dxl', 'dxw', 'dy', 'lam', 'dlam')
        for data in self.output:
            if not isinstance(data, Data1D):
                continue
            # Sort data by increasing x, breaking ties on y
            ind = np.lexsort((data.y, data.x))
            for attr in point_attrs:
                column = getattr(data, attr)
                if column is not None:
                    sorted_col = np.asarray(
                        [column[i] for i in ind]).astype(np.float64)
                    setattr(data, attr, sorted_col)
            if len(data.x) > 0:
                data.xmin = np.min(data.x)
                data.xmax = np.max(data.x)
                data.ymin = np.min(data.y)
                data.ymax = np.max(data.y)

    def sort_two_d_data(self):
        """
        Coerce all 2D data arrays to float64, compute |Q| and axis bounds,
        and flatten the intensity array for consistency between readers.
        """
        for dataset in self.output:
            if isinstance(dataset, Data2D):
                dataset.data = dataset.data.astype(np.float64)
                dataset.qx_data = dataset.qx_data.astype(np.float64)
                dataset.xmin = np.min(dataset.qx_data)
                dataset.xmax = np.max(dataset.qx_data)
                dataset.qy_data = dataset.qy_data.astype(np.float64)
                dataset.ymin = np.min(dataset.qy_data)
                dataset.ymax = np.max(dataset.qy_data)
                dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data
                                         + dataset.qy_data * dataset.qy_data)
                if dataset.err_data is not None:
                    dataset.err_data = dataset.err_data.astype(np.float64)
                if dataset.dqx_data is not None:
                    dataset.dqx_data = dataset.dqx_data.astype(np.float64)
                if dataset.dqy_data is not None:
                    dataset.dqy_data = dataset.dqy_data.astype(np.float64)
                if dataset.mask is not None:
                    dataset.mask = dataset.mask.astype(dtype=bool)

                if len(dataset.data.shape) == 2:
                    # Derive the bin centres from the regular Q grid: one y
                    # value per row, the first row's x values for the columns
                    n_rows, n_cols = dataset.data.shape
                    dataset.y_bins = dataset.qy_data[0::int(n_cols)]
                    dataset.x_bins = dataset.qx_data[:int(n_cols)]
                dataset.data = dataset.data.flatten()

    def set_all_to_none(self):
        """
        Set all mutable values to None for error handling purposes
        """
        self.current_dataset = None
        self.current_datainfo = None
        self.output = []

    def remove_empty_q_values(self, has_error_dx=False, has_error_dy=False):
        """
        Remove any point where Q == 0 from the current data set.

        :param has_error_dx: True if dx data was loaded (otherwise zeroed)
        :param has_error_dy: True if dy data was loaded (otherwise zeroed)
        """
        # Build the mask from the original x before it is reassigned
        keep = self.current_dataset.x != 0
        self.current_dataset.x = self.current_dataset.x[keep]
        self.current_dataset.y = self.current_dataset.y[keep]
        self.current_dataset.dy = self.current_dataset.dy[keep] if \
            has_error_dy else np.zeros(len(self.current_dataset.y))
        self.current_dataset.dx = self.current_dataset.dx[keep] if \
            has_error_dx else np.zeros(len(self.current_dataset.x))

    def reset_data_list(self, no_lines=0):
        """
        Reset the plottable_1D object

        :param no_lines: Maximum possible number of data points in the file
        """
        # Initialize data sets with arrays the maximum possible size
        x = np.zeros(no_lines)
        y = np.zeros(no_lines)
        dy = np.zeros(no_lines)
        dx = np.zeros(no_lines)
        self.current_dataset = plottable_1D(x, y, dx, dy)

    @staticmethod
    def splitline(line):
        """
        Splits a line into pieces based on common delimiters

        :param line: A single line of text
        :return: list of values
        """
        # Initial try for CSV (split on ,)
        toks = line.split(',')
        # Now try SCSV (split on ;)
        if len(toks) < 2:
            toks = line.split(';')
        # Now go for whitespace
        if len(toks) < 2:
            toks = line.split()
        return toks

    @abstractmethod
    def get_file_contents(self):
        """
        Reader specific class to access the contents of the file
        All reader classes that inherit from FileReader must implement
        """
        pass
Note: See TracBrowser for help on using the repository browser.