Changeset 9e6aeaf in sasview for src/sas/sascalc/dataloader
- Timestamp:
- Sep 25, 2017 3:35:29 PM (7 years ago)
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- 3cb3a51
- Parents:
- 9efdb29 (diff), 0315b63 (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Location:
- src/sas/sascalc/dataloader
- Files:
- 17 edited
src/sas/sascalc/dataloader/data_info.py
(r574adc7 → r9e6aeaf)

@@ -1176 +1176 @@
         final_dataset.yaxis(data._yaxis, data._yunit)
         final_dataset.zaxis(data._zaxis, data._zunit)
-        final_dataset.x_bins = data.x_bins
-        final_dataset.y_bins = data.y_bins
+        if len(data.data.shape) == 2:
+            n_rows, n_cols = data.data.shape
+            final_dataset.y_bins = data.qy_data[0::int(n_cols)]
+            final_dataset.x_bins = data.qx_data[:int(n_cols)]
     else:
         return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \
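The new branch recomputes the bin axes from the flattened q arrays instead of copying data.x_bins/data.y_bins. A minimal standalone sketch (hypothetical values, not SasView code) of why that slicing recovers the axes from a row-major flattened meshgrid:

    import numpy as np

    x_bins = np.array([0.1, 0.2, 0.3])             # 3 columns
    y_bins = np.array([1.0, 2.0])                  # 2 rows
    qx_grid, qy_grid = np.meshgrid(x_bins, y_bins)
    qx_data = qx_grid.flatten()                    # qx repeats every row
    qy_data = qy_grid.flatten()                    # qy is constant within a row

    n_rows, n_cols = 2, 3
    assert np.array_equal(qx_data[:n_cols], x_bins)       # first row gives the x axis
    assert np.array_equal(qy_data[0::n_cols], y_bins)     # every n_cols-th entry gives the y axis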
src/sas/sascalc/dataloader/file_reader_base_class.py
(r7b50f14 → r9e6aeaf)

@@ -197 +197 @@
             dataset.x_bins = dataset.qx_data[:int(n_cols)]
             dataset.data = dataset.data.flatten()
+            if len(dataset.data) > 0:
+                dataset.xmin = np.min(dataset.qx_data)
+                dataset.xmax = np.max(dataset.qx_data)
+                dataset.ymin = np.min(dataset.qy_data)
+                dataset.ymax = np.max(dataset.qx_data)
 
     def format_unit(self, unit=None):
@@ -221 +226 @@
         self.output = []
 
-    def remove_empty_q_values(self, has_error_dx=False, has_error_dy=False,
-                              has_error_dxl=False, has_error_dxw=False):
+    def data_cleanup(self):
+        """
+        Clean up the data sets and refresh everything
+        :return: None
+        """
+        self.remove_empty_q_values()
+        self.send_to_output()  # Combine datasets with DataInfo
+        self.current_datainfo = DataInfo()  # Reset DataInfo
+
+    def remove_empty_q_values(self):
         """
         Remove any point where Q == 0
         """
-        x = self.current_dataset.x
-        self.current_dataset.x = self.current_dataset.x[x != 0]
-        self.current_dataset.y = self.current_dataset.y[x != 0]
-        if has_error_dy:
-            self.current_dataset.dy = self.current_dataset.dy[x != 0]
-        if has_error_dx:
-            self.current_dataset.dx = self.current_dataset.dx[x != 0]
-        if has_error_dxl:
-            self.current_dataset.dxl = self.current_dataset.dxl[x != 0]
-        if has_error_dxw:
-            self.current_dataset.dxw = self.current_dataset.dxw[x != 0]
+        if isinstance(self.current_dataset, plottable_1D):
+            # Booleans for resolutions
+            has_error_dx = self.current_dataset.dx is not None
+            has_error_dxl = self.current_dataset.dxl is not None
+            has_error_dxw = self.current_dataset.dxw is not None
+            has_error_dy = self.current_dataset.dy is not None
+            # Create arrays of zeros for non-existent resolutions
+            if has_error_dxw and not has_error_dxl:
+                array_size = self.current_dataset.dxw.size - 1
+                self.current_dataset.dxl = np.append(self.current_dataset.dxl,
+                                                     np.zeros([array_size]))
+                has_error_dxl = True
+            elif has_error_dxl and not has_error_dxw:
+                array_size = self.current_dataset.dxl.size - 1
+                self.current_dataset.dxw = np.append(self.current_dataset.dxw,
+                                                     np.zeros([array_size]))
+                has_error_dxw = True
+            elif not has_error_dxl and not has_error_dxw and not has_error_dx:
+                array_size = self.current_dataset.x.size - 1
+                self.current_dataset.dx = np.append(self.current_dataset.dx,
+                                                    np.zeros([array_size]))
+                has_error_dx = True
+            if not has_error_dy:
+                array_size = self.current_dataset.y.size - 1
+                self.current_dataset.dy = np.append(self.current_dataset.dy,
+                                                    np.zeros([array_size]))
+                has_error_dy = True
+
+            # Remove points where q = 0
+            x = self.current_dataset.x
+            self.current_dataset.x = self.current_dataset.x[x != 0]
+            self.current_dataset.y = self.current_dataset.y[x != 0]
+            if has_error_dy:
+                self.current_dataset.dy = self.current_dataset.dy[x != 0]
+            if has_error_dx:
+                self.current_dataset.dx = self.current_dataset.dx[x != 0]
+            if has_error_dxl:
+                self.current_dataset.dxl = self.current_dataset.dxl[x != 0]
+            if has_error_dxw:
+                self.current_dataset.dxw = self.current_dataset.dxw[x != 0]
+        elif isinstance(self.current_dataset, plottable_2D):
+            has_error_dqx = self.current_dataset.dqx_data is not None
+            has_error_dqy = self.current_dataset.dqy_data is not None
+            has_error_dy = self.current_dataset.err_data is not None
+            has_mask = self.current_dataset.mask is not None
+            x = self.current_dataset.qx_data
+            self.current_dataset.data = self.current_dataset.data[x != 0]
+            self.current_dataset.qx_data = self.current_dataset.qx_data[x != 0]
+            self.current_dataset.qy_data = self.current_dataset.qy_data[x != 0]
+            self.current_dataset.q_data = np.sqrt(
+                np.square(self.current_dataset.qx_data) + np.square(
+                    self.current_dataset.qy_data))
+            if has_error_dy:
+                self.current_dataset.err_data = self.current_dataset.err_data[x != 0]
+            if has_error_dqx:
+                self.current_dataset.dqx_data = self.current_dataset.dqx_data[x != 0]
+            if has_error_dqy:
+                self.current_dataset.dqy_data = self.current_dataset.dqy_data[x != 0]
+            if has_mask:
+                self.current_dataset.mask = self.current_dataset.mask[x != 0]
 
     def reset_data_list(self, no_lines=0):
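The rewritten remove_empty_q_values applies a single boolean mask across every parallel array so the columns stay aligned after filtering. A standalone sketch of the idiom (hypothetical data, not the reader itself):

    import numpy as np

    q = np.array([0.0, 0.01, 0.02, 0.0, 0.03])
    intensity = np.array([5.0, 4.2, 3.1, 9.9, 2.5])
    dq = np.array([0.001, 0.001, 0.002, 0.001, 0.002])

    keep = q != 0                      # one mask, applied to every column
    q, intensity, dq = q[keep], intensity[keep], dq[keep]
    print(q)                           # [0.01 0.02 0.03]
    print(intensity)                   # [4.2 3.1 2.5]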
src/sas/sascalc/dataloader/readers/abs_reader.py
(r46cf4c9 → r9e6aeaf)

@@ -104 +104 @@
                 # Sample thickness in mm
                 try:
-                    value = float(line_toks[5])
+                    # ABS writer adds 'C' with no space to the end of the
+                    # thickness column. Remove it if it is there before
+                    # converting the thickness.
+                    if line_toks[5][-1] not in '012345679.':
+                        value = float(line_toks[5][:-1])
+                    else:
+                        value = float(line_toks[5])
                     if self.current_datainfo.sample.thickness_unit != 'cm':
                         conv = Converter('cm')
@@ -196 +202 @@
                     is_data_started = True
 
-        self.remove_empty_q_values(True, True)
+        self.remove_empty_q_values()
 
         # Sanity check
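A standalone sketch of the same trailing-character guard (hypothetical helper, using the full digit set rather than the changeset's literal '012345679.'):

    def parse_thickness(token):
        # Strip a stray unit letter such as the ABS writer's trailing 'C'
        if token and token[-1] not in '0123456789.':
            token = token[:-1]
        return float(token)

    print(parse_thickness('0.1C'))    # 0.1
    print(parse_thickness('0.15'))    # 0.15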
src/sas/sascalc/dataloader/readers/ascii_reader.py
(rf7d720f → r9e6aeaf)

@@ -156 +156 @@
             raise FileContentsException(msg)
 
-        self.remove_empty_q_values(has_error_dx, has_error_dy)
+        self.remove_empty_q_values()
         self.current_dataset.xaxis("\\rm{Q}", 'A^{-1}')
         self.current_dataset.yaxis("\\rm{Intensity}", "cm^{-1}")
src/sas/sascalc/dataloader/readers/cansas_reader.py
(r9efdb29 → r9e6aeaf)

@@ -100 +100 @@
         xml_file = self.f_open.name
         # We don't sure f_open since lxml handles opnening/closing files
-        if not self.f_open.closed:
-            self.f_open.close()
-
-        basename, _ = os.path.splitext(os.path.basename(xml_file))
-
         try:
             # Raises FileContentsException
             self.load_file_and_schema(xml_file, schema_path)
-            self.current_datainfo = DataInfo()
-            # Raises FileContentsException if file doesn't meet CanSAS schema
+            # Parse each SASentry
+            entry_list = self.xmlroot.xpath('/ns:SASroot/ns:SASentry',
+                                            namespaces={
+                                                'ns': self.cansas_defaults.get(
+                                                    "ns")
+                                            })
             self.is_cansas(self.extension)
-            self.invalid = False  # If we reach this point then file must be valid CanSAS
-
-            # Parse each SASentry
-            entry_list = self.xmlroot.xpath('/ns:SASroot/ns:SASentry', namespaces={
-                'ns': self.cansas_defaults.get("ns")
-            })
-            # Look for a SASentry
-            self.names.append("SASentry")
             self.set_processing_instructions()
-
             for entry in entry_list:
-                self.current_datainfo.filename = basename + self.extension
-                self.current_datainfo.meta_data["loader"] = "CanSAS XML 1D"
-                self.current_datainfo.meta_data[PREPROCESS] = self.processing_instructions
                 self._parse_entry(entry)
                 self.data_cleanup()
@@ -146 +133 @@
             invalid_xml = self.find_invalid_xml()
             if invalid_xml != "":
+                basename, _ = os.path.splitext(
+                    os.path.basename(self.f_open.name))
                 invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml
                 raise DataReaderException(invalid_xml)  # Handled by base class
@@ -160 +149 @@
         except Exception as e:  # Convert all other exceptions to FileContentsExceptions
             raise FileContentsException(str(e))
-
+        finally:
+            if not self.f_open.closed:
+                self.f_open.close()
 
     def load_file_and_schema(self, xml_file, schema_path=""):
@@ -205 +196 @@
         if not self._is_call_local() and not recurse:
             self.reset_state()
+        if not recurse:
+            self.current_datainfo = DataInfo()
+            # Raises FileContentsException if file doesn't meet CanSAS schema
+            self.invalid = False
+            # Look for a SASentry
             self.data = []
-            self.current_datainfo = DataInfo()
+            self.parent_class = "SASentry"
             self.names.append("SASentry")
-            self.parent_class = "SASentry"
+            self.current_datainfo.meta_data["loader"] = "CanSAS XML 1D"
+            self.current_datainfo.meta_data[
+                PREPROCESS] = self.processing_instructions
+        if self._is_call_local() and not recurse:
+            basename, _ = os.path.splitext(os.path.basename(self.f_open.name))
+            self.current_datainfo.filename = basename + self.extension
         # Create an empty dataset if no data has been passed to the reader
         if self.current_dataset is None:
-            self.current_dataset = plottable_1D(np.empty(0), np.empty(0),
-                                                np.empty(0), np.empty(0))
+            self._initialize_new_data_set(dom)
         self.base_ns = "{" + CANSAS_NS.get(self.cansas_version).get("ns") + "}"
@@ -224 +224 @@
             tagname_original = tagname
             # Skip this iteration when loading in save state information
-            if tagname == "fitting_plug_in" or tagname == "pr_inversion" or tagname == "invariant":
+            if tagname in ["fitting_plug_in", "pr_inversion", "invariant", "corfunc"]:
                 continue
             # Get where to store content
@@ -254 +254 @@
                     self._add_intermediate()
                 else:
+                    # TODO: Clean this up to make it faster (fewer if/elifs)
                     if isinstance(self.current_dataset, plottable_2D):
                         data_point = node.text
@@ -498 +499 @@
         self.sort_two_d_data()
         self.reset_data_list()
-        empty = None
-        return self.output[0], empty
-
-    def data_cleanup(self):
-        """
-        Clean up the data sets and refresh everything
-        :return: None
-        """
-        has_error_dx = self.current_dataset.dx is not None
-        has_error_dxl = self.current_dataset.dxl is not None
-        has_error_dxw = self.current_dataset.dxw is not None
-        has_error_dy = self.current_dataset.dy is not None
-        self.remove_empty_q_values(has_error_dx=has_error_dx,
-                                   has_error_dxl=has_error_dxl,
-                                   has_error_dxw=has_error_dxw,
-                                   has_error_dy=has_error_dy)
-        self.send_to_output()  # Combine datasets with DataInfo
-        self.current_datainfo = DataInfo()  # Reset DataInfo
+        return self.output[0], None
 
     def _is_call_local(self):
@@ -549 +533 @@
             self.aperture = Aperture()
         elif self.parent_class == 'SASdata':
-            self._check_for_empty_resolution()
             self.data.append(self.current_dataset)
 
@@ -605 +588 @@
         if 'unit' in attr and attr.get('unit') is not None:
             try:
-                local_unit = attr['unit']
+                unit = attr['unit']
+                unit_list = unit.split("|")
+                if len(unit_list) > 1:
+                    self.current_dataset.xaxis(unit_list[0].strip(),
+                                               unit_list[1].strip())
+                    local_unit = unit_list[1]
+                else:
+                    local_unit = unit
                 unitname = self.ns_list.current_level.get("unit", "")
                 if "SASdetector" in self.names:
@@ -659 +649 @@
         return node_value, value_unit
 
-    def _check_for_empty_resolution(self):
-        """
-        a method to check all resolution data sets are the same size as I and q
-        """
-        dql_exists = False
-        dqw_exists = False
-        dq_exists = False
-        di_exists = False
-        if self.current_dataset.dxl is not None:
-            dql_exists = True
-        if self.current_dataset.dxw is not None:
-            dqw_exists = True
-        if self.current_dataset.dx is not None:
-            dq_exists = True
-        if self.current_dataset.dy is not None:
-            di_exists = True
-        if dqw_exists and not dql_exists:
-            array_size = self.current_dataset.dxw.size
-            self.current_dataset.dxl = np.zeros(array_size)
-        elif dql_exists and not dqw_exists:
-            array_size = self.current_dataset.dxl.size
-            self.current_dataset.dxw = np.zeros(array_size)
-        elif not dql_exists and not dqw_exists and not dq_exists:
-            array_size = self.current_dataset.x.size
-            self.current_dataset.dx = np.append(self.current_dataset.dx,
-                                                np.zeros([array_size]))
-        if not di_exists:
-            array_size = self.current_dataset.y.size
-            self.current_dataset.dy = np.append(self.current_dataset.dy,
-                                                np.zeros([array_size]))
-
     def _initialize_new_data_set(self, node=None):
         if node is not None:
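The SASentry lookup above uses lxml's namespaced XPath; a default XML namespace has to be bound to an explicit prefix before it can be queried. A minimal sketch (hypothetical document and namespace URI):

    from lxml import etree

    xml = (b'<SASroot xmlns="urn:example:cansas">'
           b'<SASentry/><SASentry/></SASroot>')
    root = etree.fromstring(xml)
    entries = root.xpath('/ns:SASroot/ns:SASentry',
                         namespaces={'ns': 'urn:example:cansas'})
    print(len(entries))               # 2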
src/sas/sascalc/dataloader/__init__.py
(rb699768 → r574adc7)

@@ -1 +1 @@
-from data_info import *
-from manipulations import *
-from readers import *
+from .data_info import *
+from .manipulations import *
+from .readers import *
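Python 3 removed implicit relative imports (PEP 328), so sibling modules must be named with an explicit leading dot; the same spelling also works under Python 2.6+. A hypothetical two-file illustration:

    # pkg/helpers.py  (hypothetical module)
    def greet():
        return "hello"

    # pkg/__init__.py
    # Python 2 only:   from helpers import greet
    # Python 2 and 3:  explicit relative import
    from .helpers import greet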
src/sas/sascalc/dataloader/loader.py
(rdcb91cf → rdc8d1c2)

@@ -26 +26 @@
 import time
 from zipfile import ZipFile
 
+from sas.sascalc.data_util.registry import ExtensionRegistry
+
 # Default readers are defined in the readers sub-module
-import readers
-from loader_exceptions import NoKnownLoaderException, FileContentsException,\
+from . import readers
+from .loader_exceptions import NoKnownLoaderException, FileContentsException,\
     DefaultReaderException
-from readers import ascii_reader
-from readers import cansas_reader
-from readers import cansas_reader_HDF5
+from .readers import ascii_reader
+from .readers import cansas_reader
+from .readers import cansas_reader_HDF5
 
 logger = logging.getLogger(__name__)
@@ -73 +75 @@
         try:
             return super(Registry, self).load(path, format=format)
+        #except Exception: raise  # for debugging, don't use fallback loader
         except NoKnownLoaderException as nkl_e:
             pass  # Try the ASCII reader
@@ -327 +330 @@
         extlist = [ext for ext in self.extensions() if path.endswith(ext)]
         # Sort matching extensions by decreasing order of length
-        extlist.sort(lambda a, b: len(a) < len(b))
+        extlist.sort(key=len)
         # Combine loaders for matching extensions into one big list
         writers = []
@@ -341 +344 @@
         # Raise an error if there are no matching extensions
         if len(writers) == 0:
-            raise ValueError, "Unknown file type for " + path
+            raise ValueError("Unknown file type for " + path)
         # All done
         return writers
@@ -360 +363 @@
         try:
             return fn(path, data)
-        except:
+        except Exception:
             pass  # give other loaders a chance to succeed
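Python 3 also dropped cmp-style arguments to list.sort, hence the key-function rewrite above. Note that key=len alone sorts ascending; matching the "decreasing order of length" described in the comment would additionally need reverse=True. A quick sketch with hypothetical extensions:

    exts = ['.xml', '.ses', '.svs.xml']
    exts.sort(key=len, reverse=True)  # longest (most specific) extension first
    print(exts)                       # ['.svs.xml', '.xml', '.ses']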
src/sas/sascalc/dataloader/manipulations.py
(r324e0bf → r574adc7)

@@ -26 +26 @@
 
 #from data_info import plottable_2D
-from data_info import Data1D
+from .data_info import Data1D
 
 
src/sas/sascalc/dataloader/readers/__init__.py
(r488f3a5 → raaa801e)

@@ -1 +1 @@
 # Method to associate extensions to default readers
-from associations import read_associations
+from .associations import read_associations
src/sas/sascalc/dataloader/readers/anton_paar_saxs_reader.py
(rfafe52a → ra5bd87a)

@@ -63 +63 @@
         ## Reinitialize the class when loading a new data file to reset all class variables
         self.reset_state()
-        buff = self.f_open.read()
+        buff = self.readall()
         self.raw_data = buff.splitlines()
         self.read_data()
src/sas/sascalc/dataloader/readers/associations.py
(rce8c7bd → r574adc7)

@@ -40 +40 @@
     """
     # For each FileType entry, get the associated reader and extension
-    for ext, reader in settings.iteritems():
+    for ext, reader in settings.items():
         if reader is not None and ext is not None:
             # Associate the extension with a particular reader
@@ -47 +47 @@
             # and remove the extra line below.
             try:
-                exec "import %s" % reader
-                exec "loader.associate_file_type('%s', %s)" % (ext.lower(),
-                                                               reader)
-                exec "loader.associate_file_type('%s', %s)" % (ext.upper(),
-                                                               reader)
+                exec("from . import %s" % reader)
+                exec("loader.associate_file_type('%s', %s)"
+                     % (ext.lower(), reader))
+                exec("loader.associate_file_type('%s', %s)"
+                     % (ext.upper(), reader))
             except:
                 msg = "read_associations: skipping association"
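exec-as-function keeps this block valid on both Python 2 and 3; the usual exec-free alternative is importlib. A hedged sketch of the same dynamic association (hypothetical helper, assuming the loader object and reader package shown elsewhere in this changeset):

    import importlib

    def associate(loader, ext, reader_name,
                  package='sas.sascalc.dataloader.readers'):
        # Import the reader module by name, then register it for the extension
        module = importlib.import_module('.' + reader_name, package=package)
        loader.associate_file_type(ext.lower(), module)
        loader.associate_file_type(ext.upper(), module)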
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
(rcd57c7d4 → r7b50f14)

@@ -9 +9 @@
 import sys
 
-from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\
+from ..data_info import plottable_1D, plottable_2D,\
     Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
     TransmissionSpectrum, Detector
-from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
-from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DefaultReaderException
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
-
+from ..data_info import combine_data_info_with_plottable
+from ..loader_exceptions import FileContentsException, DefaultReaderException
+from ..file_reader_base_class import FileReader, decode
+
+def h5attr(node, key, default=None):
+    return decode(node.attrs.get(key, default))
 
 class Reader(FileReader):
@@ -130 +132 @@
             # Get all information for the current key
             value = data.get(key)
-            if value.attrs.get(u'canSAS_class') is not None:
-                class_name = value.attrs.get(u'canSAS_class')
-            else:
-                class_name = value.attrs.get(u'NX_class')
+            class_name = h5attr(value, u'canSAS_class')
+            if class_name is None:
+                class_name = h5attr(value, u'NX_class')
             if class_name is not None:
                 class_prog = re.compile(class_name)
@@ -225 +226 @@
 
         for data_point in data_set:
+            if data_point.dtype.char == 'S':
+                data_point = decode(bytes(data_point))
             # Top Level Meta Data
             if key == u'definition':
@@ -231 +234 @@
                 self.current_datainfo.run.append(data_point)
                 try:
-                    run_name = value.attrs['name']
+                    run_name = h5attr(value, 'name')
                     run_dict = {data_point: run_name}
                     self.current_datainfo.run_name = run_dict
-                except:
+                except Exception:
                     pass
             elif key == u'title':
@@ -576 +579 @@
         :return: unit for the value passed to the method
         """
-        unit = value.attrs.get(u'units')
+        unit = h5attr(value, u'units')
         if unit is None:
-            unit = value.attrs.get(u'unit')
+            unit = h5attr(value, u'unit')
         # Convert the unit formats
         if unit == "1/A":
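Under Python 3, h5py hands byte-string attributes back as bytes rather than str, which is what the new h5attr/decode wrapper papers over. A standalone sketch (hypothetical file name, with a local stand-in for the base-class decode helper):

    import h5py

    def decode(value):
        # Stand-in for the base-class helper: bytes -> str, else pass through
        return value.decode('utf-8') if isinstance(value, bytes) else value

    with h5py.File('example.h5', 'w') as f:      # hypothetical file
        f.attrs['canSAS_class'] = b'SASroot'     # byte string, as older files contain

    with h5py.File('example.h5', 'r') as f:
        raw = f.attrs.get('canSAS_class')        # bytes, not str, under Python 3
        print(decode(raw))                       # 'SASroot'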
src/sas/sascalc/dataloader/readers/danse_reader.py
(ra78a02f → raf3e9f5)

@@ -14 +14 @@
 import math
 import os
+import logging
+
 import numpy as np
-import logging
-from sas.sascalc.dataloader.data_info import plottable_2D, DataInfo, Detector
-from sas.sascalc.dataloader.manipulations import reader2D_converter
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
-from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DataReaderException
+
+from ..data_info import plottable_2D, DataInfo, Detector
+from ..manipulations import reader2D_converter
+from ..file_reader_base_class import FileReader
+from ..loader_exceptions import FileContentsException, DataReaderException
 
 logger = logging.getLogger(__name__)
@@ -78 +80 @@
         data_start_line = 1
         while read_on:
-            line = self.f_open.readline()
+            line = self.nextline()
             data_start_line += 1
             if line.find("DATA:") >= 0:
@@ -112 +114 @@
             raise FileContentsException(msg)
 
-        for line_num, data_str in enumerate(self.f_open.readlines()):
+        for line_num, data_str in enumerate(self.nextlines()):
             toks = data_str.split()
             try:
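Several readers in this changeset swap direct f_open calls for base-class helpers (nextline, nextlines, readall). A hypothetical sketch of the shape of such helpers; the real FileReader base class also centralizes decoding and buffering:

    class MiniFileReader:
        # Hypothetical subset; not the actual SasView base class
        def __init__(self, f_open):
            self.f_open = f_open

        def nextline(self):
            return self.f_open.readline()

        def nextlines(self):
            for line in self.f_open:
                yield line

        def readall(self):
            return self.f_open.read()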
src/sas/sascalc/dataloader/readers/red2d_reader.py
(r2f85af7 → rc8321cfc)

@@ -10 +10 @@
 ######################################################################
 import os
+import math
+import time
+
 import numpy as np
-import math
-from sas.sascalc.dataloader.data_info import plottable_2D, DataInfo, Detector
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
-from sas.sascalc.dataloader.loader_exceptions import FileContentsException
-
-# Look for unit converter
-has_converter = True
-try:
-    from sas.sascalc.data_util.nxsunit import Converter
-except:
-    has_converter = False
+
+from sas.sascalc.data_util.nxsunit import Converter
+
+from ..data_info import plottable_2D, DataInfo, Detector
+from ..file_reader_base_class import FileReader
+from ..loader_exceptions import FileContentsException
 
 
@@ -31 +29 @@
     try:
         return float(x_point)
-    except:
+    except Exception:
         return 0
@@ -51 +49 @@
     :param data: data2D
     """
-    import time
     # Write the file
     try:
@@ -72 +69 @@
     def get_file_contents(self):
         # Read file
-        buf = self.f_open.read()
+        buf = self.readall()
         self.f_open.close()
         # Instantiate data object
@@ -119 +116 @@
                 try:
                     wavelength = float(line_toks[1])
-                    # Units
-                    if has_converter == True and \
-                            self.current_datainfo.source.wavelength_unit != 'A':
+                    # Wavelength is stored in angstroms; convert if necessary
+                    if self.current_datainfo.source.wavelength_unit != 'A':
                         conv = Converter('A')
                         wavelength = conv(wavelength,
                                           units=self.current_datainfo.source.wavelength_unit)
-                except:
-                    #Not required
-                    pass
-                # Distance in mm
+                except Exception:
+                    pass  # Not required
                 try:
                     distance = float(line_toks[3])
-                    # Units
-                    if has_converter == True and self.current_datainfo.detector[0].distance_unit != 'm':
+                    # Distance is stored in meters; convert if necessary
+                    if self.current_datainfo.detector[0].distance_unit != 'm':
                         conv = Converter('m')
                         distance = conv(distance,
                                         units=self.current_datainfo.detector[0].distance_unit)
-                except:
-                    #Not required
-                    pass
-
-                # Distance in meters
+                except Exception:
+                    pass  # Not required
+
                 try:
                     transmission = float(line_toks[4])
-                except:
-                    #Not required
-                    pass
+                except Exception:
+                    pass  # Not required
 
             if line.count("LAMBDA") > 0:
@@ -170 +161 @@
 
             ## Read and get data.
-            if data_started == True:
+            if data_started:
                 line_toks = line.split()
                 if len(line_toks) == 0:
@@ -178 +169 @@
                     col_num = len(line_toks)
                     break
+
         # Make numpy array to remove header lines using index
         lines_array = np.array(lines)
@@ -203 +195 @@
         # Change it(string) into float
         #data_list = map(float,data_list)
-        data_list1 = map(check_point, data_list)
+        data_list1 = list(map(check_point, data_list))
 
         # numpy array form
@@ -211 +203 @@
         try:
             data_point = data_array.reshape(row_num, col_num).transpose()
-        except:
+        except Exception:
             msg = "red2d_reader can't read this file: Incorrect number of data points provided."
             raise FileContentsException(msg)
@@ -325 +317 @@
 
         # Units of axes
-        self.current_dataset.xaxis("\\rm{Q_{x}}", 'A^{-1}')
-        self.current_dataset.yaxis("\\rm{Q_{y}}", 'A^{-1}')
-        self.current_dataset.zaxis("\\rm{Intensity}", "cm^{-1}")
+        self.current_dataset.xaxis(r"\rm{Q_{x}}", 'A^{-1}')
+        self.current_dataset.yaxis(r"\rm{Q_{y}}", 'A^{-1}')
+        self.current_dataset.zaxis(r"\rm{Intensity}", "cm^{-1}")
 
         # Store loading process information
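One subtle Python 3 point above: map returns a single-use iterator, so a result that is indexed or reused must be materialized with list(map(...)). A quick sketch with a hypothetical stand-in for check_point:

    def check_point(tok):
        try:
            return float(tok)
        except Exception:
            return 0

    tokens = ['1.5', 'bad', '2.0']
    values = list(map(check_point, tokens))   # materialize the iterator
    print(values[1], values)                  # 0 [1.5, 0, 2.0]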
src/sas/sascalc/dataloader/readers/sesans_reader.py
(rbe43448 → r849094a)

@@ -6 +6 @@
 Jurrian Bakker
 """
+import os
+
 import numpy as np
-import os
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
-from sas.sascalc.dataloader.data_info import plottable_1D, DataInfo
-from sas.sascalc.dataloader.loader_exceptions import FileContentsException, DataReaderException
+
+from ..file_reader_base_class import FileReader
+from ..data_info import plottable_1D, DataInfo
+from ..loader_exceptions import FileContentsException, DataReaderException
 
 # Check whether we have a converter available
@@ -42 +44 @@
         self.output = []
 
-        line = self.f_open.readline()
+        line = self.nextline()
         params = {}
         while not line.startswith("BEGIN_DATA"):
             terms = line.split()
             if len(terms) >= 2:
                 params[terms[0]] = " ".join(terms[1:])
-            line = self.f_open.readline()
+            line = self.nextline()
         self.params = params
@@ -68 +70 @@
                 "handled by other software.")
 
-        headers = self.f_open.readline().split()
+        headers = self.nextline().split()
 
         self._insist_header(headers, "SpinEchoLength")
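The header loop above accumulates key/value pairs from whitespace-separated lines until BEGIN_DATA. A standalone sketch with a hypothetical SES-style header:

    import io

    text = io.StringIO(
        'FileFormatVersion 1.0\n'
        'Theta_zmax 0.1 radians\n'
        'BEGIN_DATA\n'
    )

    params = {}
    line = text.readline()
    while not line.startswith('BEGIN_DATA'):
        terms = line.split()
        if len(terms) >= 2:
            params[terms[0]] = ' '.join(terms[1:])  # key, then rejoined value
        line = text.readline()

    print(params)  # {'FileFormatVersion': '1.0', 'Theta_zmax': '0.1 radians'}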
src/sas/sascalc/dataloader/readers/tiff_reader.py
(r959eb01 → r574adc7)

Most rows in this diff are whitespace-only (trailing spaces stripped throughout the read() method); the substantive changes are the two raise statements, rewritten in the Python 3 call syntax:

@@ -44 +44 @@
         except:
             msg = "tiff_reader: could not load file. Missing Image module."
-            raise RuntimeError, msg
+            raise RuntimeError(msg)
 
         # Instantiate data object
         output = Data2D()
@@ -55 +55 @@
         try:
             im = Image.open(filename)
         except:
-            raise RuntimeError, "cannot open %s"%(filename)
+            raise RuntimeError("cannot open %s"%(filename))
         data = im.getdata()
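For context, a minimal sketch of pulling a TIFF frame into a 2D numpy array with Pillow (hypothetical file name; the reader above additionally builds err_data and mask arrays):

    import numpy as np
    from PIL import Image

    im = Image.open('detector_frame.tiff')   # hypothetical file
    data = np.asarray(im, dtype=float)       # shape (height, width)
    print(data.shape, data.min(), data.max())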
src/sas/sascalc/dataloader/readers/xml_reader.py
(rcd57c7d4 → r7b50f14)

@@ -16 +16 @@
 
 import logging
+
 from lxml import etree
 from lxml.builder import E
-from sas.sascalc.dataloader.file_reader_base_class import FileReader
+
+from ..file_reader_base_class import FileReader, decode
 
 logger = logging.getLogger(__name__)
@@ -151 +153 @@
         Converts an etree element into a string
         """
-        return etree.tostring(elem, pretty_print=pretty_print, \
-                              encoding=encoding)
+        return decode(etree.tostring(elem, pretty_print=pretty_print,
+                                     encoding=encoding))
 
     def break_processing_instructions(self, string, dic):
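The decode call is needed because lxml's etree.tostring returns bytes under Python 3 unless a text serialization is requested explicitly. A quick sketch:

    from lxml import etree

    elem = etree.fromstring('<a><b/></a>')
    raw = etree.tostring(elem)                       # bytes in Python 3
    text = raw.decode('utf-8')                       # what decode() amounts to
    same = etree.tostring(elem, encoding='unicode')  # or ask lxml for str directly
    print(type(raw), type(text), type(same))         # bytes, str, str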