Changeset 4fdcc65 in sasview
- Timestamp: Sep 12, 2018 3:13:34 PM (6 years ago)
- Branches: master, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, unittest-saveload
- Children: 2ca5d57b
- Parents: 8d5e11c
- Files: 4 edited
Legend:
- Unmodified
- Added
- Removed
src/sas/sascalc/dataloader/data_info.py  (r8d5e11c → r4fdcc65)

@@ around old line 991 / new line 991 @@
         clone._yunit = self._yunit
         clone._zunit = self._zunit
+        clone.x_bins = self.x_bins
+        clone.y_bins = self.y_bins

         clone.title = self.title
@@ around old line 1186 / new line 1188 @@
         final_dataset.yaxis(data._yaxis, data._yunit)
         final_dataset.zaxis(data._zaxis, data._zunit)
-        if len(data.data.shape) == 2:
-            n_rows, n_cols = data.data.shape
-            final_dataset.y_bins = data.qy_data[0::int(n_cols)]
-            final_dataset.x_bins = data.qx_data[:int(n_cols)]
+        final_dataset.y_bins = data.y_bins
+        final_dataset.x_bins = data.x_bins
     else:
         return_string = ("Should Never Happen: _combine_data_info_with_plottabl"
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py  (r9dc1500 → r4fdcc65)

@@ around old line 12 / new line 12 @@
     Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
     TransmissionSpectrum, Detector
-from ..data_info import combine_data_info_with_plottable
 from ..loader_exceptions import FileContentsException, DefaultReaderException
 from ..file_reader_base_class import FileReader, decode
+

 def h5attr(node, key, default=None):
     return decode(node.attrs.get(key, default))

+
 class Reader(FileReader):
@@ around old line 71 / new line 72 @@
         except Exception as e:
             if extension not in self.ext:
-                msg = "NXcanSAS HDF5 Reader could not load file {}".format(basename + extension)
+                msg = "NXcanSAS Reader could not load file {}".format(
+                    basename + extension)
                 raise DefaultReaderException(msg)
             raise FileContentsException(e.message)
@@ around old line 85 / new line 87 @@
         self.raw_data.close()

-        for dataset in self.output:
-            if isinstance(dataset, Data1D):
-                if dataset.x.size < 5:
-                    self.output = []
-                    raise FileContentsException("Fewer than 5 data points found.")
+        for data_set in self.output:
+            if isinstance(data_set, Data1D):
+                if data_set.x.size < 5:
+                    exception = FileContentsException(
+                        "Fewer than 5 data points found.")
+                    data_set.errors.append(exception)

     def reset_state(self):
@@ around old line 105 / new line 108 @@
         self.i_name = u''
         self.i_node = u''
-        self.q_uncertainties = []
-        self.q_resolutions = []
+        self.q_uncertainties = None
+        self.q_resolutions = None
         self.i_uncertainties = u''
         self.parent_class = u''
@@ around old line 257 / new line 260 @@
     def process_2d_data_object(self, data_set, key, unit):
         if key == self.i_name:
-            self.current_dataset.x_bins, self.current_dataset.y_bins = \
-                data_set.shape
-            self.current_dataset.data = data_set.flatten()
+            self.current_dataset.data = data_set
             self.current_dataset.zaxis("Intensity", unit)
         elif key == self.i_uncertainties:
@@ around old line 268 / new line 269 @@
             if self.q_name[0] == self.q_name[1]:
                 # All q data in a single array
-                self.current_dataset.qx_data = data_set[0].flatten()
-                self.current_dataset.qy_data = data_set[1].flatten()
+                self.current_dataset.qx_data = data_set[0]
+                self.current_dataset.qy_data = data_set[1]
             elif self.q_name.index(key) == 0:
-                self.current_dataset.qx_data = data_set.flatten()
+                self.current_dataset.qx_data = data_set
             elif self.q_name.index(key) == 1:
-                self.current_dataset.qy_data = data_set.flatten()
+                self.current_dataset.qy_data = data_set
             elif key in self.q_uncertainties or key in self.q_resolutions:
                 if ((self.q_uncertainties[0] == self.q_uncertainties[1]) or
@@ around old line 537 / new line 538 @@
         if dataset.data.ndim == 2:
             (n_rows, n_cols) = dataset.data.shape
-            dataset.y_bins = dataset.qy_data[0::n_cols]
-            dataset.x_bins = dataset.qx_data[0::n_rows]
+            print(n_rows)
+            print(n_cols)
+            flat_qy = dataset.qy_data[0::n_cols].flatten()
+            if flat_qy[0] == flat_qy[1]:
+                flat_qy = np.transpose(dataset.qy_data)[0::n_cols].flatten()
+            dataset.y_bins = np.unique(flat_qy)
+            flat_qx = dataset.qx_data[0::n_rows].flatten()
+            if flat_qx[0] == flat_qx[1]:
+                flat_qx = np.transpose(dataset.qx_data)[0::n_rows].flatten()
+            dataset.x_bins = np.unique(flat_qx)
+            print(dataset.x_bins)
+            print(len(dataset.x_bins))
+            print(dataset.y_bins)
+            print(len(dataset.y_bins))
             dataset.data = dataset.data.flatten()
+            dataset.qx_data = dataset.qx_data.flatten()
+            dataset.qy_data = dataset.qy_data.flatten()
         self.current_dataset = dataset
         self.send_to_output()
@@ around old line 589 / new line 604 @@
         try:
             iter(iterable)
-            if (not isinstance(iterable, np.ndarray)) or (isinstance(iterable, str)
-                    or isinstance(iterable, unicode)):
+            if (not isinstance(iterable, np.ndarray) and not isinstance(
+                    iterable, list)) or (isinstance(iterable, str) or
+                    isinstance(iterable, unicode)):
                 raise TypeError
         except TypeError:
src/sas/sascalc/file_converter/nxcansas_writer.py  (rcf29187 → r4fdcc65)

@@ around old line 109 / new line 109 @@
         sasentry.attrs['version'] = '1.0'

-        i = 1
-
-        for data_obj in dataset:
-            data_entry = sasentry.create_group("sasdata{0:0=2d}".format(i))
+        for i, data_obj in enumerate(dataset):
+            data_entry = sasentry.create_group("sasdata{0:0=2d}".format(i+1))
             data_entry.attrs['canSAS_class'] = 'SASdata'
             if isinstance(data_obj, Data1D):
@@ around old line 118 / new line 116 @@
             elif isinstance(data_obj, Data2D):
                 self._write_2d_data(data_obj, data_entry)
-            i += 1

         data_info = dataset[0]
Note: See TracChangeset for help on using the changeset viewer.