Changeset 9d786e5 in sasview for src/sas/sascalc
- Timestamp: Jul 27, 2017 11:08:18 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 8dec7e7
- Parents: 0b79323
- Location: src/sas/sascalc/dataloader
- Files: 2 edited
src/sas/sascalc/dataloader/file_reader_base_class.py
r0b79323 → r9d786e5

             elif isinstance(self.output[0], Data2D):
                 self.sort_two_d_data()
 
         except DataReaderException as e:
             self.handle_error_message(e.message)
…
             # Sort data by increasing x and remove 1st point
             ind = np.lexsort((data.y, data.x))
-            ind = ind[1:] # Remove 1st point (Q, I) = (0, 0)
-            data.x = np.asarray([data.x[i] for i in ind])
-            data.y = np.asarray([data.y[i] for i in ind])
+            data.x = np.asarray([data.x[i] for i in ind]).astype(np.float64)
+            data.y = np.asarray([data.y[i] for i in ind]).astype(np.float64)
             if data.dx is not None:
-                data.dx = np.asarray([data.dx[i] for i in ind])
+                data.dx = np.asarray([data.dx[i] for i in ind]).astype(np.float64)
             if data.dxl is not None:
-                data.dxl = np.asarray([data.dxl[i] for i in ind])
+                data.dxl = np.asarray([data.dxl[i] for i in ind]).astype(np.float64)
             if data.dxw is not None:
-                data.dxw = np.asarray([data.dxw[i] for i in ind])
+                data.dxw = np.asarray([data.dxw[i] for i in ind]).astype(np.float64)
             if data.dy is not None:
-                data.dy = np.asarray([data.dy[i] for i in ind])
+                data.dy = np.asarray([data.dy[i] for i in ind]).astype(np.float64)
             if data.lam is not None:
-                data.lam = np.asarray([data.lam[i] for i in ind])
+                data.lam = np.asarray([data.lam[i] for i in ind]).astype(np.float64)
             if data.dlam is not None:
-                data.dlam = np.asarray([data.dlam[i] for i in ind])
+                data.dlam = np.asarray([data.dlam[i] for i in ind]).astype(np.float64)
             data.xmin = np.min(data.x)
             data.xmax = np.max(data.x)
…
             final_list.append(data)
         self.output = final_list
+        self.remove_empty_q_values()
 
     def sort_two_d_data(self):
         final_list = []
         for dataset in self.output:
-            dataset.data = dataset.data.astype(np.float64)
-            dataset.qx_data = dataset.qx_data.astype(np.float64)
-            dataset.xmin = np.min(dataset.qx_data)
-            dataset.xmax = np.max(dataset.qx_data)
-            dataset.qy_data = dataset.qy_data.astype(np.float64)
-            dataset.ymin = np.min(dataset.qy_data)
-            dataset.ymax = np.max(dataset.qy_data)
-            dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data
-                                     + dataset.qy_data * dataset.qy_data)
-            if dataset.err_data is not None:
-                dataset.err_data = dataset.err_data.astype(np.float64)
-            if dataset.dqx_data is not None:
-                dataset.dqx_data = dataset.dqx_data.astype(np.float64)
-            if dataset.dqy_data is not None:
-                dataset.dqy_data = dataset.dqy_data.astype(np.float64)
-            if dataset.mask is not None:
-                dataset.mask = dataset.mask.astype(dtype=bool)
-
-            if len(dataset.shape) == 2:
-                n_rows, n_cols = dataset.shape
-                dataset.y_bins = dataset.qy_data[0::int(n_cols)]
-                dataset.x_bins = dataset.qx_data[:int(n_cols)]
-                dataset.data = dataset.data.flatten()
-            else:
-                dataset.y_bins = []
-                dataset.x_bins = []
-                dataset.data = dataset.data.flatten()
-            final_list.append(dataset)
+            if isinstance(dataset, Data2D):
+                dataset.data = dataset.data.astype(np.float64)
+                dataset.qx_data = dataset.qx_data.astype(np.float64)
+                dataset.xmin = np.min(dataset.qx_data)
+                dataset.xmax = np.max(dataset.qx_data)
+                dataset.qy_data = dataset.qy_data.astype(np.float64)
+                dataset.ymin = np.min(dataset.qy_data)
+                dataset.ymax = np.max(dataset.qy_data)
+                dataset.q_data = np.sqrt(dataset.qx_data * dataset.qx_data
+                                         + dataset.qy_data * dataset.qy_data)
+                if dataset.err_data is not None:
+                    dataset.err_data = dataset.err_data.astype(np.float64)
+                if dataset.dqx_data is not None:
+                    dataset.dqx_data = dataset.dqx_data.astype(np.float64)
+                if dataset.dqy_data is not None:
+                    dataset.dqy_data = dataset.dqy_data.astype(np.float64)
+                if dataset.mask is not None:
+                    dataset.mask = dataset.mask.astype(dtype=bool)
+
+                if len(dataset.data.shape) == 2:
+                    n_rows, n_cols = dataset.data.shape
+                    dataset.y_bins = dataset.qy_data[0::int(n_cols)]
+                    dataset.x_bins = dataset.qx_data[:int(n_cols)]
+                    dataset.data = dataset.data.flatten()
+                else:
+                    dataset.y_bins = []
+                    dataset.x_bins = []
+                    dataset.data = dataset.data.flatten()
+                final_list.append(dataset)
         self.output = final_list
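The restructured sort_two_d_data above skips anything that is not a Data2D and rebuilds the bin axes from the flattened detector grid. Below is a minimal numpy sketch of that bin extraction; it is not part of the changeset, and the 3×4 grid and the qx/qy values are invented for illustration.

```python
import numpy as np

# Invented flattened, row-major detector grid: qx varies within a row,
# qy is constant within a row (same layout sort_two_d_data assumes).
n_rows, n_cols = 3, 4
qx = np.tile(np.linspace(0.01, 0.04, n_cols), n_rows)
qy = np.repeat(np.linspace(0.1, 0.3, n_rows), n_cols)

x_bins = qx[:n_cols]             # first row enumerates the column centres once
y_bins = qy[0::n_cols]           # every n_cols-th value gives one qy per row
q = np.sqrt(qx * qx + qy * qy)   # |q| for each (qx, qy) pair

print(x_bins)  # [0.01 0.02 0.03 0.04]
print(y_bins)  # [0.1 0.2 0.3]
```

Because the grid is stored row-major, slicing the first n_cols entries of qx_data and striding through qy_data in steps of n_cols is all the x_bins/y_bins assignments in the diff need.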
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
r7f75a3f → r9d786e5

 from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
 from sas.sascalc.dataloader.loader_exceptions import FileContentsException
-
-
-class Reader():
+from sas.sascalc.dataloader.file_reader_base_class import FileReader
+
+
+class Reader(FileReader):
     """
     A class for reading in CanSAS v2.0 data files. The existing iteration opens
…
     # Raw file contents to be processed
     raw_data = None
-    # Data info currently being read in
-    current_datainfo = None
-    # SASdata set currently being read in
-    current_dataset = None
     # List of plottable1D objects that should be linked to the current_datainfo
     data1d = None
…
     # Flag to bypass extension check
     allow_all = True
-    # List of files to return
-    output = None
-
-    def read(self, filename):
+
+    def get_file_contents(self):
         """
         This is the general read method that all SasView data_loaders must have.
…
         # Reinitialize when loading a new data file to reset all class variables
         self.reset_class_variables()
+
+        filename = self.f_open.name
+        self.f_open.close()  # IO handled by h5py
+
         # Check that the file exists
         if os.path.isfile(filename):
…
         Data1D and Data2D objects
         """
-
         # Type cast data arrays to float64
         if len(self.current_datainfo.trans_spectrum) > 0:
…
         # Type cast data arrays to float64 and find min/max as appropriate
         for dataset in self.data2d:
-            dataset.data = dataset.data.astype(np.float64)
-            dataset.err_data = dataset.err_data.astype(np.float64)
-            if dataset.qx_data is not None:
-                dataset.xmin = np.min(dataset.qx_data)
-                dataset.xmax = np.max(dataset.qx_data)
-                dataset.qx_data = dataset.qx_data.astype(np.float64)
-            if dataset.dqx_data is not None:
-                dataset.dqx_data = dataset.dqx_data.astype(np.float64)
-            if dataset.qy_data is not None:
-                dataset.ymin = np.min(dataset.qy_data)
-                dataset.ymax = np.max(dataset.qy_data)
-                dataset.qy_data = dataset.qy_data.astype(np.float64)
-            if dataset.dqy_data is not None:
-                dataset.dqy_data = dataset.dqy_data.astype(np.float64)
-            if dataset.q_data is not None:
-                dataset.q_data = dataset.q_data.astype(np.float64)
             zeros = np.ones(dataset.data.size, dtype=bool)
             try:
…
                 dataset.x_bins = dataset.qx_data[:n_cols]
                 dataset.data = dataset.data.flatten()
-
-            final_dataset = combine_data_info_with_plottable(
-                dataset, self.current_datainfo)
-            self.output.append(final_dataset)
+            self.current_dataset = dataset
+            self.send_to_output()
 
         for dataset in self.data1d:
-            if dataset.x is not None:
-                dataset.x = dataset.x.astype(np.float64)
-                dataset.xmin = np.min(dataset.x)
-                dataset.xmax = np.max(dataset.x)
-            if dataset.y is not None:
-                dataset.y = dataset.y.astype(np.float64)
-                dataset.ymin = np.min(dataset.y)
-                dataset.ymax = np.max(dataset.y)
-            if dataset.dx is not None:
-                dataset.dx = dataset.dx.astype(np.float64)
-            if dataset.dxl is not None:
-                dataset.dxl = dataset.dxl.astype(np.float64)
-            if dataset.dxw is not None:
-                dataset.dxw = dataset.dxw.astype(np.float64)
-            if dataset.dy is not None:
-                dataset.dy = dataset.dy.astype(np.float64)
-            final_dataset = combine_data_info_with_plottable(
-                dataset, self.current_datainfo)
-            self.output.append(final_dataset)
+            self.current_dataset = dataset
+            self.send_to_output()
 
     def add_data_set(self, key=""):
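Together these edits move the HDF5 reader onto the FileReader base class: the subclass implements get_file_contents() instead of read(filename), receives an already opened handle as self.f_open, and hands finished datasets back through send_to_output() rather than calling combine_data_info_with_plottable and appending to self.output itself. The sketch below is a hedged illustration of that pattern; MinimalHDF5Reader is invented, and the h5py handling and error message are assumptions rather than code from the changeset — only the overridden hook and the base-class members (f_open, current_dataset, current_datainfo, send_to_output) come from the diff.

```python
import h5py

from sas.sascalc.dataloader.file_reader_base_class import FileReader
from sas.sascalc.dataloader.loader_exceptions import FileContentsException


class MinimalHDF5Reader(FileReader):
    """Invented example subclass -- not the real cansas_reader_HDF5.Reader."""

    def get_file_contents(self):
        # FileReader has already opened the file; h5py wants a path, so take
        # the name and close the handle, mirroring the changeset.
        filename = self.f_open.name
        self.f_open.close()

        try:
            raw = h5py.File(filename, 'r')  # assumed plain h5py usage
        except Exception as exc:
            raise FileContentsException("Unable to open HDF5 file: %s" % exc)

        # ... walk `raw` and fill self.current_datainfo / self.current_dataset ...
        raw.close()

        # Base-class helper that replaces the manual
        # combine_data_info_with_plottable / self.output.append seen in the old code.
        self.send_to_output()
```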