Changeset c222c27 in sasview for src/sas/sascalc/file_converter
- Timestamp: Nov 15, 2018 2:09:18 PM
- Branches: master, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249
- Children: 9220e89c
- Parents: a165bee (diff), f560e23 (diff)
- Location: src/sas/sascalc/file_converter
- Files: 1 deleted, 3 edited

Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
src/sas/sascalc/file_converter/bsl_loader.py
rf00691d4 → r952ea1f

-from sas.sascalc.file_converter.core.bsl_loader import CLoader
+from sas.sascalc.file_converter._bsl_loader import CLoader
 from sas.sascalc.dataloader.data_info import Data2D
 from copy import deepcopy
…
                     'swap_bytes': int(metadata[3])
                 }
-            except:
+            except Exception:
                 is_valid = False
                 err_msg = "Invalid metadata in header file for {}"
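Two changes here: the import follows the C extension's rename to `_bsl_loader` (the leading underscore marks it as private), and the bare `except:` is narrowed to `except Exception:`, which keeps parse errors contained without also swallowing `SystemExit` and `KeyboardInterrupt`. A minimal standalone sketch of the pattern; the field names other than `swap_bytes` are illustrative, not taken from the real BSL header layout:

    def parse_metadata(fields):
        """Parse four integer header fields, returning None on bad input.

        Catching Exception (not a bare except) still traps ValueError and
        IndexError from malformed headers, but lets KeyboardInterrupt and
        SystemExit propagate to the caller.
        """
        try:
            return {
                'n_frames': int(fields[0]),   # hypothetical field names
                'n_pixels': int(fields[1]),
                'n_rasters': int(fields[2]),
                'swap_bytes': int(fields[3]),
            }
        except Exception:
            return None

    print(parse_metadata(['10', '512', '512', '0']))  # -> parsed dict
    print(parse_metadata(['ten', '512']))             # -> None (invalid header)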
src/sas/sascalc/file_converter/c_ext/bsl_loader.c
rd5aeaa3 → r952ea1f

+#include <stdio.h>
+#include <stdlib.h>
+
+//#define Py_LIMITED_API 0x03020000
 #include <Python.h>
+#include <structmember.h>
 #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
 #include <numpy/arrayobject.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include "structmember.h"
+
 #include "bsl_loader.h"
…
 #define MODULE_DOC "C module for loading bsl."
-#define MODULE_NAME "bsl_loader"
-#define MODULE_INIT2 initbsl_loader
-#define MODULE_INIT3 PyInit_bsl_loader
+#define MODULE_NAME "_bsl_loader"
+#define MODULE_INIT2 init_bsl_loader
+#define MODULE_INIT3 PyInit__bsl_loader
 #define MODULE_METHODS module_methods
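The macro renames track the module rename: CPython resolves a version-specific entry point whose name must match the compiled module, `init<name>` on Python 2 and `PyInit_<name>` on Python 3, so renaming the module to `_bsl_loader` forces both macros to change. A condensed, hypothetical sketch of how such dual-version boilerplate typically expands (not the actual tail of bsl_loader.c):

    #include <Python.h>

    /* Names must match the shared-library module name "_bsl_loader":
     * Python 2 dlopens init_bsl_loader, Python 3 dlopens PyInit__bsl_loader. */
    #define MODULE_NAME "_bsl_loader"
    #define MODULE_INIT2 init_bsl_loader
    #define MODULE_INIT3 PyInit__bsl_loader

    static PyMethodDef module_methods[] = {
        {NULL}  /* sentinel */
    };

    #if PY_MAJOR_VERSION >= 3
    static struct PyModuleDef moduledef = {
        PyModuleDef_HEAD_INIT, MODULE_NAME, "C module for loading bsl.",
        -1, module_methods
    };

    PyMODINIT_FUNC MODULE_INIT3(void)
    {
        return PyModule_Create(&moduledef);
    }
    #else
    PyMODINIT_FUNC MODULE_INIT2(void)
    {
        Py_InitModule3(MODULE_NAME, module_methods,
                       "C module for loading bsl.");
    }
    #endif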
src/sas/sascalc/file_converter/nxcansas_writer.py
r574adc7 → r2ca5d57b

 import os

-from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader as Cansas2Reader
+from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader
 from sas.sascalc.dataloader.data_info import Data1D, Data2D

-class NXcanSASWriter(Cansas2Reader):
+class NXcanSASWriter(Reader):
     """
     A class for writing in NXcanSAS data files. Any number of data sets may be
…
             entry[names[2]].attrs['units'] = units

-        valid_data = all([issubclass(d.__class__, (Data1D, Data2D)) for d in dataset])
+        valid_data = all([issubclass(d.__class__, (Data1D, Data2D)) for d in
+                          dataset])
         if not valid_data:
-            raise ValueError("All entries of dataset must be Data1D or Data2D objects")
+            raise ValueError("All entries of dataset must be Data1D or Data2D"
+                             "objects")

         # Get run name and number from first Data object
…
         sasentry.attrs['version'] = '1.0'

-        i = 1
-
-        for data_obj in dataset:
-            data_entry = sasentry.create_group("sasdata{0:0=2d}".format(i))
+        for i, data_obj in enumerate(dataset):
+            data_entry = sasentry.create_group("sasdata{0:0=2d}".format(i+1))
             data_entry.attrs['canSAS_class'] = 'SASdata'
             if isinstance(data_obj, Data1D):
…
             elif isinstance(data_obj, Data2D):
                 self._write_2d_data(data_obj, data_entry)
-            i += 1

         data_info = dataset[0]
…
             sample_entry.create_dataset('details', data=details)

-        # Instrumment metadata
+        # Instrument metadata
         instrument_entry = sasentry.create_group('sasinstrument')
         instrument_entry.attrs['canSAS_class'] = 'SASinstrument'
…
             units=data_info.source.beam_size_unit, write_fn=_write_h5_float)

-
         # Collimation metadata
         if len(data_info.collimation) > 0:
-            i = 1
-            for coll_info in data_info.collimation:
+            for i, coll_info in enumerate(data_info.collimation):
                 collimation_entry = instrument_entry.create_group(
-                    'sascollimation{0:0=2d}'.format(i))
+                    'sascollimation{0:0=2d}'.format(i + 1))
                 collimation_entry.attrs['canSAS_class'] = 'SAScollimation'
                 if coll_info.length is not None:
                     _write_h5_float(collimation_entry, coll_info.length, 'SDD')
-                    collimation_entry['SDD'].attrs['units'] = coll_info.length_unit
+                    collimation_entry['SDD'].attrs['units'] =\
+                        coll_info.length_unit
                 if coll_info.name is not None:
                     collimation_entry['name'] = _h5_string(coll_info.name)
         else:
-            # Create a blank one - at least 1 set of collimation metadata
-            # required by format
-            collimation_entry = instrument_entry.create_group('sascollimation01')
+            # Create a blank one - at least 1 collimation required by format
+            instrument_entry.create_group('sascollimation01')

         # Detector metadata
         if len(data_info.detector) > 0:
-            i = 1
-            for det_info in data_info.detector:
+            for i, det_info in enumerate(data_info.detector):
                 detector_entry = instrument_entry.create_group(
-                    'sasdetector{0:0=2d}'.format(i))
+                    'sasdetector{0:0=2d}'.format(i + 1))
                 detector_entry.attrs['canSAS_class'] = 'SASdetector'
                 if det_info.distance is not None:
                     _write_h5_float(detector_entry, det_info.distance, 'SDD')
-                    detector_entry['SDD'].attrs['units'] = det_info.distance_unit
+                    detector_entry['SDD'].attrs['units'] =\
+                        det_info.distance_unit
                 if det_info.name is not None:
                     detector_entry['name'] = _h5_string(det_info.name)
                 else:
                     detector_entry['name'] = _h5_string('')
                 if det_info.slit_length is not None:
-                    _write_h5_float(detector_entry, det_info.slit_length, 'slit_length')
-                    detector_entry['slit_length'].attrs['units'] = det_info.slit_length_unit
+                    _write_h5_float(detector_entry, det_info.slit_length,
+                                    'slit_length')
+                    detector_entry['slit_length'].attrs['units'] =\
+                        det_info.slit_length_unit
                 _write_h5_vector(detector_entry, det_info.offset)
                 # NXcanSAS doesn't save information about pitch, only roll
…
                     names=['x_pixel_size', 'y_pixel_size'],
                     write_fn=_write_h5_float, units=det_info.pixel_size_unit)
-
-                i += 1
         else:
             # Create a blank one - at least 1 detector required by format
…
             detector_entry.attrs['canSAS_class'] = 'SASdetector'
             detector_entry.attrs['name'] = ''
+
+        # Process meta data
+        for i, process in enumerate(data_info.process):
+            process_entry = sasentry.create_group('sasprocess{0:0=2d}'.format(
+                i + 1))
+            process_entry.attrs['canSAS_class'] = 'SASprocess'
+            if process.name:
+                name = _h5_string(process.name)
+                process_entry.create_dataset('name', data=name)
+            if process.date:
+                date = _h5_string(process.date)
+                process_entry.create_dataset('date', data=date)
+            if process.description:
+                desc = _h5_string(process.description)
+                process_entry.create_dataset('description', data=desc)
+            for j, term in enumerate(process.term):
+                # Don't save empty terms
+                if term:
+                    h5_term = _h5_string(term)
+                    process_entry.create_dataset('term{0:0=2d}'.format(
+                        j + 1), data=h5_term)
+            for j, note in enumerate(process.notes):
+                # Don't save empty notes
+                if note:
+                    h5_note = _h5_string(note)
+                    process_entry.create_dataset('note{0:0=2d}'.format(
+                        j + 1), data=h5_note)
+
+        # Transmission Spectrum
+        for i, trans in enumerate(data_info.trans_spectrum):
+            trans_entry = sasentry.create_group(
+                'sastransmission_spectrum{0:0=2d}'.format(i + 1))
+            trans_entry.attrs['canSAS_class'] = 'SAStransmission_spectrum'
+            trans_entry.attrs['signal'] = 'T'
+            trans_entry.attrs['T_axes'] = 'T'
+            trans_entry.attrs['name'] = trans.name
+            if trans.timestamp is not '':
+                trans_entry.attrs['timestamp'] = trans.timestamp
+            transmission = trans_entry.create_dataset('T',
+                data=trans.transmission)
+            transmission.attrs['unertainties'] = 'Tdev'
+            trans_entry.create_dataset('Tdev',
+                data=trans.transmission_deviation)
+            trans_entry.create_dataset('lambda', data=trans.wavelength)

         note_entry = sasentry.create_group('sasnote'.format(i))
…
         data_entry.attrs['signal'] = 'I'
         data_entry.attrs['I_axes'] = 'Q'
-        data_entry.attrs['I_uncertainties'] = 'Idev'
-        data_entry.attrs['Q_indicies'] = 0
-
-        dI = data_obj.dy
-        if dI is None:
-            dI = np.zeros((data_obj.y.shape))
-
-        data_entry.create_dataset('Q', data=data_obj.x)
-        data_entry.create_dataset('I', data=data_obj.y)
-        data_entry.create_dataset('Idev', data=dI)
+        data_entry.attrs['Q_indices'] = [0]
+        q_entry = data_entry.create_dataset('Q', data=data_obj.x)
+        q_entry.attrs['units'] = data_obj.x_unit
+        i_entry = data_entry.create_dataset('I', data=data_obj.y)
+        i_entry.attrs['units'] = data_obj.y_unit
+        if data_obj.dy is not None:
+            i_entry.attrs['uncertainties'] = 'Idev'
+            i_dev_entry = data_entry.create_dataset('Idev', data=data_obj.dy)
+            i_dev_entry.attrs['units'] = data_obj.y_unit
+        if data_obj.dx is not None:
+            q_entry.attrs['resolutions'] = 'dQ'
+            dq_entry = data_entry.create_dataset('dQ', data=data_obj.dx)
+            dq_entry.attrs['units'] = data_obj.x_unit
+        elif data_obj.dxl is not None:
+            q_entry.attrs['resolutions'] = ['dQl','dQw']
+            dql_entry = data_entry.create_dataset('dQl', data=data_obj.dxl)
+            dql_entry.attrs['units'] = data_obj.x_unit
+            dqw_entry = data_entry.create_dataset('dQw', data=data_obj.dxw)
+            dqw_entry.attrs['units'] = data_obj.x_unit

     def _write_2d_data(self, data, data_entry):
…
         """
         data_entry.attrs['signal'] = 'I'
-        data_entry.attrs['I_axes'] = 'Q,Q'
-        data_entry.attrs['I_uncertainties'] = 'Idev'
-        data_entry.attrs['Q_indicies'] = [0,1]
+        data_entry.attrs['I_axes'] = 'Qx,Qy'
+        data_entry.attrs['Q_indices'] = [0,1]

         (n_rows, n_cols) = (len(data.y_bins), len(data.x_bins))
…
             raise ValueError("Unable to calculate dimensions of 2D data")

-        I = np.reshape(data.data, (n_rows, n_cols))
-        dI = np.zeros((n_rows, n_cols))
-        if not all(data.err_data == [None]):
-            dI = np.reshape(data.err_data, (n_rows, n_cols))
+        intensity = np.reshape(data.data, (n_rows, n_cols))
         qx = np.reshape(data.qx_data, (n_rows, n_cols))
         qy = np.reshape(data.qy_data, (n_rows, n_cols))

-        I_entry = data_entry.create_dataset('I', data=I)
-        I_entry.attrs['units'] = data.I_unit
-        Qx_entry = data_entry.create_dataset('Qx', data=qx)
-        Qx_entry.attrs['units'] = data.Q_unit
-        Qy_entry = data_entry.create_dataset('Qy', data=qy)
-        Qy_entry.attrs['units'] = data.Q_unit
-        Idev_entry = data_entry.create_dataset('Idev', data=dI)
-        Idev_entry.attrs['units'] = data.I_unit
+        i_entry = data_entry.create_dataset('I', data=intensity)
+        i_entry.attrs['units'] = data.I_unit
+        qx_entry = data_entry.create_dataset('Qx', data=qx)
+        qx_entry.attrs['units'] = data.Q_unit
+        qy_entry = data_entry.create_dataset('Qy', data=qy)
+        qy_entry.attrs['units'] = data.Q_unit
+        if data.err_data is not None and not all(data.err_data == [None]):
+            d_i = np.reshape(data.err_data, (n_rows, n_cols))
+            i_entry.attrs['uncertainties'] = 'Idev'
+            i_dev_entry = data_entry.create_dataset('Idev', data=d_i)
+            i_dev_entry.attrs['units'] = data.I_unit
+        if data.dqx_data is not None and not all(data.dqx_data == [None]):
+            qx_entry.attrs['resolutions'] = 'dQx'
+            dqx_entry = data_entry.create_dataset('dQx', data=data.dqx_data)
+            dqx_entry.attrs['units'] = data.Q_unit
+        if data.dqy_data is not None and not all(data.dqy_data == [None]):
+            qy_entry.attrs['resolutions'] = 'dQy'
+            dqy_entry = data_entry.create_dataset('dQy', data=data.dqy_data)
+            dqy_entry.attrs['units'] = data.Q_unit
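Beyond replacing the manual counters with `enumerate`, the rewritten writers only create `Idev`/`dQ`-style datasets when the corresponding arrays exist, and link them from the signal datasets via `uncertainties`/`resolutions` attributes as NXcanSAS prescribes. A self-contained h5py sketch of that conditional pattern (file name, units, and data are made up for illustration; this is not SasView code):

    import numpy as np
    import h5py

    # Toy 1D data: dy is present, dx is absent, mimicking a Data1D object.
    q = np.linspace(0.01, 0.5, 50)
    intensity = 1.0 / (1.0 + (q * 40.0) ** 2)
    dy = 0.05 * intensity
    dx = None

    with h5py.File('example_nxcansas.h5', 'w') as f:
        data_entry = f.create_group('sasentry01/sasdata01')
        data_entry.attrs['canSAS_class'] = 'SASdata'
        data_entry.attrs['signal'] = 'I'
        data_entry.attrs['I_axes'] = 'Q'
        data_entry.attrs['Q_indices'] = [0]

        q_entry = data_entry.create_dataset('Q', data=q)
        q_entry.attrs['units'] = '1/A'
        i_entry = data_entry.create_dataset('I', data=intensity)
        i_entry.attrs['units'] = '1/cm'

        # Only write uncertainty/resolution datasets when the arrays exist,
        # and point to them from the signal datasets, as the patched writer does.
        if dy is not None:
            i_entry.attrs['uncertainties'] = 'Idev'
            data_entry.create_dataset('Idev', data=dy).attrs['units'] = '1/cm'
        if dx is not None:
            q_entry.attrs['resolutions'] = 'dQ'
            data_entry.create_dataset('dQ', data=dx).attrs['units'] = '1/A'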