- Timestamp:
- Sep 13, 2018 9:33:22 AM (6 years ago)
- Branches:
- master, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, unittest-saveload
- Children:
- feec1cb
- Parents:
- 4fdcc65
- Location:
- src/sas/sascalc
- Files:
- 2 edited
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
r4fdcc65 → r2ca5d57b

 value = data.get(key)
 class_name = h5attr(value, u'canSAS_class')
+if isinstance(class_name, (list, tuple, np.ndarray)):
+    class_name = class_name[0]
 if class_name is None:
     class_name = h5attr(value, u'NX_class')
…
 if dataset.data.ndim == 2:
     (n_rows, n_cols) = dataset.data.shape
-    print(n_rows)
-    print(n_cols)
     flat_qy = dataset.qy_data[0::n_cols].flatten()
     if flat_qy[0] == flat_qy[1]:
…
 flat_qx = np.transpose(dataset.qx_data)[0::n_rows].flatten()
 dataset.x_bins = np.unique(flat_qx)
-print(dataset.x_bins)
-print(len(dataset.x_bins))
-print(dataset.y_bins)
-print(len(dataset.y_bins))
 dataset.data = dataset.data.flatten()
 dataset.qx_data = dataset.qx_data.flatten()
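The added isinstance guard in the first hunk handles HDF5 files whose canSAS_class attribute is stored as a length-one array rather than a scalar string; the remaining hunks only drop leftover debug print calls. A minimal sketch of the attribute behaviour, written directly against h5py (the file name and attribute values below are illustrative, not taken from this changeset):

    import numpy as np
    import h5py

    # One group stores canSAS_class as a scalar string, the other as a
    # length-1 array, which some writers produce.
    with h5py.File("attr_demo.h5", "w") as handle:
        handle.create_group("scalar").attrs["canSAS_class"] = "SASdata"
        handle.create_group("wrapped").attrs["canSAS_class"] = np.array([b"SASdata"])

    with h5py.File("attr_demo.h5", "r") as handle:
        for key in ("scalar", "wrapped"):
            class_name = handle[key].attrs.get("canSAS_class")
            # Same normalisation as the patched reader: unwrap sequence-like
            # attribute values so later string comparisons see a single item.
            if isinstance(class_name, (list, tuple, np.ndarray)):
                class_name = class_name[0]
            print(key, class_name)

Whether the unwrapped value comes back as bytes or str depends on how the attribute was written and on the h5py version; the patch only normalises the container type, not the string type.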
src/sas/sascalc/file_converter/nxcansas_writer.py
r4fdcc65 → r2ca5d57b

 import os

-from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader as Cansas2Reader
+from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader
 from sas.sascalc.dataloader.data_info import Data1D, Data2D

-class NXcanSASWriter(Cansas2Reader):
+class NXcanSASWriter(Reader):
     """
     A class for writing in NXcanSAS data files. Any number of data sets may be
…
 entry[names[2]].attrs['units'] = units

-valid_data = all([issubclass(d.__class__, (Data1D, Data2D)) for d in dataset])
+valid_data = all([issubclass(d.__class__, (Data1D, Data2D)) for d in
+                  dataset])
 if not valid_data:
-    raise ValueError("All entries of dataset must be Data1D or Data2D objects")
+    raise ValueError("All entries of dataset must be Data1D or Data2D"
+                     "objects")

 # Get run name and number from first Data object
…
 sample_entry.create_dataset('details', data=details)

-# Instrumment metadata
+# Instrument metadata
 instrument_entry = sasentry.create_group('sasinstrument')
 instrument_entry.attrs['canSAS_class'] = 'SASinstrument'
…
 # Collimation metadata
 if len(data_info.collimation) > 0:
-    i = 1
-    for coll_info in data_info.collimation:
+    for i, coll_info in enumerate(data_info.collimation):
         collimation_entry = instrument_entry.create_group(
-            'sascollimation{0:0=2d}'.format(i))
+            'sascollimation{0:0=2d}'.format(i + 1))
         collimation_entry.attrs['canSAS_class'] = 'SAScollimation'
         if coll_info.length is not None:
             _write_h5_float(collimation_entry, coll_info.length, 'SDD')
-            collimation_entry['SDD'].attrs['units'] = coll_info.length_unit
+            collimation_entry['SDD'].attrs['units'] =\
+                coll_info.length_unit
         if coll_info.name is not None:
             collimation_entry['name'] = _h5_string(coll_info.name)
 else:
-    # Create a blank one - at least 1 set of collimation metadata
-    # required by format
-    collimation_entry = instrument_entry.create_group('sascollimation01')
+    # Create a blank one - at least 1 collimation required by format
+    instrument_entry.create_group('sascollimation01')

 # Detector metadata
 if len(data_info.detector) > 0:
-    i = 1
-    for det_info in data_info.detector:
+    for i, det_info in enumerate(data_info.detector):
         detector_entry = instrument_entry.create_group(
-            'sasdetector{0:0=2d}'.format(i))
+            'sasdetector{0:0=2d}'.format(i + 1))
         detector_entry.attrs['canSAS_class'] = 'SASdetector'
         if det_info.distance is not None:
             _write_h5_float(detector_entry, det_info.distance, 'SDD')
-            detector_entry['SDD'].attrs['units'] = det_info.distance_unit
+            detector_entry['SDD'].attrs['units'] =\
+                det_info.distance_unit
         if det_info.name is not None:
             detector_entry['name'] = _h5_string(det_info.name)
…
     detector_entry['name'] = _h5_string('')
 if det_info.slit_length is not None:
-    _write_h5_float(detector_entry, det_info.slit_length, 'slit_length')
-    detector_entry['slit_length'].attrs['units'] = det_info.slit_length_unit
+    _write_h5_float(detector_entry, det_info.slit_length,
+                    'slit_length')
+    detector_entry['slit_length'].attrs['units'] =\
+        det_info.slit_length_unit
 _write_h5_vector(detector_entry, det_info.offset)
 # NXcanSAS doesn't save information about pitch, only roll
…
         names=['x_pixel_size', 'y_pixel_size'],
         write_fn=_write_h5_float,
         units=det_info.pixel_size_unit)
-        i += 1
 else:
     # Create a blank one - at least 1 detector required by format
…

 # Process meta data
-if len(data_info.process) > 0 and not data_info.process[0].is_empty():
-    i = 1
-    for process in data_info.process:
-        process_entry = sasentry.create_group(
-            'sasprocess{0:0=2d}'.format(i))
-        process_entry.attrs['canSAS_class'] = 'SASprocess'
-        if process.name:
-            name = _h5_string(process.name)
-            process_entry.create_dataset('name', data=name)
-        if process.date:
-            date = _h5_string(process.date)
-            process_entry.create_dataset('date', data=date)
-        if process.description:
-            desc = _h5_string(process.description)
-            process_entry.create_dataset('description', data=desc)
-        j = 1
-        for term in process.term:
-            if term:
-                h5_term = _h5_string(term)
-                process_entry.create_dataset('term{0:0=2d}'.format(j),
-                                             data=h5_term)
-            j += 1
-        j = 1
-        for note in process.notes:
-            if note:
-                h5_note = _h5_string(note)
-                process_entry.create_dataset('note{0:0=2d}'.format(j),
-                                             data=h5_note)
-            j += 1
-        i += 1
+for i, process in enumerate(data_info.process):
+    process_entry = sasentry.create_group('sasprocess{0:0=2d}'.format(
+        i + 1))
+    process_entry.attrs['canSAS_class'] = 'SASprocess'
+    if process.name:
+        name = _h5_string(process.name)
+        process_entry.create_dataset('name', data=name)
+    if process.date:
+        date = _h5_string(process.date)
+        process_entry.create_dataset('date', data=date)
+    if process.description:
+        desc = _h5_string(process.description)
+        process_entry.create_dataset('description', data=desc)
+    for j, term in enumerate(process.term):
+        # Don't save empty terms
+        if term:
+            h5_term = _h5_string(term)
+            process_entry.create_dataset('term{0:0=2d}'.format(
+                j + 1), data=h5_term)
+    for j, note in enumerate(process.notes):
+        # Don't save empty notes
+        if note:
+            h5_note = _h5_string(note)
+            process_entry.create_dataset('note{0:0=2d}'.format(
+                j + 1), data=h5_note)

 # Transmission Spectrum
-if len(data_info.trans_spectrum) > 0:
-    i = 1
-    for trans in data_info.trans_spectrum:
-        trans_entry = sasentry.create_group(
-            'sastransmission_spectrum{0:0=2d}'.format(i))
-        trans_entry.attrs['canSAS_class'] = 'SAStransmission_spectrum'
-        trans_entry.attrs['signal'] = 'T'
-        trans_entry.attrs['T_axes'] = 'T'
-        trans_entry.attrs['name'] = trans.name
-        if trans.timestamp is not '':
-            trans_entry.attrs['timestamp'] = trans.timestamp
-        transmission = trans_entry.create_dataset(
-            'T', data=trans.transmission)
-        transmission.attrs['unertainties'] = 'Tdev'
-        trans_entry.create_dataset('Tdev',
-                                   data = trans.transmission_deviation)
-        trans_entry.create_dataset('lambda', data=trans.wavelength)
+for i, trans in enumerate(data_info.trans_spectrum):
+    trans_entry = sasentry.create_group(
+        'sastransmission_spectrum{0:0=2d}'.format(i + 1))
+    trans_entry.attrs['canSAS_class'] = 'SAStransmission_spectrum'
+    trans_entry.attrs['signal'] = 'T'
+    trans_entry.attrs['T_axes'] = 'T'
+    trans_entry.attrs['name'] = trans.name
+    if trans.timestamp is not '':
+        trans_entry.attrs['timestamp'] = trans.timestamp
+    transmission = trans_entry.create_dataset('T',
+                                              data=trans.transmission)
+    transmission.attrs['unertainties'] = 'Tdev'
+    trans_entry.create_dataset('Tdev',
+                               data=trans.transmission_deviation)
+    trans_entry.create_dataset('lambda', data=trans.wavelength)

 note_entry = sasentry.create_group('sasnote'.format(i))
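Most of the writer changes are mechanical: hand-maintained counters (i = 1 … i += 1) are replaced with enumerate, long lines are wrapped, and the one-based, zero-padded group names (sasprocess01, sasdetector01, …) are kept by adding 1 inside the format call. A standalone sketch of that naming pattern with h5py, using an in-memory file and made-up process names rather than a real data_info object:

    import h5py

    # Illustrative stand-ins for the entries of data_info.process.
    processes = ["data reduction", "background subtraction", "absolute scaling"]

    # driver='core' with backing_store=False keeps the sketch off the disk.
    with h5py.File("naming_demo.h5", "w", driver="core",
                   backing_store=False) as handle:
        sasentry = handle.create_group("sasentry01")
        for i, name in enumerate(processes):
            # format(i + 1) reproduces the one-based names the old counter
            # produced: sasprocess01, sasprocess02, ...
            process_entry = sasentry.create_group('sasprocess{0:0=2d}'.format(i + 1))
            process_entry.attrs['canSAS_class'] = 'SASprocess'
            process_entry.create_dataset('name', data=name)
        print(sorted(sasentry.keys()))  # ['sasprocess01', 'sasprocess02', 'sasprocess03']

enumerate(processes, start=1) would remove the + 1 entirely; the patch instead keeps zero-based enumerate and applies the offset inside format, which keeps the change local to the format calls.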