"""
NXcanSAS 1/2D data writer for HDF5 formatted NXcanSAS files.
"""

import h5py
import numpy as np
import re
import os

from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader as Cansas2Reader
from sas.sascalc.dataloader.data_info import Data1D, Data2D

| 13 | class NXcanSASWriter(Cansas2Reader): |
---|
| 14 | """ |
---|
| 15 | A class for writing in NXcanSAS data files. Any number of data sets may be |
---|
| 16 | written to the file. Currently 1D and 2D SAS data sets are supported |
---|
| 17 | |
---|
| 18 | NXcanSAS spec: http://download.nexusformat.org/sphinx/classes/contributed_definitions/NXcanSAS.html |
---|
| 19 | |
---|
| 20 | :Dependencies: |
---|
| 21 | The NXcanSAS writer requires h5py => v2.5.0 or later. |
---|
| 22 | """ |
---|
| 23 | |
---|
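    # Example usage (a minimal sketch; assumes `data` is a Data1D or Data2D
    # object already loaded elsewhere, e.g. by one of the sascalc dataloader
    # readers):
    #
    #     writer = NXcanSASWriter()
    #     writer.write([data], 'output.h5')
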
    def write(self, dataset, filename):
        """
        Write an array of Data1D or Data2D objects to an NXcanSAS file, as
        one SASentry with multiple SASdata elements. The metadata of the first
        element in the array will be written as the SASentry metadata
        (detector, instrument, sample, etc.).

        :param dataset: A list of Data1D or Data2D objects to write
        :param filename: Where to write the NXcanSAS file
        """

        def _h5_string(string):
            """
            Convert a string to a numpy string in a numpy array. This way it
            is written to the HDF5 file as a fixed length ASCII string and is
            compatible with the Reader read() method.
            """
            if isinstance(string, np.ndarray):
                return string
            elif not isinstance(string, str):
                string = str(string)

            return np.array([np.string_(string)])

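        # Example: _h5_string('NXcanSAS') returns np.array([b'NXcanSAS']),
        # a one-element array of fixed-length bytes that h5py stores as a
        # fixed-length ASCII dataset.
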
        def _write_h5_string(entry, value, key):
            entry[key] = _h5_string(value)

        def _h5_float(x):
            if not isinstance(x, list):
                x = [x]
            return np.array(x, dtype=np.float32)

        def _write_h5_float(entry, value, key):
            entry.create_dataset(key, data=_h5_float(value))

        def _write_h5_vector(entry, vector, names=['x_position', 'y_position'],
                             units=None, write_fn=_write_h5_string):
            """
            Write a vector to an h5 entry

            :param entry: The H5Py entry to write to
            :param vector: The Vector to write
            :param names: What to call the x, y and z components of the vector
                when writing to the H5Py entry
            :param units: The units of the vector (optional)
            :param write_fn: A function to convert the value to the required
                format and write it to the H5Py entry, of the form
                f(entry, value, name) (optional)
            """
            if len(names) < 2:
                raise ValueError("Length of names must be >= 2.")

            if vector.x is not None:
                write_fn(entry, vector.x, names[0])
                if units is not None:
                    entry[names[0]].attrs['units'] = units
            if vector.y is not None:
                write_fn(entry, vector.y, names[1])
                if units is not None:
                    entry[names[1]].attrs['units'] = units
            if len(names) == 3 and vector.z is not None:
                write_fn(entry, vector.z, names[2])
                if units is not None:
                    entry[names[2]].attrs['units'] = units

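        # Example (hypothetical values): for an offset vector with x=0.0 and
        # y=10.0 in mm,
        #     _write_h5_vector(entry, offset, units='mm',
        #                      write_fn=_write_h5_float)
        # creates 'x_position' and 'y_position' datasets, each with a 'units'
        # attribute of 'mm'.
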
        valid_data = all([issubclass(d.__class__, (Data1D, Data2D))
                          for d in dataset])
        if not valid_data:
            raise ValueError("All entries of dataset must be Data1D or Data2D objects")

        # Get run name and number from first Data object
        data_info = dataset[0]
        run_number = ''
        run_name = ''
        if len(data_info.run) > 0:
            run_number = data_info.run[0]
            if len(data_info.run_name) > 0:
                run_name = data_info.run_name[run_number]

        f = h5py.File(filename, 'w')
        sasentry = f.create_group('sasentry01')
        sasentry['definition'] = _h5_string('NXcanSAS')
        sasentry['run'] = _h5_string(run_number)
        sasentry['run'].attrs['name'] = run_name
        sasentry['title'] = _h5_string(data_info.title)
        sasentry.attrs['canSAS_class'] = 'SASentry'
        sasentry.attrs['version'] = '1.0'

        i = 1

        for data_obj in dataset:
            data_entry = sasentry.create_group("sasdata{0:0=2d}".format(i))
            data_entry.attrs['canSAS_class'] = 'SASdata'
            if isinstance(data_obj, Data1D):
                self._write_1d_data(data_obj, data_entry)
            elif isinstance(data_obj, Data2D):
                self._write_2d_data(data_obj, data_entry)
            i += 1

        data_info = dataset[0]
        # Sample metadata
        sample_entry = sasentry.create_group('sassample')
        sample_entry.attrs['canSAS_class'] = 'SASsample'
        sample_entry['ID'] = _h5_string(data_info.sample.name)
        sample_attrs = ['thickness', 'temperature', 'transmission']
        for key in sample_attrs:
            if getattr(data_info.sample, key) is not None:
                sample_entry.create_dataset(key,
                    data=_h5_float(getattr(data_info.sample, key)))
        _write_h5_vector(sample_entry, data_info.sample.position)
        # NXcanSAS doesn't save information about pitch, only roll
        # and yaw. The _write_h5_vector method writes vector.y, but we
        # need to write vector.z for yaw
        data_info.sample.orientation.y = data_info.sample.orientation.z
        _write_h5_vector(sample_entry, data_info.sample.orientation,
            names=['polar_angle', 'azimuthal_angle'])
        if (data_info.sample.details is not None
                and data_info.sample.details != []):
            details = None
            if len(data_info.sample.details) > 1:
                details = [np.string_(d) for d in data_info.sample.details]
                details = np.array(details)
            elif data_info.sample.details != []:
                details = _h5_string(data_info.sample.details[0])
            if details is not None:
                sample_entry.create_dataset('details', data=details)

        # Instrument metadata
        instrument_entry = sasentry.create_group('sasinstrument')
        instrument_entry.attrs['canSAS_class'] = 'SASinstrument'
        instrument_entry['name'] = _h5_string(data_info.instrument)

        # Source metadata
        source_entry = instrument_entry.create_group('sassource')
        source_entry.attrs['canSAS_class'] = 'SASsource'
        if data_info.source.radiation is None:
            source_entry['radiation'] = _h5_string('neutron')
        else:
            source_entry['radiation'] = _h5_string(data_info.source.radiation)
        if data_info.source.beam_shape is not None:
            source_entry['beam_shape'] = _h5_string(data_info.source.beam_shape)
        wavelength_keys = {'wavelength': 'incident_wavelength',
                           'wavelength_min': 'wavelength_min',
                           'wavelength_max': 'wavelength_max',
                           'wavelength_spread': 'incident_wavelength_spread'}
        for sasname, nxname in wavelength_keys.items():
            value = getattr(data_info.source, sasname)
            units = getattr(data_info.source, sasname + '_unit')
            if value is not None:
                source_entry[nxname] = _h5_float(value)
                source_entry[nxname].attrs['units'] = units
        _write_h5_vector(source_entry, data_info.source.beam_size,
            names=['beam_size_x', 'beam_size_y'],
            units=data_info.source.beam_size_unit, write_fn=_write_h5_float)

        # Collimation metadata
        if len(data_info.collimation) > 0:
            i = 1
            for coll_info in data_info.collimation:
                collimation_entry = instrument_entry.create_group(
                    'sascollimation{0:0=2d}'.format(i))
                collimation_entry.attrs['canSAS_class'] = 'SAScollimation'
                if coll_info.length is not None:
                    _write_h5_float(collimation_entry, coll_info.length, 'SDD')
                    collimation_entry['SDD'].attrs['units'] = coll_info.length_unit
                if coll_info.name is not None:
                    collimation_entry['name'] = _h5_string(coll_info.name)
                i += 1
        else:
            # Create a blank one - at least 1 set of collimation metadata
            # required by format
            collimation_entry = instrument_entry.create_group('sascollimation01')

        # Detector metadata
        if len(data_info.detector) > 0:
            i = 1
            for det_info in data_info.detector:
                detector_entry = instrument_entry.create_group(
                    'sasdetector{0:0=2d}'.format(i))
                detector_entry.attrs['canSAS_class'] = 'SASdetector'
                if det_info.distance is not None:
                    _write_h5_float(detector_entry, det_info.distance, 'SDD')
                    detector_entry['SDD'].attrs['units'] = det_info.distance_unit
                if det_info.name is not None:
                    detector_entry['name'] = _h5_string(det_info.name)
                else:
                    detector_entry['name'] = _h5_string('')
                if det_info.slit_length is not None:
                    _write_h5_float(detector_entry, det_info.slit_length, 'slit_length')
                    detector_entry['slit_length'].attrs['units'] = det_info.slit_length_unit
                _write_h5_vector(detector_entry, det_info.offset)
                # NXcanSAS doesn't save information about pitch, only roll
                # and yaw. The _write_h5_vector method writes vector.y, but we
                # need to write vector.z for yaw
                det_info.orientation.y = det_info.orientation.z
                _write_h5_vector(detector_entry, det_info.orientation,
                    names=['polar_angle', 'azimuthal_angle'])
                _write_h5_vector(detector_entry, det_info.beam_center,
                    names=['beam_center_x', 'beam_center_y'],
                    write_fn=_write_h5_float, units=det_info.beam_center_unit)
                _write_h5_vector(detector_entry, det_info.pixel_size,
                    names=['x_pixel_size', 'y_pixel_size'],
                    write_fn=_write_h5_float, units=det_info.pixel_size_unit)

                i += 1
        else:
            # Create a blank one - at least 1 detector required by format
            detector_entry = instrument_entry.create_group('sasdetector01')
            detector_entry.attrs['canSAS_class'] = 'SASdetector'
            detector_entry.attrs['name'] = ''

        # Process metadata
        if len(data_info.process) > 0 and not data_info.process[0].is_empty():
            i = 1
            for process in data_info.process:
                process_entry = sasentry.create_group(
                    'sasprocess{0:0=2d}'.format(i))
                process_entry.attrs['canSAS_class'] = 'SASprocess'
                if process.name:
                    name = _h5_string(process.name)
                    process_entry.create_dataset('name', data=name)
                if process.date:
                    date = _h5_string(process.date)
                    process_entry.create_dataset('date', data=date)
                if process.description:
                    desc = _h5_string(process.description)
                    process_entry.create_dataset('description', data=desc)
                j = 1
                for term in process.term:
                    if term:
                        h5_term = _h5_string(term)
                        process_entry.create_dataset('term{0:0=2d}'.format(j),
                            data=h5_term)
                    j += 1
                j = 1
                for note in process.notes:
                    if note:
                        h5_note = _h5_string(note)
                        process_entry.create_dataset('note{0:0=2d}'.format(j),
                            data=h5_note)
                    j += 1
                i += 1

        # Transmission Spectrum
        if len(data_info.trans_spectrum) > 0:
            i = 1
            for trans in data_info.trans_spectrum:
                trans_entry = sasentry.create_group(
                    'sastransmission_spectrum{0:0=2d}'.format(i))
                trans_entry.attrs['canSAS_class'] = 'SAStransmission_spectrum'
                trans_entry.attrs['signal'] = 'T'
                trans_entry.attrs['T_axes'] = 'T'
                trans_entry.attrs['name'] = trans.name
                if trans.timestamp != '':
                    trans_entry.attrs['timestamp'] = trans.timestamp
                transmission = trans_entry.create_dataset(
                    'T', data=trans.transmission)
                transmission.attrs['uncertainties'] = 'Tdev'
                trans_entry.create_dataset('Tdev',
                    data=trans.transmission_deviation)
                trans_entry.create_dataset('lambda', data=trans.wavelength)
                i += 1

        note_entry = sasentry.create_group('sasnote')
        note_entry.attrs['canSAS_class'] = 'SASnote'
        notes = None
        if len(data_info.notes) > 1:
            notes = [np.string_(n) for n in data_info.notes]
            notes = np.array(notes)
        elif data_info.notes != []:
            notes = _h5_string(data_info.notes[0])
        if notes is not None:
            note_entry.create_dataset('SASnote', data=notes)

        f.close()

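    # For reference, the group layout produced by write() for a single 1D
    # data set is roughly:
    #
    #   sasentry01                  SASentry   [definition, run, title]
    #     sasdata01                 SASdata    [Q, I, optionally Idev, dQ or dQl/dQw]
    #     sassample                 SASsample
    #     sasinstrument             SASinstrument
    #       sassource               SASsource
    #       sascollimation01        SAScollimation
    #       sasdetector01           SASdetector
    #     sasprocessNN              SASprocess  (only if process metadata exists)
    #     sastransmission_spectrumNN            (only if transmission spectra exist)
    #     sasnote                   SASnote
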
    def _write_1d_data(self, data_obj, data_entry):
        """
        Writes the contents of a Data1D object to a SASdata h5py Group

        :param data_obj: A Data1D object to write to the file
        :param data_entry: A h5py Group object representing the SASdata
        """
        data_entry.attrs['signal'] = 'I'
        data_entry.attrs['I_axes'] = 'Q'
        data_entry.attrs['Q_indices'] = [0]
        q_entry = data_entry.create_dataset('Q', data=data_obj.x)
        q_entry.attrs['units'] = data_obj.x_unit
        i_entry = data_entry.create_dataset('I', data=data_obj.y)
        i_entry.attrs['units'] = data_obj.y_unit
        if data_obj.dy is not None:
            i_entry.attrs['uncertainties'] = 'Idev'
            i_dev_entry = data_entry.create_dataset('Idev', data=data_obj.dy)
            i_dev_entry.attrs['units'] = data_obj.y_unit
        if data_obj.dx is not None:
            q_entry.attrs['resolutions'] = 'dQ'
            dq_entry = data_entry.create_dataset('dQ', data=data_obj.dx)
            dq_entry.attrs['units'] = data_obj.x_unit
        elif data_obj.dxl is not None:
            q_entry.attrs['resolutions'] = ['dQl', 'dQw']
            dql_entry = data_entry.create_dataset('dQl', data=data_obj.dxl)
            dql_entry.attrs['units'] = data_obj.x_unit
            dqw_entry = data_entry.create_dataset('dQw', data=data_obj.dxw)
            dqw_entry.attrs['units'] = data_obj.x_unit

    def _write_2d_data(self, data, data_entry):
        """
        Writes the contents of a Data2D object to a SASdata h5py Group

        :param data: A Data2D object to write to the file
        :param data_entry: A h5py Group object representing the SASdata
        """
        data_entry.attrs['signal'] = 'I'
        data_entry.attrs['I_axes'] = 'Qx,Qy'
        data_entry.attrs['Q_indices'] = [0, 1]

        (n_rows, n_cols) = (len(data.y_bins), len(data.x_bins))

        if n_rows == 0 and n_cols == 0:
            # Calculate rows and columns, assuming detector is square
            # Same logic as used in PlotPanel.py _get_bins
            n_cols = int(np.floor(np.sqrt(len(data.qy_data))))
            n_rows = int(np.floor(len(data.qy_data) / n_cols))

            if n_rows * n_cols != len(data.qy_data):
                raise ValueError("Unable to calculate dimensions of 2D data")

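        # For example, 144 points give n_cols = floor(sqrt(144)) = 12 and
        # n_rows = floor(144 / 12) = 12, so the reshape below is valid;
        # 10 points give 3 * 3 = 9 != 10 and raise the ValueError above.
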
        intensity = np.reshape(data.data, (n_rows, n_cols))
        qx = np.reshape(data.qx_data, (n_rows, n_cols))
        qy = np.reshape(data.qy_data, (n_rows, n_cols))

        i_entry = data_entry.create_dataset('I', data=intensity)
        i_entry.attrs['units'] = data.I_unit
        qx_entry = data_entry.create_dataset('Qx', data=qx)
        qx_entry.attrs['units'] = data.Q_unit
        qy_entry = data_entry.create_dataset('Qy', data=qy)
        qy_entry.attrs['units'] = data.Q_unit
        if data.err_data is not None and not all(data.err_data == [None]):
            d_i = np.reshape(data.err_data, (n_rows, n_cols))
            i_entry.attrs['uncertainties'] = 'Idev'
            i_dev_entry = data_entry.create_dataset('Idev', data=d_i)
            i_dev_entry.attrs['units'] = data.I_unit
        if data.dqx_data is not None and not all(data.dqx_data == [None]):
            qx_entry.attrs['resolutions'] = 'dQx'
            dqx_entry = data_entry.create_dataset('dQx', data=data.dqx_data)
            dqx_entry.attrs['units'] = data.Q_unit
        if data.dqy_data is not None and not all(data.dqy_data == [None]):
            qy_entry.attrs['resolutions'] = 'dQy'
            dqy_entry = data_entry.create_dataset('dQy', data=data.dqy_data)
            dqy_entry.attrs['units'] = data.Q_unit