Changeset c222c27 in sasview
- Timestamp:
- Nov 15, 2018 2:09:18 PM (6 years ago)
- Branches:
- master, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249
- Children:
- 9220e89c
- Parents:
- a165bee (diff), f560e23 (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent. - Files:
-
- 7 added
- 8 deleted
- 32 edited
- 7 moved
Legend:
- Unmodified
- Added
- Removed
-
run.py
rbc8b8a1 r952ea1f 67 67 68 68 69 def prepare( ):69 def prepare(rebuild=True): 70 70 # Don't create *.pyc files 71 71 sys.dont_write_bytecode = True … … 95 95 try: 96 96 import periodictable 97 except :97 except ImportError: 98 98 addpath(joinpath(root, '..', 'periodictable')) 99 99 100 100 try: 101 101 import bumps 102 except :102 except ImportError: 103 103 addpath(joinpath(root, '..', 'bumps')) 104 104 105 105 try: 106 106 import tinycc 107 except :107 except ImportError: 108 108 addpath(joinpath(root, '../tinycc/build/lib')) 109 109 … … 111 111 #addpath(os.path.join(root, '..','wxPython-src-3.0.0.0','wxPython')) 112 112 113 # Build project if the build directory does not already exist. 114 # PAK: with "update" we can always build since it is fast 115 if True or not os.path.exists(build_path): 113 # Put the sas source tree on the path 114 addpath(joinpath(root, 'src')) 115 116 # Put sasmodels on the path 117 addpath(joinpath(root, '../sasmodels/')) 118 119 # Check if the C extensions are already built 120 try: 121 from sas.sascalc.pr import _pr_inversion 122 from sas.sascalc.calculator import _sld2i 123 from sas.sascalc.file_converter import _bsl_loader 124 except ImportError: 125 rebuild = True 126 127 # Build C extensions if necessary. Do an inplace build to simplify path. 128 if rebuild: 116 129 import subprocess 117 build_cmd = [sys.executable, "setup.py", "build ", "update"]130 build_cmd = [sys.executable, "setup.py", "build_ext", "--inplace", "update"] 118 131 if os.name == 'nt': 119 132 build_cmd.append('--compiler=tinycc') … … 122 135 with cd(root): 123 136 subprocess.call(build_cmd, shell=shell) 124 125 # Put the source trees on the path126 addpath(joinpath(root, 'src'))127 128 # sasmodels on the path129 addpath(joinpath(root, '../sasmodels/'))130 131 # The sas.models package Compiled Model files should be pulled in from the build directory even though132 # the source is stored in src/sas/models.133 134 # Compiled modules need to be pulled from the build directory.135 # Some packages are not where they are needed, so load them explicitly.136 import sas.sascalc.pr137 sas.sascalc.pr.core = import_package('sas.sascalc.pr.core',138 joinpath(build_path, 'sas', 'sascalc', 'pr', 'core'))139 140 # Compiled modules need to be pulled from the build directory.141 # Some packages are not where they are needed, so load them explicitly.142 import sas.sascalc.file_converter143 sas.sascalc.file_converter.core = import_package('sas.sascalc.file_converter.core',144 joinpath(build_path, 'sas', 'sascalc', 'file_converter', 'core'))145 146 import sas.sascalc.calculator147 sas.sascalc.calculator.core = import_package('sas.sascalc.calculator.core',148 joinpath(build_path, 'sas', 'sascalc', 'calculator', 'core'))149 150 sys.path.append(build_path)151 137 152 138 set_git_tag() -
setup.py
rc16172d r952ea1f 12 12 import shutil 13 13 import sys 14 from distutils.command.build_ext import build_ext15 from distutils.core import Command16 14 17 15 import numpy as np 16 18 17 from setuptools import Extension, setup 18 from setuptools import Command 19 from setuptools.command.build_ext import build_ext 19 20 20 21 try: … … 22 23 except ImportError: 23 24 pass 25 26 # Convert "test" argument to "pytest" so 'python setup.py test' works 27 sys.argv = [("pytest" if s == "test" else s) for s in sys.argv] 24 28 25 29 # Manage version number ###################################### … … 246 250 # sas.sascalc.calculator 247 251 gen_dir = os.path.join("src", "sas", "sascalc", "calculator", "c_extensions") 248 package_dir["sas.sascalc.calculator.core"] = gen_dir249 252 package_dir["sas.sascalc.calculator"] = os.path.join( 250 253 "src", "sas", "sascalc", "calculator") 251 packages. extend(["sas.sascalc.calculator", "sas.sascalc.calculator.core"])252 ext_modules.append(Extension("sas.sascalc.calculator. core.sld2i",254 packages.append("sas.sascalc.calculator") 255 ext_modules.append(Extension("sas.sascalc.calculator._sld2i", 253 256 sources=[ 254 257 os.path.join(gen_dir, "sld2i_module.c"), … … 258 261 ], 259 262 include_dirs=[gen_dir], 260 ) 261 ) 263 )) 262 264 263 265 # sas.sascalc.pr 264 266 srcdir = os.path.join("src", "sas", "sascalc", "pr", "c_extensions") 265 package_dir["sas.sascalc.pr.core"] = srcdir266 267 package_dir["sas.sascalc.pr"] = os.path.join("src", "sas", "sascalc", "pr") 267 packages. extend(["sas.sascalc.pr", "sas.sascalc.pr.core"])268 ext_modules.append(Extension("sas.sascalc.pr. core.pr_inversion",268 packages.append("sas.sascalc.pr") 269 ext_modules.append(Extension("sas.sascalc.pr._pr_inversion", 269 270 sources=[os.path.join(srcdir, "Cinvertor.c"), 270 271 os.path.join(srcdir, "invertor.c"), … … 276 277 # sas.sascalc.file_converter 277 278 mydir = os.path.join("src", "sas", "sascalc", "file_converter", "c_ext") 278 package_dir["sas.sascalc.file_converter.core"] = mydir279 279 package_dir["sas.sascalc.file_converter"] = os.path.join( 280 280 "src", "sas", "sascalc", "file_converter") 281 packages.extend(["sas.sascalc.file_converter", 282 "sas.sascalc.file_converter.core"]) 283 ext_modules.append(Extension("sas.sascalc.file_converter.core.bsl_loader", 281 packages.append("sas.sascalc.file_converter") 282 ext_modules.append(Extension("sas.sascalc.file_converter._bsl_loader", 284 283 sources=[os.path.join(mydir, "bsl_loader.c")], 285 284 include_dirs=[np.get_include()], … … 443 442 cmdclass={'build_ext': build_ext_subclass, 444 443 'docs': BuildSphinxCommand, 445 'disable_openmp': DisableOpenMPCommand} 444 'disable_openmp': DisableOpenMPCommand}, 445 setup_requires=['pytest-runner'] if 'pytest' in sys.argv else [], 446 tests_require=['pytest'], 446 447 ) -
src/sas/sascalc/calculator/c_extensions/sld2i_module.c
ra1daf86 r7ba6470 2 2 SLD2I module to perform point and I calculations 3 3 */ 4 #include <stdio.h> 5 6 //#define Py_LIMITED_API 0x03020000 4 7 #include <Python.h> 5 #include <stdio.h> 8 6 9 #include "sld2i.h" 7 10 … … 13 16 #endif 14 17 15 16 // Utilities 17 #define INVECTOR(obj,buf,len) \ 18 do { \ 19 int err = PyObject_AsReadBuffer(obj, (const void **)(&buf), &len); \ 20 if (err < 0) return NULL; \ 21 len /= sizeof(*buf); \ 22 } while (0) 23 24 #define OUTVECTOR(obj,buf,len) \ 25 do { \ 26 int err = PyObject_AsWriteBuffer(obj, (void **)(&buf), &len); \ 27 if (err < 0) return NULL; \ 28 len /= sizeof(*buf); \ 29 } while (0) 30 18 // Vector binding glue 19 #if (PY_VERSION_HEX > 0x03000000) && !defined(Py_LIMITED_API) 20 // Assuming that a view into a writable vector points to a 21 // non-changing pointer for the duration of the C call, capture 22 // the view pointer and immediately free the view. 23 #define VECTOR(VEC_obj, VEC_buf, VEC_len) do { \ 24 Py_buffer VEC_view; \ 25 int VEC_err = PyObject_GetBuffer(VEC_obj, &VEC_view, PyBUF_WRITABLE|PyBUF_FORMAT); \ 26 if (VEC_err < 0 || sizeof(*VEC_buf) != VEC_view.itemsize) return NULL; \ 27 VEC_buf = (typeof(VEC_buf))VEC_view.buf; \ 28 VEC_len = VEC_view.len/sizeof(*VEC_buf); \ 29 PyBuffer_Release(&VEC_view); \ 30 } while (0) 31 #else 32 #define VECTOR(VEC_obj, VEC_buf, VEC_len) do { \ 33 int VEC_err = PyObject_AsWriteBuffer(VEC_obj, (void **)(&VEC_buf), &VEC_len); \ 34 if (VEC_err < 0) return NULL; \ 35 VEC_len /= sizeof(*VEC_buf); \ 36 } while (0) 37 #endif 31 38 32 39 /** … … 73 80 //printf("new GenI\n"); 74 81 if (!PyArg_ParseTuple(args, "iOOOOOOOOddd", &is_avg, &x_val_obj, &y_val_obj, &z_val_obj, &sldn_val_obj, &mx_val_obj, &my_val_obj, &mz_val_obj, &vol_pix_obj, &inspin, &outspin, &stheta)) return NULL; 75 INVECTOR(x_val_obj, x_val, n_x);76 INVECTOR(y_val_obj, y_val, n_y);77 INVECTOR(z_val_obj, z_val, n_z);78 INVECTOR(sldn_val_obj, sldn_val, n_sld);79 INVECTOR(mx_val_obj, mx_val, n_mx);80 INVECTOR(my_val_obj, my_val, n_my);81 INVECTOR(mz_val_obj, mz_val, n_mz);82 INVECTOR(vol_pix_obj, vol_pix, n_vol_pix);82 VECTOR(x_val_obj, x_val, n_x); 83 VECTOR(y_val_obj, y_val, n_y); 84 VECTOR(z_val_obj, z_val, n_z); 85 VECTOR(sldn_val_obj, sldn_val, n_sld); 86 VECTOR(mx_val_obj, mx_val, n_mx); 87 VECTOR(my_val_obj, my_val, n_my); 88 VECTOR(mz_val_obj, mz_val, n_mz); 89 VECTOR(vol_pix_obj, vol_pix, n_vol_pix); 83 90 sld2i = PyMem_Malloc(sizeof(GenI)); 84 91 //printf("sldi:%p\n", sld2i); … … 108 115 if (!PyArg_ParseTuple(args, "OOOO", &gen_obj, &qx_obj, &qy_obj, &I_out_obj)) return NULL; 109 116 sld2i = (GenI *)PyCapsule_GetPointer(gen_obj, "GenI"); 110 INVECTOR(qx_obj, qx, n_qx);111 INVECTOR(qy_obj, qy, n_qy);112 OUTVECTOR(I_out_obj, I_out, n_out);117 VECTOR(qx_obj, qx, n_qx); 118 VECTOR(qy_obj, qy, n_qy); 119 VECTOR(I_out_obj, I_out, n_out); 113 120 //printf("qx, qy, I_out: %d %d %d, %d %d %d\n", qx, qy, I_out, n_qx, n_qy, n_out); 114 121 … … 136 143 if (!PyArg_ParseTuple(args, "OOO", &gen_obj, &q_obj, &I_out_obj)) return NULL; 137 144 sld2i = (GenI *)PyCapsule_GetPointer(gen_obj, "GenI"); 138 INVECTOR(q_obj, q, n_q);139 OUTVECTOR(I_out_obj, I_out, n_out);145 VECTOR(q_obj, q, n_q); 146 VECTOR(I_out_obj, I_out, n_out); 140 147 141 148 // Sanity check … … 160 167 161 168 #define MODULE_DOC "Sld2i C Library" 162 #define MODULE_NAME " sld2i"163 #define MODULE_INIT2 init sld2i164 #define MODULE_INIT3 PyInit_ sld2i169 #define MODULE_NAME "_sld2i" 170 #define MODULE_INIT2 init_sld2i 171 #define MODULE_INIT3 PyInit__sld2i 165 172 #define MODULE_METHODS 
module_methods 166 173 -
src/sas/sascalc/calculator/sas_gen.py
r144e032a r952ea1f 14 14 import numpy as np 15 15 16 from . core import sld2i as mod16 from . import _sld2i 17 17 from .BaseComponent import BaseComponent 18 18 … … 145 145 self.params['Up_frac_out'], 146 146 self.params['Up_theta']) 147 model = mod.new_GenI(*args)147 model = _sld2i.new_GenI(*args) 148 148 if len(qy): 149 149 qx, qy = _vec(qx), _vec(qy) 150 150 I_out = np.empty_like(qx) 151 151 #print("npoints", qx.shape, "npixels", pos_x.shape) 152 mod.genicomXY(model, qx, qy, I_out)152 _sld2i.genicomXY(model, qx, qy, I_out) 153 153 #print("I_out after", I_out) 154 154 else: 155 155 qx = _vec(qx) 156 156 I_out = np.empty_like(qx) 157 mod.genicom(model, qx, I_out)157 _sld2i.genicom(model, qx, I_out) 158 158 vol_correction = self.data_total_volume / self.params['total_volume'] 159 159 result = (self.params['scale'] * vol_correction * I_out … … 304 304 z_dir2 *= z_dir2 305 305 mask = (x_dir2 + y_dir2 + z_dir2) <= 1.0 306 except Exception :307 logger.error( sys.exc_value)306 except Exception as exc: 307 logger.error(exc) 308 308 self.output = MagSLD(self.pos_x[mask], self.pos_y[mask], 309 309 self.pos_z[mask], self.sld_n[mask], … … 600 600 y_lines.append(y_line) 601 601 z_lines.append(z_line) 602 except Exception :603 logger.error( sys.exc_value)602 except Exception as exc: 603 logger.error(exc) 604 604 605 605 output = MagSLD(pos_x, pos_y, pos_z, sld_n, sld_mx, sld_my, sld_mz) … … 691 691 _vol_pix = float(toks[7]) 692 692 vol_pix = np.append(vol_pix, _vol_pix) 693 except Exception :693 except Exception as exc: 694 694 vol_pix = None 695 except Exception :695 except Exception as exc: 696 696 # Skip non-data lines 697 logger.error( sys.exc_value)697 logger.error(exc) 698 698 output = MagSLD(pos_x, pos_y, pos_z, sld_n, 699 699 sld_mx, sld_my, sld_mz) -
src/sas/sascalc/dataloader/file_reader_base_class.py
r9b08354 rc222c27 241 241 data.xmax = np.max(data.qx_data) 242 242 data.ymin = np.min(data.qy_data) 243 data.ymax = np.max(data.q x_data)243 data.ymax = np.max(data.qy_data) 244 244 245 245 @staticmethod -
src/sas/sascalc/file_converter/bsl_loader.py
rf00691d4 r952ea1f 1 from sas.sascalc.file_converter. core.bsl_loader import CLoader1 from sas.sascalc.file_converter._bsl_loader import CLoader 2 2 from sas.sascalc.dataloader.data_info import Data2D 3 3 from copy import deepcopy … … 67 67 'swap_bytes': int(metadata[3]) 68 68 } 69 except :69 except Exception: 70 70 is_valid = False 71 71 err_msg = "Invalid metadata in header file for {}" -
src/sas/sascalc/file_converter/c_ext/bsl_loader.c
rd5aeaa3 r952ea1f 1 #include <stdio.h> 2 #include <stdlib.h> 3 4 //#define Py_LIMITED_API 0x03020000 1 5 #include <Python.h> 6 #include <structmember.h> 2 7 #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION 3 8 #include <numpy/arrayobject.h> 4 #include <stdio.h> 5 #include <stdlib.h> 6 #include "structmember.h" 9 7 10 #include "bsl_loader.h" 8 11 … … 292 295 293 296 #define MODULE_DOC "C module for loading bsl." 294 #define MODULE_NAME " bsl_loader"295 #define MODULE_INIT2 init bsl_loader296 #define MODULE_INIT3 PyInit_ bsl_loader297 #define MODULE_NAME "_bsl_loader" 298 #define MODULE_INIT2 init_bsl_loader 299 #define MODULE_INIT3 PyInit__bsl_loader 297 300 #define MODULE_METHODS module_methods 298 301 -
src/sas/sascalc/pr/c_extensions/Cinvertor.c
ra52f32f r7ba6470 5 5 * 6 6 */ 7 #include <Python.h>8 #include "structmember.h"9 7 #include <stdio.h> 10 8 #include <stdlib.h> … … 12 10 #include <time.h> 13 11 12 //#define Py_LIMITED_API 0x03050000 13 #include <Python.h> 14 #include <structmember.h> 15 16 // Vector binding glue 17 #if (PY_VERSION_HEX > 0x03000000) && !defined(Py_LIMITED_API) 18 // Assuming that a view into a writable vector points to a 19 // non-changing pointer for the duration of the C call, capture 20 // the view pointer and immediately free the view. 21 #define VECTOR(VEC_obj, VEC_buf, VEC_len) do { \ 22 Py_buffer VEC_view; \ 23 int VEC_err = PyObject_GetBuffer(VEC_obj, &VEC_view, PyBUF_WRITABLE|PyBUF_FORMAT); \ 24 if (VEC_err < 0 || sizeof(*VEC_buf) != VEC_view.itemsize) return NULL; \ 25 VEC_buf = (typeof(VEC_buf))VEC_view.buf; \ 26 VEC_len = VEC_view.len/sizeof(*VEC_buf); \ 27 PyBuffer_Release(&VEC_view); \ 28 } while (0) 29 #else 30 #define VECTOR(VEC_obj, VEC_buf, VEC_len) do { \ 31 int VEC_err = PyObject_AsWriteBuffer(VEC_obj, (void **)(&VEC_buf), &VEC_len); \ 32 if (VEC_err < 0) return NULL; \ 33 VEC_len /= sizeof(*VEC_buf); \ 34 } while (0) 35 #endif 36 14 37 #include "invertor.h" 15 16 38 17 39 /// Error object for raised exceptions 18 40 PyObject * CinvertorError; 19 20 #define INVECTOR(obj,buf,len) \21 do { \22 int err = PyObject_AsReadBuffer(obj, (const void **)(&buf), &len); \23 if (err < 0) return NULL; \24 len /= sizeof(*buf); \25 } while (0)26 27 #define OUTVECTOR(obj,buf,len) \28 do { \29 int err = PyObject_AsWriteBuffer(obj, (void **)(&buf), &len); \30 if (err < 0) return NULL; \31 len /= sizeof(*buf); \32 } while (0)33 34 41 35 42 // Class definition … … 99 106 100 107 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 101 OUTVECTOR(data_obj,data,ndata);108 VECTOR(data_obj,data,ndata); 102 109 103 110 free(self->params.x); … … 131 138 132 139 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 133 OUTVECTOR(data_obj, data, ndata);140 VECTOR(data_obj, data, ndata); 134 141 135 142 // Check that the input array is large enough … … 164 171 165 172 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 166 OUTVECTOR(data_obj,data,ndata);173 VECTOR(data_obj,data,ndata); 167 174 168 175 free(self->params.y); … … 196 203 197 204 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 198 OUTVECTOR(data_obj, data, ndata);205 VECTOR(data_obj, data, ndata); 199 206 200 207 // Check that the input array is large enough … … 229 236 230 237 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 231 OUTVECTOR(data_obj,data,ndata);238 VECTOR(data_obj,data,ndata); 232 239 233 240 free(self->params.err); … … 261 268 262 269 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 263 OUTVECTOR(data_obj, data, ndata);270 VECTOR(data_obj, data, ndata); 264 271 265 272 // Check that the input array is large enough … … 517 524 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 518 525 519 OUTVECTOR(data_obj,pars,npars);526 VECTOR(data_obj,pars,npars); 520 527 521 528 // PyList of residuals … … 568 575 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 569 576 570 OUTVECTOR(data_obj,pars,npars);577 VECTOR(data_obj,pars,npars); 571 578 572 579 // Should create this list only once and refill it … … 609 616 610 617 if (!PyArg_ParseTuple(args, "Od", &data_obj, &q)) return NULL; 611 OUTVECTOR(data_obj,pars,npars);618 VECTOR(data_obj,pars,npars); 612 619 613 620 iq_value = iq(pars, self->params.d_max, (int)npars, q); … … 634 641 635 642 if (!PyArg_ParseTuple(args, "Od", &data_obj, &q)) 
return NULL; 636 OUTVECTOR(data_obj,pars,npars);643 VECTOR(data_obj,pars,npars); 637 644 638 645 iq_value = iq_smeared(pars, self->params.d_max, (int)npars, … … 659 666 660 667 if (!PyArg_ParseTuple(args, "Od", &data_obj, &r)) return NULL; 661 OUTVECTOR(data_obj,pars,npars);668 VECTOR(data_obj,pars,npars); 662 669 663 670 pr_value = pr(pars, self->params.d_max, (int)npars, r); … … 686 693 687 694 if (!PyArg_ParseTuple(args, "OOd", &data_obj, &err_obj, &r)) return NULL; 688 OUTVECTOR(data_obj,pars,npars);695 VECTOR(data_obj,pars,npars); 689 696 690 697 if (err_obj == Py_None) { … … 692 699 pr_err_value = 0.0; 693 700 } else { 694 OUTVECTOR(err_obj,pars_err,npars2);701 VECTOR(err_obj,pars_err,npars2); 695 702 pr_err(pars, pars_err, self->params.d_max, (int)npars, r, &pr_value, &pr_err_value); 696 703 } … … 726 733 727 734 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 728 OUTVECTOR(data_obj,pars,npars);735 VECTOR(data_obj,pars,npars); 729 736 730 737 oscill = reg_term(pars, self->params.d_max, (int)npars, 100); … … 747 754 748 755 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 749 OUTVECTOR(data_obj,pars,npars);756 VECTOR(data_obj,pars,npars); 750 757 751 758 count = npeaks(pars, self->params.d_max, (int)npars, 100); … … 768 775 769 776 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 770 OUTVECTOR(data_obj,pars,npars);777 VECTOR(data_obj,pars,npars); 771 778 772 779 fraction = positive_integral(pars, self->params.d_max, (int)npars, 100); … … 792 799 793 800 if (!PyArg_ParseTuple(args, "OO", &data_obj, &err_obj)) return NULL; 794 OUTVECTOR(data_obj,pars,npars);795 OUTVECTOR(err_obj,pars_err,npars2);801 VECTOR(data_obj,pars,npars); 802 VECTOR(err_obj,pars_err,npars2); 796 803 797 804 fraction = positive_errors(pars, pars_err, self->params.d_max, (int)npars, 51); … … 813 820 814 821 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 815 OUTVECTOR(data_obj,pars,npars);822 VECTOR(data_obj,pars,npars); 816 823 817 824 value = rg(pars, self->params.d_max, (int)npars, 101); … … 833 840 834 841 if (!PyArg_ParseTuple(args, "O", &data_obj)) return NULL; 835 OUTVECTOR(data_obj,pars,npars);842 VECTOR(data_obj,pars,npars); 836 843 837 844 value = 4.0*acos(-1.0)*int_pr(pars, self->params.d_max, (int)npars, 101); … … 874 881 875 882 if (!PyArg_ParseTuple(args, "iiOO", &nfunc, &nr, &a_obj, &b_obj)) return NULL; 876 OUTVECTOR(a_obj,a,n_a);877 OUTVECTOR(b_obj,b,n_b);883 VECTOR(a_obj,a,n_a); 884 VECTOR(b_obj,b,n_b); 878 885 879 886 assert(n_b>=nfunc); … … 947 954 948 955 if (!PyArg_ParseTuple(args, "iiOO", &nfunc, &nr, &a_obj, &cov_obj)) return NULL; 949 OUTVECTOR(a_obj,a,n_a);950 OUTVECTOR(cov_obj,inv_cov,n_cov);956 VECTOR(a_obj,a,n_a); 957 VECTOR(cov_obj,inv_cov,n_cov); 951 958 952 959 assert(n_cov>=nfunc*nfunc); … … 981 988 982 989 if (!PyArg_ParseTuple(args, "iiO", &nfunc, &nr, &a_obj)) return NULL; 983 OUTVECTOR(a_obj,a,n_a);990 VECTOR(a_obj,a,n_a); 984 991 985 992 assert(n_a>=nfunc*(nr+self->params.npoints)); … … 1121 1128 1122 1129 #define MODULE_DOC "C extension module for inversion to P(r)." 1123 #define MODULE_NAME " pr_inversion"1124 #define MODULE_INIT2 init pr_inversion1125 #define MODULE_INIT3 PyInit_ pr_inversion1130 #define MODULE_NAME "_pr_inversion" 1131 #define MODULE_INIT2 init_pr_inversion 1132 #define MODULE_INIT3 PyInit__pr_inversion 1126 1133 #define MODULE_METHODS module_methods 1127 1134 -
src/sas/sascalc/pr/fit/BumpsFitting.py
r9a5097c r3e6829d 2 2 BumpsFitting module runs the bumps optimizer. 3 3 """ 4 from __future__ import division 5 4 6 import os 5 7 from datetime import timedelta, datetime … … 34 36 class Progress(object): 35 37 def __init__(self, history, max_step, pars, dof): 36 remaining_time = int(history.time[0]*( float(max_step)/history.step[0]-1))38 remaining_time = int(history.time[0]*(max_step/history.step[0]-1)) 37 39 # Depending on the time remaining, either display the expected 38 40 # time of completion, or the amount of time remaining. Use precision -
src/sas/sascalc/pr/fit/Loader.py
r574adc7 r57e48ca 1 """ 2 class Loader to load any kind of file 3 """ 4 1 5 from __future__ import print_function 2 6 3 # class Loader to load any king of file4 #import wx5 #import string6 7 import numpy as np 7 8 -
src/sas/sascalc/pr/invertor.py
r2469df7 r57e48ca 6 6 FIXME: The way the Invertor interacts with its C component should be cleaned up 7 7 """ 8 from __future__ import division 8 9 9 10 import numpy as np … … 17 18 from numpy.linalg import lstsq 18 19 from scipy import optimize 19 from sas.sascalc.pr. core.pr_inversion import Cinvertor20 from sas.sascalc.pr._pr_inversion import Cinvertor 20 21 21 22 logger = logging.getLogger(__name__) … … 71 72 A[j][i] = (Fourier transformed base function for point j) 72 73 73 We the mchoose a number of r-points, n_r, to evaluate the second74 We then choose a number of r-points, n_r, to evaluate the second 74 75 derivative of P(r) at. This is used as our regularization term. 75 76 For a vector r of length n_r, the following n_r rows are set to :: … … 144 145 x, y, err, d_max, q_min, q_max and alpha 145 146 """ 146 if 147 if name == 'x': 147 148 if 0.0 in value: 148 149 msg = "Invertor: one of your q-values is zero. " … … 268 269 A[i][j] = (Fourier transformed base function for point j) 269 270 270 We the mchoose a number of r-points, n_r, to evaluate the second271 We then choose a number of r-points, n_r, to evaluate the second 271 272 derivative of P(r) at. This is used as our regularization term. 272 273 For a vector r of length n_r, the following n_r rows are set to :: … … 416 417 A[i][j] = (Fourier transformed base function for point j) 417 418 418 We the mchoose a number of r-points, n_r, to evaluate the second419 We then choose a number of r-points, n_r, to evaluate the second 419 420 derivative of P(r) at. This is used as our regularization term. 420 421 For a vector r of length n_r, the following n_r rows are set to :: … … 473 474 474 475 # Perform the inversion (least square fit) 475 c, chi2, _, _ = lstsq(a, b )476 c, chi2, _, _ = lstsq(a, b, rcond=-1) 476 477 # Sanity check 477 478 try: … … 496 497 try: 497 498 cov = np.linalg.pinv(inv_cov) 498 err = math.fabs(chi2 / float(npts - nfunc)) * cov499 except :499 err = math.fabs(chi2 / (npts - nfunc)) * cov 500 except Exception as exc: 500 501 # We were not able to estimate the errors 501 502 # Return an empty error matrix 502 logger.error( sys.exc_value)503 logger.error(exc) 503 504 504 505 # Keep a copy of the last output … … 537 538 538 539 """ 539 from num_term import NTermEstimator540 from .num_term import NTermEstimator 540 541 estimator = NTermEstimator(self.clone()) 541 542 try: 542 543 return estimator.num_terms(isquit_func) 543 except :544 except Exception as exc: 544 545 # If we fail, estimate alpha and return the default 545 546 # number of terms 546 547 best_alpha, _, _ = self.estimate_alpha(self.nfunc) 547 logger.warning("Invertor.estimate_numterms: %s" % sys.exc_value)548 logger.warning("Invertor.estimate_numterms: %s" % exc) 548 549 return self.nfunc, best_alpha, "Could not estimate number of terms" 549 550 … … 631 632 return best_alpha, message, elapsed 632 633 633 except :634 message = "Invertor.estimate_alpha: %s" % sys.exc_value634 except Exception as exc: 635 message = "Invertor.estimate_alpha: %s" % exc 635 636 return 0, message, elapsed 636 637 … … 748 749 self.cov[i][i] = float(toks2[1]) 749 750 750 except :751 msg = "Invertor.from_file: corrupted file\n%s" % sys.exc_value751 except Exception as exc: 752 msg = "Invertor.from_file: corrupted file\n%s" % exc 752 753 raise RuntimeError(msg) 753 754 else: -
src/sas/sascalc/pr/num_term.py
r2469df7 r3e6829d 1 from __future__ import print_function 1 from __future__ import print_function, division 2 2 3 3 import math … … 51 51 osc = self.sort_osc() 52 52 dv = len(osc) 53 med = float(dv) / 2.053 med = 0.5*dv 54 54 odd = self.is_odd(dv) 55 55 medi = 0 … … 140 140 nts = self.compare_err() 141 141 div = len(nts) 142 tem = float(div) / 2.0142 tem = 0.5*div 143 143 if self.is_odd(div): 144 144 nt = nts[int(tem)] -
src/sas/sasgui/perspectives/fitting/gpu_options.py
r388aefb r8e109f9 8 8 ''' 9 9 10 import json 11 import platform 10 12 import logging 11 13 import os 12 14 import sys 15 13 16 import wx 17 18 try: 19 import pyopencl as cl 20 except ImportError: 21 cl = None 22 14 23 import sasmodels 24 import sasmodels.model_test 25 import sasmodels.kernelcl 26 15 27 from sas.sasgui.guiframe.documentation_window import DocumentationWindow 16 28 … … 169 181 clinfo = [] 170 182 platforms = [] 171 try: 172 import pyopencl as cl 173 platforms = cl.get_platforms() 174 except ImportError: 183 184 if cl is None: 175 185 logger.warn("Unable to import the pyopencl package. It may not " 176 186 "have been installed. If you wish to use OpenCL, try " 177 187 "running pip install --user pyopencl") 178 except cl.LogicError as err: 179 logger.warn("Unable to fetch the OpenCL platforms. This likely " 180 "means that the opencl drivers for your system are " 181 "not installed.") 182 logger.warn(err) 188 else: 189 try: 190 platforms = cl.get_platforms() 191 except cl.LogicError as err: 192 logger.warn("Unable to fetch the OpenCL platforms. This likely " 193 "means that the opencl drivers for your system are " 194 "not installed.") 195 logger.warn(err) 183 196 184 197 p_index = 0 … … 226 239 if "SAS_OPENCL" in os.environ: 227 240 del os.environ["SAS_OPENCL"] 228 229 #Sasmodels kernelcl doesn't exist when initiated with None 230 if 'sasmodels.kernelcl' in sys.modules: 231 sasmodels.kernelcl.ENV = None 232 233 reload(sasmodels.core) 241 sasmodels.kernelcl.reset_environment() 234 242 event.Skip() 235 243 … … 247 255 Run sasmodels check from here and report results from 248 256 """ 249 import json250 import platform251 #import sasmodels252 253 257 #The same block of code as for OK but it is needed if we want to have 254 258 #active response to Test button … … 261 265 if "SAS_OPENCL" in os.environ: 262 266 del os.environ["SAS_OPENCL"] 263 264 #Sasmodels kernelcl doesn't exist when initiated with None 265 if 'sasmodels.kernelcl' in sys.modules: 266 sasmodels.kernelcl.ENV = None 267 268 269 #Need to reload sasmodels.core module to account SAS_OPENCL = "None" 270 reload(sasmodels.core) 271 272 273 from sasmodels.model_test import model_tests 267 sasmodels.kernelcl.reset_environment() 274 268 275 269 try: 276 from sasmodels.kernelcl import environment 277 env = environment() 270 env = sasmodels.kernelcl.environment() 278 271 clinfo = [(ctx.devices[0].platform.vendor, 279 272 ctx.devices[0].platform.version, … … 282 275 ctx.devices[0].version) 283 276 for ctx in env.context] 284 except ImportError:277 except Exception: 285 278 clinfo = None 286 279 287 280 failures = [] 288 281 tests_completed = 0 289 for test in model_tests():282 for test in sasmodels.model_test.model_tests(): 290 283 try: 291 284 test() -
src/sas/sascalc/data_util/nxsunit.py
r574adc7 rb011ecb 136 136 sld = { '10^-6 Angstrom^-2': 1e-6, 'Angstrom^-2': 1 } 137 137 Q = { 'invA': 1, 'invAng': 1, 'invAngstroms': 1, '1/A': 1, 138 '1/Angstrom': 1, '1/angstrom': 1, 'A^{-1}': 1, 'cm^{-1}': 1e-8, 138 139 '10^-3 Angstrom^-1': 1e-3, '1/cm': 1e-8, '1/m': 1e-10, 139 'nm^ -1': 0.1, '1/nm': 0.1, 'n_m^-1': 0.1 }140 'nm^{-1}': 1, 'nm^-1': 0.1, '1/nm': 0.1, 'n_m^-1': 0.1 } 140 141 141 142 _caret_optional(sld) … … 157 158 # units for that particular dimension. 158 159 # Note: don't have support for dimensionless units. 159 unknown = {None:1, '???':1, '': 1, 'a.u.': 1 }160 unknown = {None:1, '???':1, '': 1, 'a.u.': 1, 'Counts': 1, 'counts': 1} 160 161 161 162 def __init__(self, name): -
src/sas/sascalc/dataloader/data_info.py
r9e6aeaf r4fdcc65 954 954 _str += "Data:\n" 955 955 _str += " Type: %s\n" % self.__class__.__name__ 956 _str += " X- & Y-axis: %s\t[%s]\n" % (self._yaxis, self._yunit) 956 _str += " X-axis: %s\t[%s]\n" % (self._xaxis, self._xunit) 957 _str += " Y-axis: %s\t[%s]\n" % (self._yaxis, self._yunit) 957 958 _str += " Z-axis: %s\t[%s]\n" % (self._zaxis, self._zunit) 958 959 _str += " Length: %g \n" % (len(self.data)) … … 983 984 qx_data=qx_data, qy_data=qy_data, 984 985 q_data=q_data, mask=mask) 986 987 clone._xaxis = self._xaxis 988 clone._yaxis = self._yaxis 989 clone._zaxis = self._zaxis 990 clone._xunit = self._xunit 991 clone._yunit = self._yunit 992 clone._zunit = self._zunit 993 clone.x_bins = self.x_bins 994 clone.y_bins = self.y_bins 985 995 986 996 clone.title = self.title … … 1153 1163 def combine_data_info_with_plottable(data, datainfo): 1154 1164 """ 1155 A function that combines the DataInfo data in self.current_datainto with a plottable_1D or 2D data object. 1165 A function that combines the DataInfo data in self.current_datainto with a 1166 plottable_1D or 2D data object. 1156 1167 1157 1168 :param data: A plottable_1D or plottable_2D data object … … 1171 1182 final_dataset.yaxis(data._yaxis, data._yunit) 1172 1183 elif isinstance(data, plottable_2D): 1173 final_dataset = Data2D(data.data, data.err_data, data.qx_data, data.qy_data, data.q_data, 1174 data.mask, data.dqx_data, data.dqy_data) 1184 final_dataset = Data2D(data.data, data.err_data, data.qx_data, 1185 data.qy_data, data.q_data, data.mask, 1186 data.dqx_data, data.dqy_data) 1175 1187 final_dataset.xaxis(data._xaxis, data._xunit) 1176 1188 final_dataset.yaxis(data._yaxis, data._yunit) 1177 1189 final_dataset.zaxis(data._zaxis, data._zunit) 1178 if len(data.data.shape) == 2: 1179 n_rows, n_cols = data.data.shape 1180 final_dataset.y_bins = data.qy_data[0::int(n_cols)] 1181 final_dataset.x_bins = data.qx_data[:int(n_cols)] 1190 final_dataset.y_bins = data.y_bins 1191 final_dataset.x_bins = data.x_bins 1182 1192 else: 1183 return_string = "Should Never Happen: _combine_data_info_with_plottable input is not a plottable1d or " + \ 1184 "plottable2d data object" 1193 return_string = ("Should Never Happen: _combine_data_info_with_plottabl" 1194 "e input is not a plottable1d or plottable2d data " 1195 "object") 1185 1196 return return_string 1186 1197 -
src/sas/sascalc/dataloader/loader.py
r4a8d55c rb1ec23d 367 367 try: 368 368 return fn(path, data) 369 except Exception: 370 pass # give other loaders a chance to succeed 371 # If we get here it is because all loaders failed 372 raise # reraises last exception 369 except Exception as exc: 370 msg = "Saving file {} using the {} writer failed.\n".format( 371 path, type(fn).__name__) 372 msg += str(exc) 373 logger.exception(msg) # give other loaders a chance to succeed 373 374 374 375 -
src/sas/sascalc/dataloader/readers/abs_reader.py
rbd5c3b1 r35ac8df 225 225 raise ValueError("ascii_reader: could not load file") 226 226 227 self.current_dataset = self.set_default_1d_units(self.current_dataset) 227 228 if data_conv_q is not None: 228 229 self.current_dataset.xaxis("\\rm{Q}", base_q_unit) 229 else:230 self.current_dataset.xaxis("\\rm{Q}", 'A^{-1}')231 230 if data_conv_i is not None: 232 231 self.current_dataset.yaxis("\\rm{Intensity}", base_i_unit) 233 else:234 self.current_dataset.yaxis("\\rm{Intensity}", "cm^{-1}")235 232 236 233 # Store loading process information -
src/sas/sascalc/dataloader/readers/ascii_reader.py
r9e6aeaf r3bab401 157 157 158 158 self.remove_empty_q_values() 159 self.current_dataset.xaxis("\\rm{Q}", 'A^{-1}') 160 self.current_dataset.yaxis("\\rm{Intensity}", "cm^{-1}") 159 self.current_dataset = self.set_default_1d_units(self.current_dataset) 161 160 162 161 # Store loading process information -
src/sas/sascalc/dataloader/readers/cansas_reader.py
r2469df7 r058f6c3 812 812 node.append(point) 813 813 self.write_node(point, "Q", datainfo.x[i], 814 {'unit': datainfo. x_unit})814 {'unit': datainfo._xunit}) 815 815 if len(datainfo.y) >= i: 816 816 self.write_node(point, "I", datainfo.y[i], 817 {'unit': datainfo. y_unit})817 {'unit': datainfo._yunit}) 818 818 if datainfo.dy is not None and len(datainfo.dy) > i: 819 819 self.write_node(point, "Idev", datainfo.dy[i], 820 {'unit': datainfo. y_unit})820 {'unit': datainfo._yunit}) 821 821 if datainfo.dx is not None and len(datainfo.dx) > i: 822 822 self.write_node(point, "Qdev", datainfo.dx[i], 823 {'unit': datainfo. x_unit})823 {'unit': datainfo._xunit}) 824 824 if datainfo.dxw is not None and len(datainfo.dxw) > i: 825 825 self.write_node(point, "dQw", datainfo.dxw[i], 826 {'unit': datainfo. x_unit})826 {'unit': datainfo._xunit}) 827 827 if datainfo.dxl is not None and len(datainfo.dxl) > i: 828 828 self.write_node(point, "dQl", datainfo.dxl[i], 829 {'unit': datainfo. x_unit})829 {'unit': datainfo._xunit}) 830 830 if datainfo.isSesans: 831 831 sesans_attrib = {'x_axis': datainfo._xaxis, -
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
r61f329f0 ra165bee 1 1 """ 2 CanSAS 2Ddata reader for reading HDF5 formatted CanSAS files.2 NXcanSAS data reader for reading HDF5 formatted CanSAS files. 3 3 """ 4 4 … … 12 12 Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \ 13 13 TransmissionSpectrum, Detector 14 from ..data_info import combine_data_info_with_plottable15 14 from ..loader_exceptions import FileContentsException, DefaultReaderException 16 15 from ..file_reader_base_class import FileReader, decode 17 16 17 18 18 def h5attr(node, key, default=None): 19 19 return decode(node.attrs.get(key, default)) 20 20 21 21 22 class Reader(FileReader): 22 23 """ 23 A class for reading in CanSAS v2.0 data files. The existing iteration opens24 Mantid generated HDF5 formatted files with file extension .h5/.H5. Any25 number of data sets may be present within the file and any dimensionality26 of data may be used. Currently 1D and 2D SAS data sets are supported, but27 future implementations will include 1D and 2D SESANS data.28 29 Any number of SASdata sets may be present in a SASentry and the data within 30 can be either 1D I(Q) or 2D I(Qx, Qy).31 32 Also supports reading NXcanSAS formatted HDF5 files24 A class for reading in NXcanSAS data files. The current implementation has 25 been tested to load data generated by multiple facilities, all of which are 26 known to produce NXcanSAS standards compliant data. Any number of data sets 27 may be present within the file and any dimensionality of data may be used. 28 Currently 1D and 2D SAS data sets are supported, but should be immediately 29 extensible to SESANS data. 30 31 Any number of SASdata groups may be present in a SASentry and the data 32 within each SASdata group can be a single 1D I(Q), multi-framed 1D I(Q), 33 2D I(Qx, Qy) or multi-framed 2D I(Qx, Qy). 33 34 34 35 :Dependencies: 35 The CanSAS HDF5 reader requires h5py => v2.5.0 or later.36 The NXcanSAS HDF5 reader requires h5py => v2.5.0 or later. 
36 37 """ 37 38 38 39 # CanSAS version 39 40 cansas_version = 2.0 40 # Logged warnings or messages41 logging = None42 # List of errors for the current data set43 errors = None44 # Raw file contents to be processed45 raw_data = None46 # List of plottable1D objects that should be linked to the current_datainfo47 data1d = None48 # List of plottable2D objects that should be linked to the current_datainfo49 data2d = None50 41 # Data type name 51 type_name = " CanSAS 2.0"42 type_name = "NXcanSAS" 52 43 # Wildcards 53 type = [" CanSAS 2.0 HDF5 Files (*.h5)|*.h5"]44 type = ["NXcanSAS HDF5 Files (*.h5)|*.h5|"] 54 45 # List of allowed extensions 55 46 ext = ['.h5', '.H5'] … … 81 72 except Exception as e: 82 73 if extension not in self.ext: 83 msg = "CanSAS2.0 HDF5 Reader could not load file {}".format(basename + extension) 74 msg = "NXcanSAS Reader could not load file {}".format( 75 basename + extension) 84 76 raise DefaultReaderException(msg) 85 77 raise FileContentsException(e.message) … … 95 87 self.raw_data.close() 96 88 97 for dataset in self.output: 98 if isinstance(dataset, Data1D): 99 if dataset.x.size < 5: 100 self.output = [] 101 raise FileContentsException("Fewer than 5 data points found.") 89 for data_set in self.output: 90 if isinstance(data_set, Data1D): 91 if data_set.x.size < 5: 92 exception = FileContentsException( 93 "Fewer than 5 data points found.") 94 data_set.errors.append(exception) 102 95 103 96 def reset_state(self): … … 109 102 self.data2d = [] 110 103 self.raw_data = None 111 self.errors = set() 104 self.multi_frame = False 105 self.data_frames = [] 106 self.data_uncertainty_frames = [] 107 self.errors = [] 112 108 self.logging = [] 109 self.q_names = [] 110 self.mask_name = u'' 111 self.i_name = u'' 112 self.i_node = u'' 113 self.i_uncertainties_name = u'' 114 self.q_uncertainty_names = [] 115 self.q_resolution_names = [] 113 116 self.parent_class = u'' 114 117 self.detector = Detector() … … 131 134 value = data.get(key) 132 135 class_name = h5attr(value, u'canSAS_class') 136 if isinstance(class_name, (list, tuple, np.ndarray)): 137 class_name = class_name[0] 133 138 if class_name is None: 134 139 class_name = h5attr(value, u'NX_class') … … 140 145 if isinstance(value, h5py.Group): 141 146 # Set parent class before recursion 147 last_parent_class = self.parent_class 142 148 self.parent_class = class_name 143 149 parent_list.append(key) … … 147 153 self.add_data_set(key) 148 154 elif class_prog.match(u'SASdata'): 149 self._initialize_new_data_set(parent_list) 155 self._find_data_attributes(value) 156 self._initialize_new_data_set(value) 150 157 # Recursion step to access data within the group 151 158 self.read_children(value, parent_list) 159 self.add_intermediate() 152 160 # Reset parent class when returning from recursive method 153 self.parent_class = class_name 154 self.add_intermediate() 161 self.parent_class = last_parent_class 155 162 parent_list.remove(key) 156 163 157 164 elif isinstance(value, h5py.Dataset): 158 165 # If this is a dataset, store the data appropriately 159 data_set = data[key][:]166 data_set = value.value 160 167 unit = self._get_unit(value) 161 162 # I and Q Data163 if key == u'I':164 if isinstance(self.current_dataset, plottable_2D):165 self.current_dataset.data = data_set166 self.current_dataset.zaxis("Intensity", unit)167 else:168 self.current_dataset.y = data_set.flatten()169 self.current_dataset.yaxis("Intensity", unit)170 continue171 elif key == u'Idev':172 if isinstance(self.current_dataset, plottable_2D):173 self.current_dataset.err_data 
= data_set.flatten()174 else:175 self.current_dataset.dy = data_set.flatten()176 continue177 elif key == u'Q':178 self.current_dataset.xaxis("Q", unit)179 if isinstance(self.current_dataset, plottable_2D):180 self.current_dataset.q = data_set.flatten()181 else:182 self.current_dataset.x = data_set.flatten()183 continue184 elif key == u'Qdev':185 self.current_dataset.dx = data_set.flatten()186 continue187 elif key == u'dQw':188 self.current_dataset.dxw = data_set.flatten()189 continue190 elif key == u'dQl':191 self.current_dataset.dxl = data_set.flatten()192 continue193 elif key == u'Qy':194 self.current_dataset.yaxis("Q_y", unit)195 self.current_dataset.qy_data = data_set.flatten()196 continue197 elif key == u'Qydev':198 self.current_dataset.dqy_data = data_set.flatten()199 continue200 elif key == u'Qx':201 self.current_dataset.xaxis("Q_x", unit)202 self.current_dataset.qx_data = data_set.flatten()203 continue204 elif key == u'Qxdev':205 self.current_dataset.dqx_data = data_set.flatten()206 continue207 elif key == u'Mask':208 self.current_dataset.mask = data_set.flatten()209 continue210 # Transmission Spectrum211 elif (key == u'T'212 and self.parent_class == u'SAStransmission_spectrum'):213 self.trans_spectrum.transmission = data_set.flatten()214 continue215 elif (key == u'Tdev'216 and self.parent_class == u'SAStransmission_spectrum'):217 self.trans_spectrum.transmission_deviation = \218 data_set.flatten()219 continue220 elif (key == u'lambda'221 and self.parent_class == u'SAStransmission_spectrum'):222 self.trans_spectrum.wavelength = data_set.flatten()223 continue224 168 225 169 for data_point in data_set: … … 231 175 # Top Level Meta Data 232 176 if key == u'definition': 233 self.current_datainfo.meta_data['reader'] = data_point 177 if isinstance(data_set, basestring): 178 self.current_datainfo.meta_data['reader'] = data_set 179 break 180 else: 181 self.current_datainfo.meta_data[ 182 'reader'] = data_point 183 # Run 234 184 elif key == u'run': 235 self.current_datainfo.run.append(data_point)236 185 try: 237 186 run_name = h5attr(value, 'name') 238 run_dict = {data_ point: run_name}187 run_dict = {data_set: run_name} 239 188 self.current_datainfo.run_name = run_dict 240 189 except Exception: 241 190 pass 191 if isinstance(data_set, basestring): 192 self.current_datainfo.run.append(data_set) 193 break 194 else: 195 self.current_datainfo.run.append(data_point) 196 # Title 242 197 elif key == u'title': 243 self.current_datainfo.title = data_point 198 if isinstance(data_set, basestring): 199 self.current_datainfo.title = data_set 200 break 201 else: 202 self.current_datainfo.title = data_point 203 # Note 244 204 elif key == u'SASnote': 245 self.current_datainfo.notes.append(data_ point)246 205 self.current_datainfo.notes.append(data_set) 206 break 247 207 # Sample Information 248 # CanSAS 2.0 format 249 elif key == u'Title' and self.parent_class == u'SASsample': 250 self.current_datainfo.sample.name = data_point 251 # NXcanSAS format 252 elif key == u'name' and self.parent_class == u'SASsample': 253 self.current_datainfo.sample.name = data_point 254 # NXcanSAS format 255 elif key == u'ID' and self.parent_class == u'SASsample': 256 self.current_datainfo.sample.name = data_point 257 elif (key == u'thickness' 258 and self.parent_class == u'SASsample'): 259 self.current_datainfo.sample.thickness = data_point 260 elif (key == u'temperature' 261 and self.parent_class == u'SASsample'): 262 self.current_datainfo.sample.temperature = data_point 263 elif (key == u'transmission' 264 and 
self.parent_class == u'SASsample'): 265 self.current_datainfo.sample.transmission = data_point 266 elif (key == u'x_position' 267 and self.parent_class == u'SASsample'): 268 self.current_datainfo.sample.position.x = data_point 269 elif (key == u'y_position' 270 and self.parent_class == u'SASsample'): 271 self.current_datainfo.sample.position.y = data_point 272 elif key == u'pitch' and self.parent_class == u'SASsample': 273 self.current_datainfo.sample.orientation.x = data_point 274 elif key == u'yaw' and self.parent_class == u'SASsample': 275 self.current_datainfo.sample.orientation.y = data_point 276 elif key == u'roll' and self.parent_class == u'SASsample': 277 self.current_datainfo.sample.orientation.z = data_point 278 elif (key == u'details' 279 and self.parent_class == u'SASsample'): 280 self.current_datainfo.sample.details.append(data_point) 281 208 elif self.parent_class == u'SASsample': 209 self.process_sample(data_point, key) 282 210 # Instrumental Information 283 211 elif (key == u'name' 284 212 and self.parent_class == u'SASinstrument'): 285 213 self.current_datainfo.instrument = data_point 286 elif key == u'name' and self.parent_class == u'SASdetector': 287 self.detector.name = data_point 288 elif key == u'SDD' and self.parent_class == u'SASdetector': 289 self.detector.distance = float(data_point) 290 self.detector.distance_unit = unit 291 elif (key == u'slit_length' 292 and self.parent_class == u'SASdetector'): 293 self.detector.slit_length = float(data_point) 294 self.detector.slit_length_unit = unit 295 elif (key == u'x_position' 296 and self.parent_class == u'SASdetector'): 297 self.detector.offset.x = float(data_point) 298 self.detector.offset_unit = unit 299 elif (key == u'y_position' 300 and self.parent_class == u'SASdetector'): 301 self.detector.offset.y = float(data_point) 302 self.detector.offset_unit = unit 303 elif (key == u'pitch' 304 and self.parent_class == u'SASdetector'): 305 self.detector.orientation.x = float(data_point) 306 self.detector.orientation_unit = unit 307 elif key == u'roll' and self.parent_class == u'SASdetector': 308 self.detector.orientation.z = float(data_point) 309 self.detector.orientation_unit = unit 310 elif key == u'yaw' and self.parent_class == u'SASdetector': 311 self.detector.orientation.y = float(data_point) 312 self.detector.orientation_unit = unit 313 elif (key == u'beam_center_x' 314 and self.parent_class == u'SASdetector'): 315 self.detector.beam_center.x = float(data_point) 316 self.detector.beam_center_unit = unit 317 elif (key == u'beam_center_y' 318 and self.parent_class == u'SASdetector'): 319 self.detector.beam_center.y = float(data_point) 320 self.detector.beam_center_unit = unit 321 elif (key == u'x_pixel_size' 322 and self.parent_class == u'SASdetector'): 323 self.detector.pixel_size.x = float(data_point) 324 self.detector.pixel_size_unit = unit 325 elif (key == u'y_pixel_size' 326 and self.parent_class == u'SASdetector'): 327 self.detector.pixel_size.y = float(data_point) 328 self.detector.pixel_size_unit = unit 329 elif (key == u'distance' 330 and self.parent_class == u'SAScollimation'): 331 self.collimation.length = data_point 332 self.collimation.length_unit = unit 333 elif (key == u'name' 334 and self.parent_class == u'SAScollimation'): 335 self.collimation.name = data_point 336 elif (key == u'shape' 337 and self.parent_class == u'SASaperture'): 338 self.aperture.shape = data_point 339 elif (key == u'x_gap' 340 and self.parent_class == u'SASaperture'): 341 self.aperture.size.x = data_point 342 elif (key == u'y_gap' 
343 and self.parent_class == u'SASaperture'): 344 self.aperture.size.y = data_point 345 214 # Detector 215 elif self.parent_class == u'SASdetector': 216 self.process_detector(data_point, key, unit) 217 # Collimation 218 elif self.parent_class == u'SAScollimation': 219 self.process_collimation(data_point, key, unit) 220 # Aperture 221 elif self.parent_class == u'SASaperture': 222 self.process_aperture(data_point, key) 346 223 # Process Information 347 elif (key == u'Title' 348 and self.parent_class == u'SASprocess'): # CanSAS 2.0 349 self.process.name = data_point 350 elif (key == u'name' 351 and self.parent_class == u'SASprocess'): # NXcanSAS 352 self.process.name = data_point 353 elif (key == u'description' 354 and self.parent_class == u'SASprocess'): 355 self.process.description = data_point 356 elif key == u'date' and self.parent_class == u'SASprocess': 357 self.process.date = data_point 358 elif key == u'term' and self.parent_class == u'SASprocess': 359 self.process.term = data_point 360 elif self.parent_class == u'SASprocess': 361 self.process.notes.append(data_point) 362 224 elif self.parent_class == u'SASprocess': # CanSAS 2.0 225 self.process_process(data_point, key) 363 226 # Source 364 elif (key == u'wavelength' 365 and self.parent_class == u'SASdata'): 366 self.current_datainfo.source.wavelength = data_point 367 self.current_datainfo.source.wavelength_unit = unit 368 elif (key == u'incident_wavelength' 369 and self.parent_class == 'SASsource'): 370 self.current_datainfo.source.wavelength = data_point 371 self.current_datainfo.source.wavelength_unit = unit 372 elif (key == u'wavelength_max' 373 and self.parent_class == u'SASsource'): 374 self.current_datainfo.source.wavelength_max = data_point 375 self.current_datainfo.source.wavelength_max_unit = unit 376 elif (key == u'wavelength_min' 377 and self.parent_class == u'SASsource'): 378 self.current_datainfo.source.wavelength_min = data_point 379 self.current_datainfo.source.wavelength_min_unit = unit 380 elif (key == u'incident_wavelength_spread' 381 and self.parent_class == u'SASsource'): 382 self.current_datainfo.source.wavelength_spread = \ 383 data_point 384 self.current_datainfo.source.wavelength_spread_unit = \ 385 unit 386 elif (key == u'beam_size_x' 387 and self.parent_class == u'SASsource'): 388 self.current_datainfo.source.beam_size.x = data_point 389 self.current_datainfo.source.beam_size_unit = unit 390 elif (key == u'beam_size_y' 391 and self.parent_class == u'SASsource'): 392 self.current_datainfo.source.beam_size.y = data_point 393 self.current_datainfo.source.beam_size_unit = unit 394 elif (key == u'beam_shape' 395 and self.parent_class == u'SASsource'): 396 self.current_datainfo.source.beam_shape = data_point 397 elif (key == u'radiation' 398 and self.parent_class == u'SASsource'): 399 self.current_datainfo.source.radiation = data_point 400 elif (key == u'transmission' 401 and self.parent_class == u'SASdata'): 402 self.current_datainfo.sample.transmission = data_point 403 227 elif self.parent_class == u'SASsource': 228 self.process_source(data_point, key, unit) 404 229 # Everything else goes in meta_data 230 elif self.parent_class == u'SASdata': 231 if isinstance(self.current_dataset, plottable_2D): 232 self.process_2d_data_object(data_set, key, unit) 233 else: 234 self.process_1d_data_object(data_set, key, unit) 235 236 break 237 elif self.parent_class == u'SAStransmission_spectrum': 238 self.process_trans_spectrum(data_set, key) 239 break 405 240 else: 406 241 new_key = self._create_unique_key( … … 410 245 else: 
411 246 # I don't know if this reachable code 412 self.errors.add("ShouldNeverHappenException") 247 self.errors.append("ShouldNeverHappenException") 248 249 def process_1d_data_object(self, data_set, key, unit): 250 """ 251 SASdata processor method for 1d data items 252 :param data_set: data from HDF5 file 253 :param key: canSAS_class attribute 254 :param unit: unit attribute 255 """ 256 if key == self.i_name: 257 if self.multi_frame: 258 for x in range(0, data_set.shape[0]): 259 self.data_frames.append(data_set[x].flatten()) 260 else: 261 self.current_dataset.y = data_set.flatten() 262 self.current_dataset.yaxis("Intensity", unit) 263 elif key == self.i_uncertainties_name: 264 if self.multi_frame: 265 for x in range(0, data_set.shape[0]): 266 self.data_uncertainty_frames.append(data_set[x].flatten()) 267 self.current_dataset.dy = data_set.flatten() 268 elif key in self.q_names: 269 self.current_dataset.xaxis("Q", unit) 270 self.current_dataset.x = data_set.flatten() 271 elif key in self.q_resolution_names: 272 if (len(self.q_resolution_names) > 1 273 and np.where(self.q_resolution_names == key)[0] == 0): 274 self.current_dataset.dxw = data_set.flatten() 275 elif (len(self.q_resolution_names) > 1 276 and np.where(self.q_resolution_names == key)[0] == 1): 277 self.current_dataset.dxl = data_set.flatten() 278 else: 279 self.current_dataset.dx = data_set.flatten() 280 elif key in self.q_uncertainty_names: 281 if (len(self.q_uncertainty_names) > 1 282 and np.where(self.q_uncertainty_names == key)[0] == 0): 283 self.current_dataset.dxw = data_set.flatten() 284 elif (len(self.q_uncertainty_names) > 1 285 and np.where(self.q_uncertainty_names == key)[0] == 1): 286 self.current_dataset.dxl = data_set.flatten() 287 else: 288 self.current_dataset.dx = data_set.flatten() 289 elif key == self.mask_name: 290 self.current_dataset.mask = data_set.flatten() 291 elif key == u'wavelength': 292 self.current_datainfo.source.wavelength = data_set[0] 293 self.current_datainfo.source.wavelength_unit = unit 294 295 def process_2d_data_object(self, data_set, key, unit): 296 if key == self.i_name: 297 self.current_dataset.data = data_set 298 self.current_dataset.zaxis("Intensity", unit) 299 elif key == self.i_uncertainties_name: 300 self.current_dataset.err_data = data_set.flatten() 301 elif key in self.q_names: 302 self.current_dataset.xaxis("Q_x", unit) 303 self.current_dataset.yaxis("Q_y", unit) 304 if self.q_names[0] == self.q_names[1]: 305 # All q data in a single array 306 self.current_dataset.qx_data = data_set[0] 307 self.current_dataset.qy_data = data_set[1] 308 elif self.q_names.index(key) == 0: 309 self.current_dataset.qx_data = data_set 310 elif self.q_names.index(key) == 1: 311 self.current_dataset.qy_data = data_set 312 elif key in self.q_uncertainty_names or key in self.q_resolution_names: 313 if ((self.q_uncertainty_names[0] == self.q_uncertainty_names[1]) or 314 (self.q_resolution_names[0] == self.q_resolution_names[1])): 315 # All q data in a single array 316 self.current_dataset.dqx_data = data_set[0].flatten() 317 self.current_dataset.dqy_data = data_set[1].flatten() 318 elif (self.q_uncertainty_names.index(key) == 0 or 319 self.q_resolution_names.index(key) == 0): 320 self.current_dataset.dqx_data = data_set.flatten() 321 elif (self.q_uncertainty_names.index(key) == 1 or 322 self.q_resolution_names.index(key) == 1): 323 self.current_dataset.dqy_data = data_set.flatten() 324 self.current_dataset.yaxis("Q_y", unit) 325 elif key == self.mask_name: 326 self.current_dataset.mask = data_set.flatten() 
327 elif key == u'Qy':
328 self.current_dataset.yaxis("Q_y", unit)
329 self.current_dataset.qy_data = data_set.flatten()
330 elif key == u'Qydev':
331 self.current_dataset.dqy_data = data_set.flatten()
332 elif key == u'Qx':
333 self.current_dataset.xaxis("Q_x", unit)
334 self.current_dataset.qx_data = data_set.flatten()
335 elif key == u'Qxdev':
336 self.current_dataset.dqx_data = data_set.flatten()
337 
338 def process_trans_spectrum(self, data_set, key):
339 """
340 SAStransmission_spectrum processor
341 :param data_set: data from HDF5 file
342 :param key: canSAS_class attribute
343 """
344 if key == u'T':
345 self.trans_spectrum.transmission = data_set.flatten()
346 elif key == u'Tdev':
347 self.trans_spectrum.transmission_deviation = data_set.flatten()
348 elif key == u'lambda':
349 self.trans_spectrum.wavelength = data_set.flatten()
350 
351 def process_sample(self, data_point, key):
352 """
353 SASsample processor
354 :param data_point: Single point from an HDF5 data file
355 :param key: class name data_point was taken from
356 """
357 if key == u'Title':
358 self.current_datainfo.sample.name = data_point
359 elif key == u'name':
360 self.current_datainfo.sample.name = data_point
361 elif key == u'ID':
362 self.current_datainfo.sample.name = data_point
363 elif key == u'thickness':
364 self.current_datainfo.sample.thickness = data_point
365 elif key == u'temperature':
366 self.current_datainfo.sample.temperature = data_point
367 elif key == u'transmission':
368 self.current_datainfo.sample.transmission = data_point
369 elif key == u'x_position':
370 self.current_datainfo.sample.position.x = data_point
371 elif key == u'y_position':
372 self.current_datainfo.sample.position.y = data_point
373 elif key == u'pitch':
374 self.current_datainfo.sample.orientation.x = data_point
375 elif key == u'yaw':
376 self.current_datainfo.sample.orientation.y = data_point
377 elif key == u'roll':
378 self.current_datainfo.sample.orientation.z = data_point
379 elif key == u'details':
380 self.current_datainfo.sample.details.append(data_point)
381 
382 def process_detector(self, data_point, key, unit):
383 """
384 SASdetector processor
385 :param data_point: Single point from an HDF5 data file
386 :param key: class name data_point was taken from
387 :param unit: unit attribute from data set
388 """
389 if key == u'name':
390 self.detector.name = data_point
391 elif key == u'SDD':
392 self.detector.distance = float(data_point)
393 self.detector.distance_unit = unit
394 elif key == u'slit_length':
395 self.detector.slit_length = float(data_point)
396 self.detector.slit_length_unit = unit
397 elif key == u'x_position':
398 self.detector.offset.x = float(data_point)
399 self.detector.offset_unit = unit
400 elif key == u'y_position':
401 self.detector.offset.y = float(data_point)
402 self.detector.offset_unit = unit
403 elif key == u'pitch':
404 self.detector.orientation.x = float(data_point)
405 self.detector.orientation_unit = unit
406 elif key == u'roll':
407 self.detector.orientation.z = float(data_point)
408 self.detector.orientation_unit = unit
409 elif key == u'yaw':
410 self.detector.orientation.y = float(data_point)
411 self.detector.orientation_unit = unit
412 elif key == u'beam_center_x':
413 self.detector.beam_center.x = float(data_point)
414 self.detector.beam_center_unit = unit
415 elif key == u'beam_center_y':
416 self.detector.beam_center.y = float(data_point)
417 self.detector.beam_center_unit = unit
418 elif key == u'x_pixel_size':
419 self.detector.pixel_size.x = float(data_point)
420 self.detector.pixel_size_unit = unit
421 elif key == u'y_pixel_size':
422 self.detector.pixel_size.y = float(data_point)
423 self.detector.pixel_size_unit = unit
424 
425 def process_collimation(self, data_point, key, unit):
426 """
427 SAScollimation processor
428 :param data_point: Single point from an HDF5 data file
429 :param key: class name data_point was taken from
430 :param unit: unit attribute from data set
431 """
432 if key == u'distance':
433 self.collimation.length = data_point
434 self.collimation.length_unit = unit
435 elif key == u'name':
436 self.collimation.name = data_point
437 
438 def process_aperture(self, data_point, key):
439 """
440 SASaperture processor
441 :param data_point: Single point from an HDF5 data file
442 :param key: class name data_point was taken from
443 """
444 if key == u'shape':
445 self.aperture.shape = data_point
446 elif key == u'x_gap':
447 self.aperture.size.x = data_point
448 elif key == u'y_gap':
449 self.aperture.size.y = data_point
450 
451 def process_source(self, data_point, key, unit):
452 """
453 SASsource processor
454 :param data_point: Single point from an HDF5 data file
455 :param key: class name data_point was taken from
456 :param unit: unit attribute from data set
457 """
458 if key == u'incident_wavelength':
459 self.current_datainfo.source.wavelength = data_point
460 self.current_datainfo.source.wavelength_unit = unit
461 elif key == u'wavelength_max':
462 self.current_datainfo.source.wavelength_max = data_point
463 self.current_datainfo.source.wavelength_max_unit = unit
464 elif key == u'wavelength_min':
465 self.current_datainfo.source.wavelength_min = data_point
466 self.current_datainfo.source.wavelength_min_unit = unit
467 elif key == u'incident_wavelength_spread':
468 self.current_datainfo.source.wavelength_spread = data_point
469 self.current_datainfo.source.wavelength_spread_unit = unit
470 elif key == u'beam_size_x':
471 self.current_datainfo.source.beam_size.x = data_point
472 self.current_datainfo.source.beam_size_unit = unit
473 elif key == u'beam_size_y':
474 self.current_datainfo.source.beam_size.y = data_point
475 self.current_datainfo.source.beam_size_unit = unit
476 elif key == u'beam_shape':
477 self.current_datainfo.source.beam_shape = data_point
478 elif key == u'radiation':
479 self.current_datainfo.source.radiation = data_point
480 
481 def process_process(self, data_point, key):
482 """
483 SASprocess processor
484 :param data_point: Single point from an HDF5 data file
485 :param key: class name data_point was taken from
486 """
487 term_match = re.compile(u'^term[0-9]+$')
488 if key == u'Title': # CanSAS 2.0
489 self.process.name = data_point
490 elif key == u'name': # NXcanSAS
491 self.process.name = data_point
492 elif key == u'description':
493 self.process.description = data_point
494 elif key == u'date':
495 self.process.date = data_point
496 elif term_match.match(key):
497 self.process.term.append(data_point)
498 else:
499 self.process.notes.append(data_point)
413 500 
414 501 def add_intermediate(self):
… …
440 527 self.data2d.append(self.current_dataset)
441 528 elif isinstance(self.current_dataset, plottable_1D):
442 self.data1d.append(self.current_dataset)
529 if self.multi_frame:
530 for x in range(0, len(self.data_frames)):
531 self.current_dataset.y = self.data_frames[x]
532 if len(self.data_uncertainty_frames) > x:
533 self.current_dataset.dy = \
534 self.data_uncertainty_frames[x]
535 self.data1d.append(self.current_dataset)
536 else:
537 self.data1d.append(self.current_dataset)
443 538 
444 539 def final_data_cleanup(self):
… …
452 547 spectrum_list = []
453 548 for spectrum in self.current_datainfo.trans_spectrum:
454 spectrum.transmission = np.delete(spectrum.transmission, [0])
455 549 spectrum.transmission = spectrum.transmission.astype(np.float64)
456 spectrum.transmission_deviation = np.delete(
457 spectrum.transmission_deviation, [0])
458 550 spectrum.transmission_deviation = \
459 551 spectrum.transmission_deviation.astype(np.float64)
460 spectrum.wavelength = np.delete(spectrum.wavelength, [0])
461 552 spectrum.wavelength = spectrum.wavelength.astype(np.float64)
462 553 if len(spectrum.transmission) > 0:
… …
466 557 # Append errors to dataset and reset class errors
467 558 self.current_datainfo.errors = self.errors
468 self.errors.clear()
559 self.errors = []
469 560 
470 561 # Combine all plottables with datainfo and append each to output
… …
476 567 zeros[i] = dataset.mask[i]
477 568 except:
478 self.errors.add(sys.exc_value)
569 self.errors.append(sys.exc_value)
479 570 dataset.mask = zeros
480 571 # Calculate the actual Q matrix
… …
490 581 if dataset.data.ndim == 2:
491 582 (n_rows, n_cols) = dataset.data.shape
492 dataset.y_bins = dataset.qy_data[0::n_cols]
493 dataset.x_bins = dataset.qx_data[:n_cols]
583 flat_qy = dataset.qy_data[0::n_cols].flatten()
584 # For 2D arrays of Qx and Qy, the Q value should be constant
585 # along each row -OR- each column. The direction is not
586 # specified in the NXcanSAS standard.
587 if flat_qy[0] == flat_qy[1]:
588 flat_qy = np.transpose(dataset.qy_data)[0::n_cols].flatten()
589 dataset.y_bins = np.unique(flat_qy)
590 flat_qx = dataset.qx_data[0::n_rows].flatten()
591 # For 2D arrays of Qx and Qy, the Q value should be constant
592 # along each row -OR- each column. The direction is not
593 # specified in the NXcanSAS standard.
594 if flat_qx[0] == flat_qx[1]:
595 flat_qx = np.transpose(dataset.qx_data)[0::n_rows].flatten()
596 dataset.x_bins = np.unique(flat_qx)
494 597 dataset.data = dataset.data.flatten()
598 dataset.qx_data = dataset.qx_data.flatten()
599 dataset.qy_data = dataset.qy_data.flatten()
495 600 self.current_dataset = dataset
496 601 self.send_to_output()
… …
511 616 if self.current_datainfo and self.current_dataset:
512 617 self.final_data_cleanup()
618 self.data_frames = []
619 self.data_uncertainty_frames = []
513 620 self.data1d = []
514 621 self.data2d = []
515 622 self.current_datainfo = DataInfo()
516 623 
517 
518 def _initialize_new_data_set(self, parent_list=None):
624 def _initialize_new_data_set(self, value=None):
519 625 """
520 626 A private class method to generate a new 1D or 2D data object based on
… …
524 630 :param parent_list: List of names of parent elements
525 631 """
526 
527 if parent_list is None:
528 parent_list = []
529 if self._find_intermediate(parent_list, "Qx"):
632 if self._is2d(value):
530 633 self.current_dataset = plottable_2D()
531 634 else:
… …
535 638 self.current_datainfo.filename = self.raw_data.filename
536 639 
537 def _find_intermediate(self, parent_list, basename=""):
538 """
539 A private class used to find an entry by either using a direct key or
540 knowing the approximate basename.
541 
542 :param parent_list: List of parents nodes in the HDF5 file
640 @staticmethod
641 def check_is_list_or_array(iterable):
642 try:
643 iter(iterable)
644 if (not isinstance(iterable, np.ndarray) and not isinstance(
645 iterable, list)) or (isinstance(iterable, basestring)):
646 raise TypeError
647 except TypeError:
648 if isinstance(iterable, basestring):
649 iterable = iterable.split(",")
650 else:
651 iterable = [iterable]
652 return iterable
653 
654 def _find_data_attributes(self, value):
655 """
656 A method to find the indices for Q, the names of the Qdev and Idev
657 sets, and the name of the mask.
658 :param value: SASdata/NXdata HDF5 Group
659 """
660 # Initialize values to base types
661 self.mask_name = u''
662 self.i_name = u''
663 self.i_node = u''
664 self.i_uncertainties_name = u''
665 self.q_names = []
666 self.q_uncertainty_names = []
667 self.q_resolution_names = []
668 # Get attributes
669 attrs = value.attrs
670 signal = attrs.get("signal", "I")
671 i_axes = attrs.get("I_axes", ["Q"])
672 q_indices = attrs.get("Q_indices", [0])
673 q_indices = map(int, self.check_is_list_or_array(q_indices))
674 i_axes = self.check_is_list_or_array(i_axes)
675 keys = value.keys()
676 # Assign attributes to appropriate class variables
677 self.mask_name = attrs.get("mask")
678 for val in q_indices:
679 self.q_names.append(i_axes[val])
680 self.i_name = signal
681 self.i_node = value.get(self.i_name)
682 for item in self.q_names:
683 if item in keys:
684 q_vals = value.get(item)
685 if q_vals.attrs.get("uncertainties") is not None:
686 self.q_uncertainty_names = q_vals.attrs.get("uncertainties")
687 elif q_vals.attrs.get("uncertainty") is not None:
688 self.q_uncertainty_names = q_vals.attrs.get("uncertainty")
689 if isinstance(self.q_uncertainty_names, basestring):
690 self.q_uncertainty_names = self.q_uncertainty_names.split(",")
691 if q_vals.attrs.get("resolutions") is not None:
692 self.q_resolution_names = q_vals.attrs.get("resolutions")
693 if isinstance(self.q_resolution_names, basestring):
694 self.q_resolution_names = self.q_resolution_names.split(",")
695 if self.i_name in keys:
696 i_vals = value.get(self.i_name)
697 self.i_uncertainties_name = i_vals.attrs.get("uncertainties")
698 if self.i_uncertainties_name is None:
699 self.i_uncertainties_name = i_vals.attrs.get("uncertainty")
700 
701 def _is2d(self, value, i_base="", q_base=[]):
702 """
703 A private method to determine if the data set is 1d or 2d.
704 
705 :param value: NeXus/NXcanSAS data group
543 706 :param basename: Approximate name of an entry to search for
544 :return:
545 """
546 
547 entry = False
548 key_prog = re.compile(basename)
549 top = self.raw_data
550 for parent in parent_list:
551 top = top.get(parent)
552 for key in top.keys():
553 if key_prog.match(key):
554 entry = True
555 break
556 return entry
707 :return: True if 2D, otherwise false
708 """
709 i_basename = i_base if i_base != "" else self.i_name
710 i_vals = value.get(i_basename)
711 q_basename = q_base if q_base != [] else self.q_names
712 q_vals = value.get(q_basename[0])
713 self.multi_frame = True if (i_vals is not None and q_vals is not None
714 and len(i_vals.shape) != 1
715 and len(q_vals.shape) == 1) else False
716 return (i_vals is not None and i_vals.shape is not None
717 and len(i_vals.shape) != 1 and not self.multi_frame)
557 718 
558 720 def _create_unique_key(self, dictionary, name, numb=0):
… …
583 744 if unit is None:
584 745 unit = h5attr(value, u'unit')
585 # Convert the unit formats
586 if unit == "1/A":
587 unit = "A^{-1}"
588 elif unit == "1/cm":
589 unit = "cm^{-1}"
590 746 return unit
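The reader half of this changeset replaces name-guessing (the removed _find_intermediate) with the NXcanSAS attribute conventions: `signal` names the intensity dataset, `I_axes` names the axes, `Q_indices` says which axes carry Q, and `uncertainties`/`resolutions` attributes on each dataset point at its error sets. A minimal sketch of that walk in plain h5py, outside SasView (file name and group path are hypothetical):

    import h5py

    with h5py.File("example_nxcansas.h5", "r") as handle:
        data = handle["sasentry01/sasdata01"]         # SASdata/NXdata group
        signal = data.attrs.get("signal", "I")        # intensity dataset name
        i_axes = data.attrs.get("I_axes", ["Q"])      # axis name per index
        q_indices = data.attrs.get("Q_indices", [0])  # which axes carry Q
        if isinstance(i_axes, bytes):                 # attrs may be scalar strings
            i_axes = i_axes.decode().split(",")
        q_names = [i_axes[int(idx)] for idx in q_indices]
        # Error sets are named by attributes rather than by fixed keys
        i_dev_name = data[signal].attrs.get("uncertainties")
        q_res_name = data[q_names[0]].attrs.get("resolutions")

-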
src/sas/sascalc/dataloader/readers/danse_reader.py
r2469df7 rfc51d06
180 180 detector.beam_center.y = center_y * pixel
181 181 
182 
183 self.current_dataset.xaxis("\\rm{Q_{x}}", 'A^{-1}')
184 self.current_dataset.yaxis("\\rm{Q_{y}}", 'A^{-1}')
185 self.current_dataset.zaxis("\\rm{Intensity}", "cm^{-1}")
186 
182 self.current_dataset = self.set_default_2d_units(self.current_dataset)
187 183 self.current_dataset.x_bins = x_vals
188 184 self.current_dataset.y_bins = y_vals
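Both this reader and red2d_reader.py below now delegate the 2D axis labels to a shared set_default_2d_units helper instead of repeating the axis calls. Judging by the lines it replaces, the helper presumably reduces to something like this sketch (an assumption, not the actual implementation):

    def set_default_2d_units(dataset):
        # Defaults recovered from the removed reader lines above
        dataset.xaxis(r"\rm{Q_{x}}", 'A^{-1}')
        dataset.yaxis(r"\rm{Q_{y}}", 'A^{-1}')
        dataset.zaxis(r"\rm{Intensity}", "cm^{-1}")
        return dataset

-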
src/sas/sascalc/dataloader/readers/red2d_reader.py
rc8321cfc r058f6c3
317 317 
318 318 # Units of axes
319 self.current_dataset.xaxis(r"\rm{Q_{x}}", 'A^{-1}')
320 self.current_dataset.yaxis(r"\rm{Q_{y}}", 'A^{-1}')
321 self.current_dataset.zaxis(r"\rm{Intensity}", "cm^{-1}")
319 self.current_dataset = self.set_default_2d_units(self.current_dataset)
322 320 
323 321 # Store loading process information
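Together with the unit-conversion branch dropped from the HDF5 reader's _get_unit above, these changes standardize on caret-style unit labels; the reader tests below now assert 'A^{-1}' and 'cm^{-1}' instead of '1/A' and '1/cm'. The mapping as a one-line sketch (drawn from the removed branch; not a SasView API):

    UNIT_ALIASES = {"1/A": "A^{-1}", "1/cm": "cm^{-1}"}

    def normalize_unit(label):
        return UNIT_ALIASES.get(label, label)

    assert normalize_unit("1/A") == "A^{-1}"

-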
src/sas/sascalc/file_converter/nxcansas_writer.py
r574adc7 r2ca5d57b
8 8 import os
9 9 
10 from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader as Cansas2Reader
10 from sas.sascalc.dataloader.readers.cansas_reader_HDF5 import Reader
11 11 from sas.sascalc.dataloader.data_info import Data1D, Data2D
12 12 
13 class NXcanSASWriter(Cansas2Reader):
13 class NXcanSASWriter(Reader):
14 14 """
15 15 A class for writing in NXcanSAS data files. Any number of data sets may be
… …
87 87 entry[names[2]].attrs['units'] = units
88 88 
89 valid_data = all([issubclass(d.__class__, (Data1D, Data2D)) for d in dataset])
89 valid_data = all([issubclass(d.__class__, (Data1D, Data2D)) for d in
90 dataset])
90 91 if not valid_data:
91 raise ValueError("All entries of dataset must be Data1D or Data2D objects")
92 raise ValueError("All entries of dataset must be Data1D or Data2D"
93 "objects")
92 94 
93 95 # Get run name and number from first Data object
… …
109 111 sasentry.attrs['version'] = '1.0'
110 112 
111 i = 1
112 
113 for data_obj in dataset:
114 data_entry = sasentry.create_group("sasdata{0:0=2d}".format(i))
113 for i, data_obj in enumerate(dataset):
114 data_entry = sasentry.create_group("sasdata{0:0=2d}".format(i+1))
115 115 data_entry.attrs['canSAS_class'] = 'SASdata'
116 116 if isinstance(data_obj, Data1D):
… …
118 118 elif isinstance(data_obj, Data2D):
119 119 self._write_2d_data(data_obj, data_entry)
120 i += 1
121 120 
122 121 data_info = dataset[0]
… …
148 147 sample_entry.create_dataset('details', data=details)
149 148 
150 # Instrumment metadata
149 # Instrument metadata
151 150 instrument_entry = sasentry.create_group('sasinstrument')
152 151 instrument_entry.attrs['canSAS_class'] = 'SASinstrument'
… …
176 175 units=data_info.source.beam_size_unit, write_fn=_write_h5_float)
177 176 
178 
179 177 # Collimation metadata
180 178 if len(data_info.collimation) > 0:
181 i = 1
182 for coll_info in data_info.collimation:
179 for i, coll_info in enumerate(data_info.collimation):
183 180 collimation_entry = instrument_entry.create_group(
184 'sascollimation{0:0=2d}'.format(i))
181 'sascollimation{0:0=2d}'.format(i + 1))
185 182 collimation_entry.attrs['canSAS_class'] = 'SAScollimation'
186 183 if coll_info.length is not None:
187 184 _write_h5_float(collimation_entry, coll_info.length, 'SDD')
188 collimation_entry['SDD'].attrs['units'] = coll_info.length_unit
185 collimation_entry['SDD'].attrs['units'] =\
186 coll_info.length_unit
189 187 if coll_info.name is not None:
190 188 collimation_entry['name'] = _h5_string(coll_info.name)
191 189 else:
192 # Create a blank one - at least 1 set of collimation metadata
193 # required by format
194 collimation_entry = instrument_entry.create_group('sascollimation01')
190 # Create a blank one - at least 1 collimation required by format
191 instrument_entry.create_group('sascollimation01')
195 192 
196 193 # Detector metadata
197 194 if len(data_info.detector) > 0:
198 195 
199 i = 1
200 for det_info in data_info.detector:
196 for i, det_info in enumerate(data_info.detector):
200 197 detector_entry = instrument_entry.create_group(
201 'sasdetector{0:0=2d}'.format(i))
198 'sasdetector{0:0=2d}'.format(i + 1))
202 199 detector_entry.attrs['canSAS_class'] = 'SASdetector'
203 200 if det_info.distance is not None:
204 201 _write_h5_float(detector_entry, det_info.distance, 'SDD')
205 detector_entry['SDD'].attrs['units'] = det_info.distance_unit
202 detector_entry['SDD'].attrs['units'] =\
203 det_info.distance_unit
206 204 if det_info.name is not None:
207 205 detector_entry['name'] = _h5_string(det_info.name)
… …
209 207 detector_entry['name'] = _h5_string('')
210 208 if det_info.slit_length is not None:
211 _write_h5_float(detector_entry, det_info.slit_length, 'slit_length')
212 detector_entry['slit_length'].attrs['units'] = det_info.slit_length_unit
209 _write_h5_float(detector_entry, det_info.slit_length,
210 'slit_length')
211 detector_entry['slit_length'].attrs['units'] =\
212 det_info.slit_length_unit
213 213 _write_h5_vector(detector_entry, det_info.offset)
214 214 # NXcanSAS doesn't save information about pitch, only roll
… …
224 224 names=['x_pixel_size', 'y_pixel_size'],
225 225 write_fn=_write_h5_float, units=det_info.pixel_size_unit)
226 
227 i += 1
228 226 else:
229 227 # Create a blank one - at least 1 detector required by format
… …
231 229 detector_entry.attrs['canSAS_class'] = 'SASdetector'
232 230 detector_entry.attrs['name'] = ''
231 
232 # Process meta data
233 for i, process in enumerate(data_info.process):
234 process_entry = sasentry.create_group('sasprocess{0:0=2d}'.format(
235 i + 1))
236 process_entry.attrs['canSAS_class'] = 'SASprocess'
237 if process.name:
238 name = _h5_string(process.name)
239 process_entry.create_dataset('name', data=name)
240 if process.date:
241 date = _h5_string(process.date)
242 process_entry.create_dataset('date', data=date)
243 if process.description:
244 desc = _h5_string(process.description)
245 process_entry.create_dataset('description', data=desc)
246 for j, term in enumerate(process.term):
247 # Don't save empty terms
248 if term:
249 h5_term = _h5_string(term)
250 process_entry.create_dataset('term{0:0=2d}'.format(
251 j + 1), data=h5_term)
252 for j, note in enumerate(process.notes):
253 # Don't save empty notes
254 if note:
255 h5_note = _h5_string(note)
256 process_entry.create_dataset('note{0:0=2d}'.format(
257 j + 1), data=h5_note)
258 
259 # Transmission Spectrum
260 for i, trans in enumerate(data_info.trans_spectrum):
261 trans_entry = sasentry.create_group(
262 'sastransmission_spectrum{0:0=2d}'.format(i + 1))
263 trans_entry.attrs['canSAS_class'] = 'SAStransmission_spectrum'
264 trans_entry.attrs['signal'] = 'T'
265 trans_entry.attrs['T_axes'] = 'T'
266 trans_entry.attrs['name'] = trans.name
267 if trans.timestamp != '':
268 trans_entry.attrs['timestamp'] = trans.timestamp
269 transmission = trans_entry.create_dataset('T',
270 data=trans.transmission)
271 transmission.attrs['uncertainties'] = 'Tdev'
272 trans_entry.create_dataset('Tdev',
273 data=trans.transmission_deviation)
274 trans_entry.create_dataset('lambda', data=trans.wavelength)
233 275 
234 276 note_entry = sasentry.create_group('sasnote'.format(i))
… …
254 296 data_entry.attrs['signal'] = 'I'
255 297 data_entry.attrs['I_axes'] = 'Q'
256 data_entry.attrs['I_uncertainties'] = 'Idev'
257 data_entry.attrs['Q_indicies'] = 0
258 
259 dI = data_obj.dy
260 if dI is None:
261 dI = np.zeros((data_obj.y.shape))
262 
263 data_entry.create_dataset('Q', data=data_obj.x)
264 data_entry.create_dataset('I', data=data_obj.y)
265 data_entry.create_dataset('Idev', data=dI)
298 data_entry.attrs['Q_indices'] = [0]
299 q_entry = data_entry.create_dataset('Q', data=data_obj.x)
300 q_entry.attrs['units'] = data_obj.x_unit
301 i_entry = data_entry.create_dataset('I', data=data_obj.y)
302 i_entry.attrs['units'] = data_obj.y_unit
303 if data_obj.dy is not None:
304 i_entry.attrs['uncertainties'] = 'Idev'
305 i_dev_entry = data_entry.create_dataset('Idev', data=data_obj.dy)
306 i_dev_entry.attrs['units'] = data_obj.y_unit
307 if data_obj.dx is not None:
308 q_entry.attrs['resolutions'] = 'dQ'
309 dq_entry = data_entry.create_dataset('dQ', data=data_obj.dx)
310 dq_entry.attrs['units'] = data_obj.x_unit
311 elif data_obj.dxl is not None:
312 q_entry.attrs['resolutions'] = ['dQl','dQw']
313 dql_entry = data_entry.create_dataset('dQl', data=data_obj.dxl)
314 dql_entry.attrs['units'] = data_obj.x_unit
315 dqw_entry = data_entry.create_dataset('dQw', data=data_obj.dxw)
316 dqw_entry.attrs['units'] = data_obj.x_unit
266 317 
267 318 def _write_2d_data(self, data, data_entry):
… …
273 324 """
274 325 data_entry.attrs['signal'] = 'I'
275 data_entry.attrs['I_axes'] = 'Q,Q'
276 data_entry.attrs['I_uncertainties'] = 'Idev'
277 data_entry.attrs['Q_indicies'] = [0,1]
326 data_entry.attrs['I_axes'] = 'Qx,Qy'
327 data_entry.attrs['Q_indices'] = [0,1]
278 328 
279 329 (n_rows, n_cols) = (len(data.y_bins), len(data.x_bins))
… …
288 338 raise ValueError("Unable to calculate dimensions of 2D data")
289 339 
290 I = np.reshape(data.data, (n_rows, n_cols))
291 dI = np.zeros((n_rows, n_cols))
292 if not all(data.err_data == [None]):
293 dI = np.reshape(data.err_data, (n_rows, n_cols))
294 qx = np.reshape(data.qx_data, (n_rows, n_cols))
340 intensity = np.reshape(data.data, (n_rows, n_cols))
341 qx = np.reshape(data.qx_data, (n_rows, n_cols))
295 342 qy = np.reshape(data.qy_data, (n_rows, n_cols))
296 343 
297 I_entry = data_entry.create_dataset('I', data=I)
298 I_entry.attrs['units'] = data.I_unit
299 Qx_entry = data_entry.create_dataset('Qx', data=qx)
300 Qx_entry.attrs['units'] = data.Q_unit
301 Qy_entry = data_entry.create_dataset('Qy', data=qy)
302 Qy_entry.attrs['units'] = data.Q_unit
303 Idev_entry = data_entry.create_dataset('Idev', data=dI)
304 Idev_entry.attrs['units'] = data.I_unit
344 i_entry = data_entry.create_dataset('I', data=intensity)
345 i_entry.attrs['units'] = data.I_unit
346 qx_entry = data_entry.create_dataset('Qx', data=qx)
347 qx_entry.attrs['units'] = data.Q_unit
348 qy_entry = data_entry.create_dataset('Qy', data=qy)
349 qy_entry.attrs['units'] = data.Q_unit
350 if data.err_data is not None and not all(data.err_data == [None]):
351 d_i = np.reshape(data.err_data, (n_rows, n_cols))
352 i_entry.attrs['uncertainties'] = 'Idev'
353 i_dev_entry = data_entry.create_dataset('Idev', data=d_i)
354 i_dev_entry.attrs['units'] = data.I_unit
355 if data.dqx_data is not None and not all(data.dqx_data == [None]):
356 qx_entry.attrs['resolutions'] = 'dQx'
357 dqx_entry = data_entry.create_dataset('dQx', data=data.dqx_data)
358 dqx_entry.attrs['units'] = data.Q_unit
359 if data.dqy_data is not None and not all(data.dqy_data == [None]):
360 qy_entry.attrs['resolutions'] = 'dQy'
361 dqy_entry = data_entry.create_dataset('dQy', data=data.dqy_data)
362 dqy_entry.attrs['units'] = data.Q_unit
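The writer now mirrors the reader's attribute conventions: Idev, dQ, and dQx/dQy are written only when present, and are linked from I and Q through `uncertainties`/`resolutions` attributes instead of the old fixed `I_uncertainties` key. A hedged h5py sketch of the 1D layout this produces (file name and values are made up):

    import numpy as np
    import h5py

    with h5py.File("sketch_1d.h5", "w") as f:
        entry = f.create_group("sasentry01")
        entry.attrs["canSAS_class"] = "SASentry"
        data = entry.create_group("sasdata01")
        data.attrs["canSAS_class"] = "SASdata"
        data.attrs["signal"] = "I"
        data.attrs["I_axes"] = "Q"
        data.attrs["Q_indices"] = [0]
        q = data.create_dataset("Q", data=np.linspace(0.01, 0.5, 10))
        i = data.create_dataset("I", data=np.ones(10))
        i.attrs["uncertainties"] = "Idev"   # points at the Idev dataset
        data.create_dataset("Idev", data=0.1 * np.ones(10))
        q.attrs["resolutions"] = "dQ"       # points at the dQ dataset
        data.create_dataset("dQ", data=0.001 * np.ones(10))

-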
src/sas/sasgui/guiframe/gui_manager.py
r8ac05a5 r9f45f83
46 46 from sas.sasgui.guiframe.CategoryManager import CategoryManager
47 47 from sas.sascalc.dataloader.loader import Loader
48 from sas.sascalc.file_converter.nxcansas_writer import NXcanSASWriter
48 49 from sas.sasgui.guiframe.proxy import Connection
49 50 
… …
2419 2420 default_name = fname
2420 2421 wildcard = "Text files (*.txt)|*.txt|"\
2421 "CanSAS 1D files(*.xml)|*.xml"
2422 path = None
2422 "CanSAS 1D files (*.xml)|*.xml|"\
2423 "NXcanSAS files (*.h5)|*.h5|"
2424 options = [".txt", ".xml", ".h5"]
2423 2425 dlg = wx.FileDialog(self, "Choose a file",
2424 2426 self._default_save_location,
… …
2430 2432 # This is MAC Fix
2431 2433 ext_num = dlg.GetFilterIndex()
2432 if ext_num == 0:
2433 ext_format = '.txt'
2434 else:
2435 ext_format = '.xml'
2434 
2435 ext_format = options[ext_num]
2436 2436 path = os.path.splitext(path)[0] + ext_format
2437 2437 mypath = os.path.basename(path)
2438 
2439 # Instantiate a loader
2440 loader = Loader()
2441 ext_format = ".txt"
2442 if os.path.splitext(mypath)[1].lower() == ext_format:
2438 fName = os.path.splitext(path)[0] + ext_format
2439 
2440 if os.path.splitext(mypath)[1].lower() == options[0]:
2443 2441 # Make sure the ext included in the file name
2444 2442 # especially on MAC
2445 fName = os.path.splitext(path)[0] + ext_format
2446 2443 self._onsaveTXT(data, fName)
2447 ext_format = ".xml"
2448 if os.path.splitext(mypath)[1].lower() == ext_format:
2444 elif os.path.splitext(mypath)[1].lower() == options[1]:
2449 2445 # Make sure the ext included in the file name
2450 2446 # especially on MAC
2451 fName = os.path.splitext(path)[0] + ext_format
2447 # Instantiate a loader
2448 loader = Loader()
2452 2449 loader.save(fName, data, ext_format)
2450 elif os.path.splitext(mypath)[1].lower() == options[2]:
2451 nxcansaswriter = NXcanSASWriter()
2452 nxcansaswriter.write([data], fName)
2453 2453 try:
2454 2454 self._default_save_location = os.path.dirname(path)
… …
2477 2477 if has_errors:
2478 2478 if data.dx is not None and data.dx != []:
2479 out.write("<X> <Y> <dY> <dX>\n")
2479 out.write("<X>\t<Y>\t<dY>\t<dX>\n")
2480 2480 else:
2481 out.write("<X> <Y> <dY>\n")
2481 out.write("<X>\t<Y>\t<dY>\n")
2482 2482 else:
2483 out.write("<X> <Y>\n")
2483 out.write("<X>\t<Y>\n")
2484 2484 
2485 2485 for i in range(len(data.x)):
… …
2525 2525 text += 'dY_min = %s: dY_max = %s\n' % (min(data.dy), max(data.dy))
2526 2526 text += '\nData Points:\n'
2527 x_st = "X"
2527 text += "<index> \t<X> \t<Y> \t<dY> "
2528 text += "\t<dXl> \t<dXw>\n" if(data.dxl is not None and
2529 data.dxw is not None) else "\t<dX>\n"
2528 2530 for index in range(len(data.x)):
2529 2531 if data.dy is not None and len(data.dy) > index:
… …
2536 2538 dx_val = 0.0
2537 2539 if data.dxl is not None and len(data.dxl) > index:
2538 if index == 0:
2539 x_st = "Xl"
2540 2540 dx_val = data.dxl[index]
2541 elif data.dxw is not None and len(data.dxw) > index:
2542 if index == 0:
2543 x_st = "Xw"
2544 dx_val = data.dxw[index]
2545 
2546 if index == 0:
2547 text += "<index> \t<X> \t<Y> \t<dY> \t<d%s>\n" % x_st
2541 if data.dxw is not None and len(data.dxw) > index:
2542 dx_val = "%s \t%s" % (data.dxl[index], data.dxw[index])
2543 
2548 2544 text += "%s \t%s \t%s \t%s \t%s\n" % (index,
2549 2545 data.x[index],
… …
2562 2558 """
2563 2559 default_name = fname
2564 wildcard = "IGOR/DAT 2D file in Q_map (*.dat)|*.DAT"
2560 wildcard = "IGOR/DAT 2D file in Q_map (*.dat)|*.DAT|"\
2561 "NXcanSAS files (*.h5)|*.h5|"
2565 2562 dlg = wx.FileDialog(self, "Choose a file",
2566 2563 self._default_save_location,
… …
2574 2571 if ext_num == 0:
2575 2572 ext_format = '.dat'
2573 elif ext_num == 1:
2574 ext_format = '.h5'
2576 2575 else:
2577 2576 ext_format = ''
… …
2581 2580 # Instantiate a loader
2582 2581 loader = Loader()
2583 
2584 ext_format = ".dat"
2585 if os.path.splitext(mypath)[1].lower() == ext_format:
2582 if os.path.splitext(mypath)[1].lower() == '.dat':
2586 2583 # Make sure the ext included in the file name
2587 2584 # especially on MAC
2588 2585 fileName = os.path.splitext(path)[0] + ext_format
2589 2586 loader.save(fileName, data, ext_format)
2587 elif os.path.splitext(mypath)[1].lower() == '.h5':
2588 # Make sure the ext included in the file name
2589 # especially on MAC
2590 fileName = os.path.splitext(path)[0] + ext_format
2591 nxcansaswriter = NXcanSASWriter()
2592 nxcansaswriter.write([data], fileName)
2590 2593 try:
2591 2594 self._default_save_location = os.path.dirname(path)
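Both save dialogs now map the wx filter index straight to an extension through an options list, then route .txt to _onsaveTXT, .xml to Loader.save, and .h5 to NXcanSASWriter.write. The index-to-extension step, reduced to a standalone sketch (function name is illustrative):

    import os

    options = [".txt", ".xml", ".h5"]

    def choose_format(path, ext_num):
        """Map a dialog filter index to the target extension and file name."""
        ext = options[ext_num]
        return os.path.splitext(path)[0] + ext, ext

-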
src/sas/sasgui/guiframe/local_perspectives/data_loader/data_loader.py
r9c7e2b8 r5218180
205 205 except NoKnownLoaderException as e:
206 206 exception_occurred = True
207 error_message = "Loading data failed! " + e.message
207 error_message = "Loading data failed!\n" + e.message
208 208 file_errors[basename] = [error_message]
209 209 except Exception as e:
… …
224 224 for message in error_array:
225 225 error_message += message + "\n"
226 error_message = error_message[:-1]
226 227 self.load_complete(output=output,
227 228 message=error_message,
… …
231 232 self.load_complete(output=output, message="Loading data complete!",
232 233 info="info")
233 else:
234 self.load_complete(output=None, message=error_message, info="error")
235 234 
236 def load_update(self, message="", info="warning"):
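The loop above accumulates one message per file with a trailing newline and then slices the last one off; a join over the collected messages is the equivalent single step (sketch with made-up messages):

    messages = ["Loading data failed!\nbad header in a.xml",
                "no known loader for b.qqq"]
    error_message = "\n".join(messages)  # same result, nothing to trim

-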
test/sasdataloader/test/test_data/avg_testdata.txt
r8c9ffde r7fd5e2a
1 1 0.00019987186878 -0.01196215 0.148605728355
2 0.000453772721237 0.02091606 0.0680283029334
3 0.000750492390439 -0.01337855 0.0444902910757
4 0.00103996394336 0.03062 0.0580312894528
5 0.0013420198959 0.0811008333333 0.0540469289108
6 0.001652061869 0.167022288372 0.0651891320031
7 0.00196086470492 27.5554711176 0.7350533009572
2 0.000453772721237 0.02091606 0.23372601
3 0.000750492390439 -0.01337855 0.17169562
4 0.00103996394336 0.03062 0.13136407
5 0.0013420198959 0.0811008333333 0.10681163
6 0.001652061869 0.167022288372 0.10098903
7 0.00196086470492 27.5554711176 0.7350533
8 8 0.00226262401224 105.031578947 1.35744586624
9 9 0.00256734439716 82.1791776119 1.10749938588
-
test/sasdataloader/test/test_data/ring_testdata.txt
r400155b r7fd5e2a
4 4 0.628318530718 0.964040908176 0.0790933208542
5 5 0.942477796077 0.922142905769 0.0781616076625
6 1.25663706144 1.02710537736 0.0808758975386
6 1.25663706144 1.02710537736 0.08136351514804
7 7 1.57079632679 1.01448978075 0.0808313893873
8 8 1.88495559215 1.04677136013 0.0828850195035
-
test/sasdataloader/test/test_data/sectorphi_testdata.txt
r8c9ffde r7fd5e2a
13 13 0.981747704247 0.893411561538 0.151685984204
14 14 1.06028752059 0.86231787 0.152618707077
15 1.13882733693 1.0607364925 0.164276150316
15 1.13882733693 1.0607364925 0.166167546658
16 16 1.21736715327 1.0684421475 0.163649496829
17 1.29590696961 1.09330437436 0.167871645263
17 1.29590696961 1.09330437436 0.16981858402
18 18 1.37444678595 0.88759347 0.150974201439
19 19 1.45298660229 1.1352002 0.172191803977
-
test/sasdataloader/test/test_data/sectorq_testdata.txt
r8c9ffde r7fd5e2a
17 17 0.00913119845523 0.405669568421 0.0705339106673
18 18 0.00938052380065 0.331241946 0.061307573431
19 0.00962825731078 0.237315993939 0.0578654769893
19 0.00962825731078 0.237315993939 0.059602636160850493
20 20 0.00987552050718 0.296916590385 0.0592796733987
-
test/sasdataloader/test/test_data/slabx_testdata.txt
r8c9ffde r7fd5e2a
21 21 -0.00184475260646 2.40154 1.09579651396
22 22 -0.00143541414791 0.065281 0.198049867458
23 -0.00102607559383 -0.04767235 0.154389685536
24 -0.000616736954402 -0.0090503 0.0960105462957
23 -0.00102607559383 -0.04767235 0.52329358394690839
24 -0.000616736954402 -0.0090503 0.36635778277525377
25 25 -0.000207398273925 0.03109325 0.246629023029
26 0.000201940423805 -0.027508775 0.082928847514
26 0.000201940423805 -0.027508775 0.36314899662535211
27 27 0.000611279108096 0.03251315 0.246951260373
28 0.00102061774154 -0.00987975 0.144233534589
29 0.00142995630705 0.075937 0.19485507435
28 0.00102061774154 -0.00987975 0.38184199939241886
29 0.00142995630705 0.075937 0.53662696540520582
30 30 0.00183929475361 10.60918375 1.62858709853
31 31 0.00224863307777 106.2485 7.2886384188
-
test/sasdataloader/test/test_data/slaby_testdata.txt
r8c9ffde r7fd5e2a
1 -0.00981587154747 0.197046827778 0.087222630926
1 -0.00981587154747 0.197046827778 0.09153902
2 2 -0.00940654133769 0.2466434 0.124972263589
3 3 -0.0089972103454 0.218745969444 0.0838510368061
4 4 -0.00858787875434 0.126093522222 0.107482002513
5 -0.00817854644886 0.310427366667 0.100945289852
5 -0.00817854644886 0.310427366667 0.10469745
6 6 -0.0077692135991 0.0843802722222 0.103942898914
7 7 -0.00735988010303 0.246036369444 0.0916479235889
-
test/sasdataloader/test/utest_abs_reader.py
rbd5c3b1 rf4e2f22
80 80 data_cor = Loader().load(find("sam14_cor.cor"))
81 81 for i in range(0, len(data_abs) - 1):
82 self.assertEquals(data_abs.x[i], data_cor.x[i])
83 self.assertEquals(data_abs.y[i], data_cor.y[i])
84 self.assertEquals(data_abs.dxl[i], data_cor.dxl[i])
85 self.assertEquals(data_abs.dxw[i], data_cor.dxw[i])
82 self.assertEqual(data_abs.x[i], data_cor.x[i])
83 self.assertEqual(data_abs.y[i], data_cor.y[i])
84 self.assertEqual(data_abs.dxl[i], data_cor.dxl[i])
85 self.assertEqual(data_abs.dxw[i], data_cor.dxw[i])
86 86 self.assertTrue(data_abs.dxl > 0)
87 87 
… …
118 118 self.assertEqual(self.data.detector[0].beam_center.y, center_y)
119 119 
120 self.assertEqual(self.data.I_unit, '1/cm')
120 self.assertEqual(self.data.I_unit, 'cm^{-1}')
121 121 self.assertEqual(self.data.data[0], 1.57831)
122 122 self.assertEqual(self.data.data[1], 2.70983)
-
test/sasdataloader/test/utest_ascii.py
rdb5196d r9fb4572
10 10 import unittest
11 11 from sas.sascalc.dataloader.loader import Loader
12 from sas.sascalc.dataloader.data_info import Data2D
12 13 
13 14 
… …
121 122 self.assertFalse(math.isnan(f_1d.y[i]))
122 123 self.assertFalse(math.isnan(f_1d.dy[i]))
124 self.assertTrue(isinstance(f_2d, Data2D))
123 125 f_2d.data = f_2d.data.flatten()
124 126 f_2d.qx_data = f_2d.qx_data.flatten()
-
test/sasdataloader/test/utest_averaging.py
rf53d684 rf4e2f22
106 106 
107 107 def setUp(self):
108 filepath = find('MAR07232_rest.h5')
108 filepath = find('test_data' + os.sep + 'MAR07232_rest.h5')
109 109 self.data_list = Loader().load(filepath)
110 110 self.data = self.data_list[0]
… …
121 121 
122 122 o = r(self.data)
123 filepath = find('ring_testdata.txt')
123 filepath = find('test_data' + os.sep + 'ring_testdata.txt')
124 124 answer_list = Loader().load(filepath)
125 125 answer = answer_list[0]
… …
142 142 o = r(self.data)
143 143 
144 filepath = find('avg_testdata.txt')
144 filepath = find('test_data' + os.sep + 'avg_testdata.txt')
145 145 answer = Loader().load(filepath)[0]
146 146 for i in range(r.nbins_phi):
… …
158 158 s, ds, npoints = r(self.data)
159 159 self.assertAlmostEqual(s, 34.278990899999997, 4)
160 self.assertAlmostEqual(ds, 7.8007981835194293, 4)
160 self.assertAlmostEqual(ds, 8.237259999538685, 4)
161 161 self.assertAlmostEqual(npoints, 324.0000, 4)
162 162 
… …
164 164 s, ds = r(self.data)
165 165 self.assertAlmostEqual(s, 0.10579935462962962, 4)
166 self.assertAlmostEqual(ds, 0.024076537603455028, 4)
166 self.assertAlmostEqual(ds, 0.02542364197388483, 4)
167 167 
168 168 def test_slabX(self):
… …
177 177 o = r(self.data)
178 178 
179 filepath = find('slabx_testdata.txt')
179 filepath = find('test_data' + os.sep + 'slabx_testdata.txt')
180 180 answer = Loader().load(filepath)[0]
181 181 for i in range(len(o.x)):
… …
195 195 o = r(self.data)
196 196 
197 filepath = find('slaby_testdata.txt')
197 filepath = find('test_data' + os.sep + 'slaby_testdata.txt')
198 198 answer = Loader().load(filepath)[0]
199 199 for i in range(len(o.x)):
… …
221 221 o = r(self.data)
222 222 
223 filepath = find('ring_testdata.txt')
223 filepath = find('test_data' + os.sep + 'ring_testdata.txt')
224 224 answer = Loader().load(filepath)[0]
225 225 for i in range(len(o.x)):
… …
238 238 o = r(self.data)
239 239 
240 filepath = find('sectorphi_testdata.txt')
240 filepath = find('test_data' + os.sep + 'sectorphi_testdata.txt')
241 241 answer = Loader().load(filepath)[0]
242 242 for i in range(len(o.x)):
… …
255 255 o = r(self.data)
256 256 
257 filepath = find('sectorq_testdata.txt')
257 filepath = find('test_data' + os.sep + 'sectorq_testdata.txt')
258 258 answer = Loader().load(filepath)[0]
259 259 for i in range(len(o.x)):
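The test fixtures now spell out the test_data directory with os.sep. os.path.join builds the same platform-correct path without manual separator handling, e.g.:

    import os

    filepath = os.path.join("test_data", "MAR07232_rest.h5")
    assert filepath == "test_data" + os.sep + "MAR07232_rest.h5"

-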
test/sasdataloader/test/utest_cansas.py
rf53d684 rf4e2f22
91 91 reader = XMLreader(self.xml_valid, self.schema_1_0)
92 92 valid = reader.validate_xml()
93 if valid:
94 self.assertTrue(valid)
95 else:
96 self.assertFalse(valid)
93 self.assertTrue(valid)
97 94 
98 95 def _check_data(self, data):
… …
193 190 def test_save_cansas_v1_0(self):
194 191 xmlreader = XMLreader(self.isis_1_0, self.schema_1_0)
195 valid = xmlreader.validate_xml()
196 self.assertTrue(valid)
192 self.assertTrue(xmlreader.validate_xml())
197 193 reader_generic = Loader()
198 194 dataloader = reader_generic.load(self.isis_1_0)
… …
207 203 return_data = reader2.read(self.write_1_0_filename)
208 204 written_data = return_data[0]
209 XMLreader(self.write_1_0_filename, self.schema_1_0)
210 valid = xmlreader.validate_xml()
211 self.assertTrue(valid)
205 xmlreader = XMLreader(self.write_1_0_filename, self.schema_1_0)
206 self.assertTrue(xmlreader.validate_xml())
212 207 self._check_data(written_data)
213 208 if os.path.isfile(self.write_1_0_filename):
… …
260 255 self.loader = Loader()
261 256 self.datafile_basic = find("simpleexamplefile.h5")
262 self.datafile_multiplesasentry = find("cansas_1Dand2D_samedatafile.h5")
263 self.datafile_multiplesasdata = find("cansas_1Dand2D_samesasentry.h5")
264 self.datafile_multiplesasdata_multiplesasentry = find("cansas_1Dand2D_multiplesasentry_multiplesasdata.h5")
257 self.datafile_multiplesasentry = find(
258 "test_data" + os.sep + "nxcansas_1Dand2D_multisasentry.h5")
259 self.datafile_multiplesasdata = find(
260 "test_data" + os.sep + "nxcansas_1Dand2D_multisasdata.h5")
261 self.datafile_multiplesasdata_multiplesasentry = find(
262 "test_data" + os.sep + "nxcansas_1Dand2D_multisasentry_multisasdata.h5")
265 263 
266 264 def test_real_data(self):
… …
273 271 self._check_multiple_data(self.data[0])
274 272 self._check_multiple_data(self.data[1])
275 self._check_1d_data(self.data[0])
273 if isinstance(self.data[0], Data1D):
274 self._check_1d_data(self.data[0])
275 self._check_2d_data(self.data[1])
276 else:
277 self._check_1d_data(self.data[1])
278 self._check_2d_data(self.data[0])
279 
280 def test_multiple_sasdatas(self):
281 self.data = self.loader.load(self.datafile_multiplesasdata)
282 self.assertTrue(len(self.data) == 2)
283 self._check_multiple_data(self.data[0])
284 self._check_multiple_data(self.data[1])
285 if isinstance(self.data[0], Data1D):
286 self._check_1d_data(self.data[0])
287 self._check_2d_data(self.data[1])
288 else:
289 self._check_1d_data(self.data[1])
290 self._check_2d_data(self.data[0])
291 
292 def test_multiple_sasentries_multiplesasdatas(self):
293 self.data = self.loader.load(
294 self.datafile_multiplesasdata_multiplesasentry)
295 self.assertTrue(len(self.data) == 4)
296 self._check_multiple_data(self.data[0])
297 self._check_multiple_data(self.data[1])
298 self._check_multiple_data(self.data[2])
299 self._check_multiple_data(self.data[3])
300 for data in self.data:
301 if isinstance(data, Data1D):
302 self._check_1d_data(data)
303 else:
304 self._check_2d_data(data)
276 305 
277 306 def _check_multiple_data(self, data):
278 self.assertTrue(data.title == "MH4_5deg_16T_SLOW")
279 self.assertTrue(data.run[0] == '33837')
280 self.assertTrue(len(data.run) == 1)
281 self.assertTrue(data.instrument == "SANS2D")
282 self.assertTrue(data.source.radiation == "Spallation Neutron Source")
283 self.assertTrue(len(data.detector) == 1)
284 self.assertTrue(data.detector[0].name == "rear-detector")
285 self.assertTrue(data.detector[0].distance == 4.385281)
286 self.assertTrue(data.detector[0].distance_unit == 'm')
287 self.assertTrue(len(data.trans_spectrum) == 1)
307 self.assertEqual(data.title, "MH4_5deg_16T_SLOW")
308 self.assertEqual(data.run[0], '33837')
309 self.assertEqual(len(data.run), 1)
310 self.assertEqual(data.instrument, "SANS2D")
311 self.assertEqual(data.source.radiation, "Spallation Neutron Source")
312 self.assertEqual(len(data.detector), 2)
313 self.assertTrue(data.detector[0].name == "rear-detector"
314 or data.detector[1].name == "rear-detector")
315 self.assertTrue(data.detector[0].name == "front-detector"
316 or data.detector[1].name == "front-detector")
317 self.assertAlmostEqual(data.detector[0].distance +
318 data.detector[1].distance, 7230.54, 2)
319 self.assertEqual(data.detector[0].distance_unit, 'mm')
320 self.assertEqual(len(data.trans_spectrum), 1)
288 321 
289 322 def _check_1d_data(self, data):
290 self.assertTrue(isinstance(data, Data1D))
291 self.assertTrue(len(data.x) == 66)
292 self.assertTrue(len(data.x) == len(data.y))
293 self.assertTrue(data.dy[10] == 0.20721350111248701)
294 self.assertTrue(data.y[10] == 24.193889608153476)
295 self.assertTrue(data.x[10] == 0.008981127988654792)
323 self.assertEqual(len(data.x), 66)
324 self.assertEqual(len(data.x), len(data.y))
325 self.assertAlmostEqual(data.dy[10], 0.207214)
326 self.assertAlmostEqual(data.y[10], 24.1939)
327 self.assertAlmostEqual(data.x[10], 0.00898113)
296 328 
297 329 def _check_2d_data(self, data):
298 330 self.assertTrue(isinstance(data, Data2D))
299 self.assertTrue(len(data.x) == 66)
300 self.assertTrue(len(data.x) == len(data.y))
301 self.assertTrue(data.dy[10] == 0.20721350111248701)
302 self.assertTrue(data.y[10] == 24.193889608153476)
303 self.assertTrue(data.x[10] == 0.008981127988654792)
331 self.assertEqual(len(data.q_data), 150*150)
332 self.assertEqual(len(data.q_data), len(data.data))
333 self.assertAlmostEqual(data.err_data[10], 0.186723989418)
334 self.assertAlmostEqual(data.data[10], 0.465181)
335 self.assertAlmostEqual(data.qx_data[10], -0.129)
336 self.assertAlmostEqual(data.qy_data[10], -0.149)
304 337 
305 338 def _check_example_data(self, data):
306 self.assertTrue(data.title == "")
307 self.assertTrue(data.x.size == 100)
308 self.assertTrue(data._xunit == "A^{-1}")
309 self.assertTrue(data._yunit == "cm^{-1}")
310 self.assertTrue(data.y.size == 100)
339 self.assertEqual(data.title, "")
340 self.assertEqual(data.x.size, 100)
341 self.assertEqual(data._xunit, "A^{-1}")
342 self.assertEqual(data._yunit, "cm^{-1}")
343 self.assertEqual(data.y.size, 100)
311 344 self.assertAlmostEqual(data.y[40], 0.952749011516985)
312 345 self.assertAlmostEqual(data.x[40], 0.3834415188257777)
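Because the loader does not guarantee the order of datasets within an entry, the updated tests branch on the concrete type rather than assuming the 1D set comes first. The same pattern applies to any consumer of Loader().load() (file name and handlers below are placeholders):

    from sas.sascalc.dataloader.loader import Loader
    from sas.sascalc.dataloader.data_info import Data1D, Data2D

    def handle_1d(d):   # stands in for the 1D assertions above
        print("1D:", len(d.x))

    def handle_2d(d):   # stands in for the 2D assertions above
        print("2D:", len(d.q_data))

    for dataset in Loader().load("nxcansas_1Dand2D_multisasentry.h5"):
        if isinstance(dataset, Data1D):
            handle_1d(dataset)
        elif isinstance(dataset, Data2D):
            handle_2d(dataset)

-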
test/sasdataloader/test/utest_generic_file_reader_class.py
r4a8d55c r282bc3f
45 45 last_f = f[0]
46 46 if hasattr(last_f, "errors"):
47 self.assertEquals(len(last_f.errors), 1)
47 self.assertEqual(len(last_f.errors), 1)
48 48 else:
49 49 self.fail("Errors did not propogate to the file properly.")
… …
51 51 def test_same_file_unknown_extensions(self):
52 52 # Five files, all with the same content, but different file extensions
53 no_ext = find("test_data//TestExtensions")
54 not_xml = find("test_data//TestExtensions.notxml")
53 no_ext = find("test_data" + os.sep + "TestExtensions")
54 not_xml = find("test_data" + os.sep + "TestExtensions.notxml")
55 55 # Deprecated extensions
56 asc_dep = find("test_data//TestExtensions.asc")
57 nxs_dep = find("test_data//TestExtensions.nxs")
56 asc_dep = find("test_data" + os.sep + "TestExtensions.asc")
57 nxs_dep = find("test_data" + os.sep + "TestExtensions.nxs")
58 58 # Native extension as a baseline
59 xml_native = find("test_data//TestExtensions.xml")
59 xml_native = find("test_data" + os.sep + "TestExtensions.xml")
60 60 # Load the files and check contents
61 61 no_ext_load = self.generic_reader.load(no_ext)
… …
70 70 self.check_unknown_extension(xml_load[0])
71 71 # Be sure the deprecation warning is passed with the file
72 self.assertEquals(len(asc_load[0].errors), 1)
73 self.assertEquals(len(nxs_load[0].errors), 1)
72 self.assertEqual(len(asc_load[0].errors), 1)
73 self.assertEqual(len(nxs_load[0].errors), 0)
74 74 
75 75 def check_unknown_extension(self, data):
76 76 self.assertTrue(isinstance(data, Data1D))
77 self.assertEquals(len(data.x), 138)
78 self.assertEquals(data.sample.ID, "TK49 c10_SANS")
79 self.assertEquals(data.meta_data["loader"], "CanSAS XML 1D")
77 self.assertEqual(len(data.x), 138)
78 self.assertEqual(data.sample.ID, "TK49 c10_SANS")
79 self.assertEqual(data.meta_data["loader"], "CanSAS XML 1D")
80 80 
81 81 def tearDown(self):
-
test/sasdataloader/test/utest_red2d_reader.py
rf53d684 rfc51d06
31 31 self.assertEqual(f.qx_data[0],-0.03573497)
32 32 self.assertEqual(f.qx_data[36863],0.2908819)
33 self.assertEqual(f.Q_unit, '1/A')
34 self.assertEqual(f.I_unit, '1/cm')
33 self.assertEqual(f.Q_unit, 'A^{-1}')
34 self.assertEqual(f.I_unit, 'cm^{-1}')
35 35 
36 36 self.assertEqual(f.meta_data['loader'],"IGOR/DAT 2D Q_map")
-
test/sasdataloader/test/utest_sesans.py
rf53d684 rf4e2f22
25 25 Test .SES in the full loader to make sure that the file type is correctly accepted
26 26 """
27 file = Loader().load(find("sesans_examples/sphere2micron.ses"))
27 file = Loader().load(find("sesans_examples" + os.sep +
28 "sphere2micron.ses"))
28 29 f = file[0]
29 30 # self.assertEqual(f, 5)
… …
44 45 Test .SES loading on a TOF dataset
45 46 """
46 file = self.loader(find("sesans_examples/sphere_isis.ses"))
47 file = self.loader(find("sesans_examples" + os.sep + "sphere_isis.ses"))
47 48 f = file[0]
48 49 self.assertEqual(len(file), 1)
… …
62 63 FileContentsException,
63 64 self.loader,
64 find("sesans_examples/sesans_no_data.ses"))
65 find("sesans_examples" + os.sep + "sesans_no_data.ses"))
65 66 
66 67 def test_sesans_no_spin_echo_unit(self):
… …
71 72 FileContentsException,
72 73 self.loader,
73 find("sesans_examples/no_spin_echo_unit.ses"))
74 find("sesans_examples" + os.sep + "no_spin_echo_unit.ses"))
74 75 
75 76 def test_sesans_future_version(self):
… …
80 81 FileContentsException,
81 82 self.loader,
82 find("sesans_examples/next_gen.ses"))
83 find("sesans_examples" + os.sep + "next_gen.ses"))
83 84 
84 85 def test_sesans_mandatory_headers(self):
… …
89 90 FileContentsException,
90 91 self.loader,
91 find("sesans_examples/no_wavelength.ses"))
92 find("sesans_examples" + os.sep + "no_wavelength.ses"))
92 93 
93 94 def test_sesans_columns_match_headers(self):
… …
98 99 FileContentsException,
99 100 self.loader,
100 find("sesans_examples/too_many_headers.ses"))
101 find("sesans_examples" + os.sep + "too_many_headers.ses"))
101 102 
102 103 if __name__ == "__main__":