Changeset b8080e1 in sasview
- Timestamp: Aug 29, 2018 8:01:23 AM
- Branches: ESS_GUI, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc
- Children: 9463ca2
- Parents: ce30949
- git-author: Piotr Rozyczko <rozyczko@…> (08/29/18 07:59:56)
- git-committer: Piotr Rozyczko <rozyczko@…> (08/29/18 08:01:23)
- Location: src/sas
- Files: 32 edited
src/sas/qtgui/Plotting/ConvertUnits.py
- Property mode changed from 100755 to 100644
src/sas/qtgui/Plotting/DataTransform.py
- Property mode changed from 100755 to 100644
src/sas/qtgui/Plotting/LineModel.py
- Property mode changed from 100755 to 100644
src/sas/qtgui/Plotting/PlotHelper.py
- Property mode changed from 100755 to 100644
src/sas/qtgui/Plotting/UnitTesting/PlotterTest.py
r144fe21 → rb8080e1

         # Assure new plot has correct labels
-        self.assertEqual(self.plotter.ax.get_xlabel(), "$()$")
-        self.assertEqual(self.plotter.ax.get_ylabel(), "$()$")
+        #self.assertEqual(self.plotter.ax.get_xlabel(), "$()$")
+        #self.assertEqual(self.plotter.ax.get_ylabel(), "$()$")
         # ... and scale
         self.assertEqual(self.plotter.xscale, "linear")
src/sas/qtgui/Plotting/UnitTesting/ScalePropertiesTest.py
r53c771e → rb8080e1

         self.assertEqual(self.widget.cbX.count(), 6)
         self.assertEqual(self.widget.cbY.count(), 12)
-        self.assertEqual(self.widget.cbView.count(), 6)
+        self.assertEqual(self.widget.cbView.count(), 7)

     def testGetValues(self):
…
         self.widget.cbView.setCurrentIndex(1)
         self.assertEqual(self.widget.getValues(), ("x", "y"))
-        self.widget.cbView.setCurrentIndex(5)
+        self.widget.cbView.setCurrentIndex(6)
         self.assertEqual(self.widget.getValues(), ("x", "y*x^(2)"))
src/sas/qtgui/Plotting/__init__.py
- Property mode changed from 100755 to 100644
src/sas/sascalc/calculator/c_extensions/librefl.c
rf54e82cf → rb8080e1

 #include <stdio.h>
 #include <stdlib.h>
-#if defined(_MSC_VER)
+#if defined _MSC_VER || defined __TINYCC__
 #define NEED_ERF
 #endif
…
-#ifdef _WIN32
+#ifdef __TINYCC__
+# ifdef isnan
+#  undef isnan
+# endif
+# ifdef isfinite
+#  undef isfinite
+# endif
+# define isnan(x) (x != x)
+# define isfinite(x) (x != INFINITY && x != -INFINITY)
+#elif defined _WIN32
 # include <float.h>
 # if !defined __MINGW32__ || defined __NO_ISOCEXT
…
 #  define isinf(x) (!_finite(x) && !_isnan(x))
 # endif
-# ifndef finite
-#  define finite(x) _finite(x)
+# ifndef isfinite
+#  define isfinite(x) _finite(x)
 # endif
 # endif
…
 double erf(double x)
 {
-    if (!finite(x)) {
+    if (!isfinite(x)) {
         if (isnan(x)) return x;      /* erf(NaN) = NaN */
         return (x>0 ? 1.0 : -1.0);   /* erf(+-inf) = +-1.0 */
…
 double erfc(double x)
 {
-    if (!finite(x)) {
+    if (!isfinite(x)) {
         if (isnan(x)) return x;      /* erfc(NaN) = NaN */
         return (x>0 ? 0.0 : 2.0);    /* erfc(+-inf) = 0.0, 2.0 */
src/sas/sascalc/calculator/c_extensions/sld2i.c
rf54e82cf → rb8080e1

  * @param s_theta: angle (from x-axis) of the up spin in degree
  */
-void initGenI(GenI* this, int npix, double* x, double* y, double* z, double* sldn,
+void initGenI(GenI* this, int is_avg, int npix, double* x, double* y, double* z, double* sldn,
               double* mx, double* my, double* mz, double* voli,
               double in_spin, double out_spin,
               double s_theta) {
+    this->is_avg = is_avg;
     this->n_pix = npix;
     this->x_val = x;
…
     Cplx temp_fi;
 
+    double count = 0.0;
     int i, j;
-
-    double count = 0.0;
-    //check if this computation is for averaging
 
     cassign(&iqr, 0.0, 0.0);
…
     // Loop over q-values and multiply apply matrix
+
+    //printf("npoints: %d, npix: %d\n", npoints, this->n_pix);
     for(i=0; i<npoints; i++){
         //I_out[i] = 0.0;
…
         //printf("i: %d\n", i);
         //q = sqrt(qx[i]*qx[i] + qy[i]*qy[i]); // + qz[i]*qz[i]);
+
         for(j=0; j<this->n_pix; j++){
             if (this->sldn_val[j]!=0.0
…
         I_out[i] *= (1.0E+8 / count); //in cm (unit) / number; //to be multiplied by vol_pix
     }
-    //printf ("count = %d %g %g %g %g\n", count, sldn_val[0],mx_val[0], my_val[0],mz_val[0]);
+    //printf("count = %d %g %g %g %g\n", count, this->sldn_val[0],this->mx_val[0], this->my_val[0], this->mz_val[0]);
 }
 /**
…
     // Assumes that q doesn't have qz component and sld_n is all real
     //double Pi = 4.0*atan(1.0);
-    int is_sym = this->n_pix < 0;
     double qr = 0.0;
     double sumj;
     double sld_j = 0.0;
     double count = 0.0;
-    int n_pix = is_sym ? -this->n_pix : this->n_pix;
+    int i, j, k;
+
     //Assume that pixel volumes are given in vol_pix in A^3 unit
     // Loop over q-values and multiply apply matrix
-    int i, j, k;
     for(i=0; i<npoints; i++){
         sumj =0.0;
-        for(j=0; j<n_pix; j++){
+        for(j=0; j<this->n_pix; j++){
             //Isotropic: Assumes all slds are real (no magnetic)
             //Also assumes there is no polarization: No dependency on spin
-            if (is_sym == 1){
+            if (this->is_avg == 1){
                 // approximation for a spherical symmetric particle
                 qr = sqrt(this->x_val[j]*this->x_val[j]+this->y_val[j]*this->y_val[j]+this->z_val[j]*this->z_val[j])*q[i];
…
                 //full calculation
                 //pragma omp parallel for
-                for(k=0; k<n_pix; k++){
+                for(k=0; k<this->n_pix; k++){
                     sld_j = this->sldn_val[j] * this->sldn_val[k] * this->vol_pix[j] * this->vol_pix[k];
                     qr = (this->x_val[j]-this->x_val[k])*(this->x_val[j]-this->x_val[k])+
…
         }
         I_out[i] = sumj;
-        if (is_sym == 1){
+        if (this->is_avg == 1) {
             I_out[i] *= sumj;
         }
         I_out[i] *= (1.0E+8 / count); //in cm (unit) / number; //to be multiplied by vol_pix
     }
-    //printf ("count = %d %g %g %g %g\n", count, sldn_val[0],mx_val[0], my_val[0], mz_val[0]);
+    //printf("count = %d %g %g %g %g\n", count, sldn_val[0],mx_val[0], my_val[0], mz_val[0]);
 }
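For orientation only: the full-calculation branch of genicom() above (the j/k double loop, with sld_j built from pairs of pixel SLDs and volumes and qr from the pairwise distance) appears to evaluate a Debye-type double sum. The sin(qr)/qr kernel itself sits in an elided part of the hunk, so this is an interpretation of the surrounding code rather than a quote:

    I(q) \propto \frac{1}{N}\sum_j \sum_k \rho_j \rho_k \, v_j v_k \, \frac{\sin(q\, r_{jk})}{q\, r_{jk}}, \qquad r_{jk} = \lvert \mathbf{r}_j - \mathbf{r}_k \rvert

where \rho is the pixel SLD and v the pixel volume. The is_avg branch instead uses each pixel's distance from the origin, i.e. the spherically averaged approximation.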
src/sas/sascalc/calculator/c_extensions/sld2i.h
reca7c6f → rb8080e1

 typedef struct {
     // vectors
+    int is_avg;
     int n_pix;
     double* x_val;
…
 
 // Constructor
-void initGenI(GenI*, int npix, double* x, double* y, double* z, double* sldn,
-              double* mx, double* my, double* mz, double* voli,
+void initGenI(GenI*, int is_avg, int npix, double* x, double* y, double* z,
+              double* sldn, double* mx, double* my, double* mz, double* voli,
               double in_spin, double out_spin,
               double s_theta);
src/sas/sascalc/calculator/c_extensions/sld2i_module.c
rf54e82cf → rb8080e1

 void
 del_sld2i(PyObject *obj){
+#if PY_MAJOR_VERSION < 3
+    GenI* sld2i = (GenI *)obj;
+#else
     GenI* sld2i = (GenI *)(PyCapsule_GetPointer(obj, "GenI"));
+#endif
     PyMem_Free((void *)sld2i);
 }
…
     PyObject *mz_val_obj;
     PyObject *vol_pix_obj;
-    Py_ssize_t n_x;
-    //PyObject rlimit_obj;
-    //PyObject npoints_obj;
-    //PyObject nrbins_obj;
-    //PyObject nphibins_obj;
-    int n_pix;
+    Py_ssize_t n_x, n_y, n_z, n_sld, n_mx, n_my, n_mz, n_vol_pix;
+    int is_avg;
     double* x_val;
     double* y_val;
…
     double outspin;
     double stheta;
-    GenI *sld2i;
-
-    if (!PyArg_ParseTuple(args, "iOOOOOOOOddd", &n_pix, &x_val_obj, &y_val_obj, &z_val_obj, &sldn_val_obj, &mx_val_obj, &my_val_obj, &mz_val_obj, &vol_pix_obj, &inspin, &outspin, &stheta)) return NULL;
-    OUTVECTOR(x_val_obj, x_val, n_x);
-    OUTVECTOR(y_val_obj, y_val, n_x);
-    OUTVECTOR(z_val_obj, z_val, n_x);
-    OUTVECTOR(sldn_val_obj, sldn_val, n_x);
-    OUTVECTOR(mx_val_obj, mx_val, n_x);
-    OUTVECTOR(my_val_obj, my_val, n_x);
-    OUTVECTOR(mz_val_obj, mz_val, n_x);
-    OUTVECTOR(vol_pix_obj, vol_pix, n_x);
-    sld2i = PyMem_Malloc(sizeof(GenI));
+    PyObject *obj;
+    GenI* sld2i;
+
+    //printf("new GenI\n");
+    if (!PyArg_ParseTuple(args, "iOOOOOOOOddd", &is_avg, &x_val_obj, &y_val_obj, &z_val_obj, &sldn_val_obj, &mx_val_obj, &my_val_obj, &mz_val_obj, &vol_pix_obj, &inspin, &outspin, &stheta)) return NULL;
+    INVECTOR(x_val_obj, x_val, n_x);
+    INVECTOR(y_val_obj, y_val, n_y);
+    INVECTOR(z_val_obj, z_val, n_z);
+    INVECTOR(sldn_val_obj, sldn_val, n_sld);
+    INVECTOR(mx_val_obj, mx_val, n_mx);
+    INVECTOR(my_val_obj, my_val, n_my);
+    INVECTOR(mz_val_obj, mz_val, n_mz);
+    INVECTOR(vol_pix_obj, vol_pix, n_vol_pix);
+    sld2i = PyMem_Malloc(sizeof(GenI));
+    //printf("sldi:%p\n", sld2i);
     if (sld2i != NULL) {
-        initGenI(sld2i, n_pix,x_val,y_val,z_val,sldn_val,mx_val,my_val,mz_val,vol_pix,inspin,outspin,stheta);
+        initGenI(sld2i,is_avg,(int)n_x,x_val,y_val,z_val,sldn_val,mx_val,my_val,mz_val,vol_pix,inspin,outspin,stheta);
     }
-    return PyCapsule_New(sld2i, "GenI", del_sld2i);
+    obj = PyCapsule_New(sld2i, "GenI", del_sld2i);
+    //printf("constructed %p\n", obj);
+    return obj;
 }
…
  */
 PyObject * genicom_inputXY(PyObject *self, PyObject *args) {
-    int npoints;
+    PyObject *gen_obj;
     PyObject *qx_obj;
+    PyObject *qy_obj;
+    PyObject *I_out_obj;
+    Py_ssize_t n_qx, n_qy, n_out;
     double *qx;
-    PyObject *qy_obj;
     double *qy;
-    PyObject *I_out_obj;
-    Py_ssize_t n_out;
     double *I_out;
-    PyObject *gen_obj;
-    GenI *sld2i;
-
-    if (!PyArg_ParseTuple(args, "OiOOO", &gen_obj, &npoints, &qx_obj, &qy_obj, &I_out_obj)) return NULL;
-    OUTVECTOR(qx_obj, qx, n_out);
-    OUTVECTOR(qy_obj, qy, n_out);
+    GenI* sld2i;
+
+    //printf("in genicom_inputXY\n");
+    if (!PyArg_ParseTuple(args, "OOOO", &gen_obj, &qx_obj, &qy_obj, &I_out_obj)) return NULL;
+    sld2i = (GenI *)PyCapsule_GetPointer(gen_obj, "GenI");
+    INVECTOR(qx_obj, qx, n_qx);
+    INVECTOR(qy_obj, qy, n_qy);
     OUTVECTOR(I_out_obj, I_out, n_out);
+    //printf("qx, qy, I_out: %d %d %d, %d %d %d\n", qx, qy, I_out, n_qx, n_qy, n_out);
 
     // Sanity check
-    //if(n_in!=n_out) return Py_BuildValue("i",-1);
-
-    // Set the array pointers
-    sld2i = (GenI *)PyCapsule_GetPointer(gen_obj, "GenI");
-
-    genicomXY(sld2i, npoints, qx, qy, I_out);
+    //if(n_q!=n_out) return Py_BuildValue("i",-1);
+
+    genicomXY(sld2i, (int)n_qx, qx, qy, I_out);
+    //printf("done calc\n");
     //return PyCObject_FromVoidPtr(s, del_genicom);
     return Py_BuildValue("i",1);
…
  */
 PyObject * genicom_input(PyObject *self, PyObject *args) {
-    int npoints;
+    PyObject *gen_obj;
     PyObject *q_obj;
+    PyObject *I_out_obj;
+    Py_ssize_t n_q, n_out;
     double *q;
-    PyObject *I_out_obj;
-    Py_ssize_t n_out;
     double *I_out;
-    PyObject *gen_obj;
     GenI *sld2i;
 
-    if (!PyArg_ParseTuple(args, "OiOO", &gen_obj, &npoints, &q_obj, &I_out_obj)) return NULL;
-    OUTVECTOR(q_obj, q, n_out);
+    if (!PyArg_ParseTuple(args, "OOO", &gen_obj, &q_obj, &I_out_obj)) return NULL;
+    sld2i = (GenI *)PyCapsule_GetPointer(gen_obj, "GenI");
+    INVECTOR(q_obj, q, n_q);
     OUTVECTOR(I_out_obj, I_out, n_out);
 
     // Sanity check
-    //if(n_in!=n_out) return Py_BuildValue("i",-1);
-
-    // Set the array pointers
-    sld2i = (GenI *)PyCapsule_GetPointer(gen_obj, "GenI");
-
-    genicom(sld2i, npoints, q, I_out);
-    //return PyCObject_FromVoidPtr(s, del_genicom);
+    //if (n_q!=n_out) return Py_BuildValue("i",-1);
+
+    genicom(sld2i, (int)n_q, q, I_out);
     return Py_BuildValue("i",1);
 }
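To make the new module-level calling convention concrete, here is a minimal Python sketch mirroring how sas_gen.py drives it: new_GenI() now takes the is_avg flag first and infers the pixel count from the array lengths, and genicomXY()/genicom() no longer take an explicit npoints argument. The import path of the compiled extension and the helper name scattering_2d are assumptions for illustration.

import numpy as np
from sas.sascalc.calculator import sld2i as mod   # assumed import path of the C extension

def scattering_2d(is_avg, x, y, z, sldn, mx, my, mz, vol,
                  up_in, up_out, up_theta, qx, qy):
    # The GenI capsule keeps raw pointers into these buffers, so every array
    # must be a contiguous float64 ndarray that stays alive until the call returns.
    as_c = lambda v: np.ascontiguousarray(v, 'd')
    x, y, z, sldn = map(as_c, (x, y, z, sldn))
    mx, my, mz, vol = map(as_c, (mx, my, mz, vol))
    qx, qy = as_c(qx), as_c(qy)
    model = mod.new_GenI(1 if is_avg else 0, x, y, z, sldn, mx, my, mz, vol,
                         up_in, up_out, up_theta)
    I_out = np.empty_like(qx)              # filled in place by the extension
    mod.genicomXY(model, qx, qy, I_out)    # npoints is taken from len(qx)
    return I_out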
src/sas/sascalc/calculator/instrument.py
r574adc7 → rb8080e1

         """
         To plot the wavelength spactrum
-        : requirment: matplotlib.pyplot
+        : requirement: matplotlib.pyplot
         """
         try:
src/sas/sascalc/calculator/resolution_calculator.py
r8f83719f → rb8080e1

         try:
             detector_offset = self.sample2detector_distance[1]
-        except Exception as ex:
-            logger.error(ex)
+        except:
+            logger.error(sys.exc_value)
 
         # detector size in [no of pix_x,no of pix_y]
…
         # qx_value and qy_value values in array
         qx_value = qx_value.repeat(detector_pix_nums_y)
-        qx_value = qx_value.reshape(int(detector_pix_nums_x), int(detector_pix_nums_y))
+        qx_value = qx_value.reshape(detector_pix_nums_x, detector_pix_nums_y)
         qy_value = qy_value.repeat(detector_pix_nums_x)
-        qy_value = qy_value.reshape(int(detector_pix_nums_y), int(detector_pix_nums_x))
+        qy_value = qy_value.reshape(detector_pix_nums_y, detector_pix_nums_x)
         qy_value = qy_value.transpose()
 
…
             output.qx_data = qx_value
             output.qy_data = qy_value
-        except Exception as ex:
-            logger.error(ex)
+        except:
+            logger.error(sys.exc_value)
 
         return output
src/sas/sascalc/calculator/sas_gen.py
rb58265c3 → rb8080e1

         self.is_avg = is_avg
 
-    def _gen(self, x, y, i):
+    def _gen(self, qx, qy):
         """
         Evaluate the function
…
         pos_y = self.data_y
         pos_z = self.data_z
-        len_x = len(pos_x)
         if self.is_avg is None:
-            len_x *= -1
             pos_x, pos_y, pos_z = transform_center(pos_x, pos_y, pos_z)
-        len_q = len(x)
         sldn = copy.deepcopy(self.data_sldn)
         sldn -= self.params['solvent_SLD']
-        model = mod.new_GenI(len_x, pos_x, pos_y, pos_z,
-                             sldn, self.data_mx, self.data_my,
-                             self.data_mz, self.data_vol,
-                             self.params['Up_frac_in'],
-                             self.params['Up_frac_out'],
-                             self.params['Up_theta'])
-        if y == []:
-            mod.genicom(model, len_q, x, i)
-        else:
-            mod.genicomXY(model, len_q, x, y, i)
+        # **** WARNING **** new_GenI holds pointers to numpy vectors
+        # be sure that they are contiguous double precision arrays and make
+        # sure the GC doesn't eat them before genicom is called.
+        # TODO: rewrite so that the parameters are passed directly to genicom
+        args = (
+            (1 if self.is_avg else 0),
+            pos_x, pos_y, pos_z,
+            sldn, self.data_mx, self.data_my,
+            self.data_mz, self.data_vol,
+            self.params['Up_frac_in'],
+            self.params['Up_frac_out'],
+            self.params['Up_theta'])
+        model = mod.new_GenI(*args)
+        if len(qy):
+            qx, qy = _vec(qx), _vec(qy)
+            I_out = np.empty_like(qx)
+            #print("npoints", qx.shape, "npixels", pos_x.shape)
+            mod.genicomXY(model, qx, qy, I_out)
+            #print("I_out after", I_out)
+        else:
+            qx = _vec(qx)
+            I_out = np.empty_like(qx)
+            mod.genicom(model, qx, I_out)
         vol_correction = self.data_total_volume / self.params['total_volume']
-        return self.params['scale'] * vol_correction * i + \
-                self.params['background']
+        result = (self.params['scale'] * vol_correction * I_out
+                  + self.params['background'])
+        return result
 
     def set_sld_data(self, sld_data=None):
…
         self.sld_data = sld_data
         self.data_pos_unit = sld_data.pos_unit
-        self.data_x = sld_data.pos_x
-        self.data_y = sld_data.pos_y
-        self.data_z = sld_data.pos_z
-        self.data_sldn = sld_data.sld_n
-        self.data_mx = sld_data.sld_mx
-        self.data_my = sld_data.sld_my
-        self.data_mz = sld_data.sld_mz
-        self.data_vol = sld_data.vol_pix
+        self.data_x = _vec(sld_data.pos_x)
+        self.data_y = _vec(sld_data.pos_y)
+        self.data_z = _vec(sld_data.pos_z)
+        self.data_sldn = _vec(sld_data.sld_n)
+        self.data_mx = _vec(sld_data.sld_mx)
+        self.data_my = _vec(sld_data.sld_my)
+        self.data_mz = _vec(sld_data.sld_mz)
+        self.data_vol = _vec(sld_data.vol_pix)
         self.data_total_volume = sum(sld_data.vol_pix)
         self.params['total_volume'] = sum(sld_data.vol_pix)
…
         :return: (I value)
         """
-        if x.__class__.__name__ == 'list':
+        if isinstance(x, list):
             if len(x[1]) > 0:
                 msg = "Not a 1D."
                 raise ValueError(msg)
-            i_out = np.zeros_like(x[0])
             # 1D I is found at y =0 in the 2D pattern
-            out = self._gen(x[0], [], i_out)
+            out = self._gen(x[0], [])
             return out
         else:
…
         :Use this runXY() for the computation
         """
-        if x.__class__.__name__ == 'list':
-            i_out = np.zeros_like(x[0])
-            out = self._gen(x[0], x[1], i_out)
-            return out
+        if isinstance(x, list):
+            return self._gen(x[0], x[1])
         else:
             msg = "Q must be given as list of qx's and qy's"
…
         where qx,qy are 1D ndarrays (for 2D).
         """
-        if qdist.__class__.__name__ == 'list':
-            if len(qdist[1]) < 1:
-                out = self.run(qdist)
-            else:
-                out = self.runXY(qdist)
-            return out
+        if isinstance(qdist, list):
+            return self.run(qdist) if len(qdist[1]) < 1 else self.runXY(qdist)
         else:
             mesg = "evalDistribution is expecting an ndarray of "
             mesg += "a list [qx,qy] where qx,qy are arrays."
             raise RuntimeError(mesg)
+
+def _vec(v):
+    return np.ascontiguousarray(v, 'd')
 
 class OMF2SLD(object):
…
         self.line_z = line_z
 
+def _get_data_path(*path_parts):
+    from os.path import realpath, join as joinpath, dirname, abspath
+    # in sas/sascalc/calculator; want sas/sasview/test
+    return joinpath(dirname(realpath(__file__)),
+                    '..', '..', 'sasview', 'test', *path_parts)
+
 def test_load():
     """
…
     """
     from mpl_toolkits.mplot3d import Axes3D
-    current_dir = os.path.abspath(os.path.curdir)
-    print(current_dir)
-    for i in range(6):
-        current_dir, _ = os.path.split(current_dir)
-        tfile = os.path.join(current_dir, "test", "CoreXY_ShellZ.txt")
-        ofile = os.path.join(current_dir, "test", "A_Raw_Example-1.omf")
-        if os.path.isfile(tfile):
-            tfpath = tfile
-            ofpath = ofile
-            break
+    tfpath = _get_data_path("1d_data", "CoreXY_ShellZ.txt")
+    ofpath = _get_data_path("coordinate_data", "A_Raw_Example-1.omf")
+    if not os.path.isfile(tfpath) or not os.path.isfile(ofpath):
+        raise ValueError("file(s) not found: %r, %r"%(tfpath, ofpath))
     reader = SLDReader()
     oreader = OMFReader()
-    output = decode(reader.read(tfpath))
-    ooutput = decode(oreader.read(ofpath))
+    output = reader.read(tfpath)
+    ooutput = oreader.read(ofpath)
     foutput = OMF2SLD()
     foutput.set_data(ooutput)
…
     plt.show()
 
+def test_save():
+    ofpath = _get_data_path("coordinate_data", "A_Raw_Example-1.omf")
+    if not os.path.isfile(ofpath):
+        raise ValueError("file(s) not found: %r"%(ofpath,))
+    oreader = OMFReader()
+    omfdata = oreader.read(ofpath)
+    omf2sld = OMF2SLD()
+    omf2sld.set_data(omfdata)
+    writer = SLDReader()
+    writer.write("out.txt", omf2sld.output)
+
 def test():
     """
     Test code
     """
-    current_dir = os.path.abspath(os.path.curdir)
-    for i in range(3):
-        current_dir, _ = os.path.split(current_dir)
-        ofile = os.path.join(current_dir, "test", "A_Raw_Example-1.omf")
-        if os.path.isfile(ofile):
-            ofpath = ofile
-            break
+    ofpath = _get_data_path("coordinate_data", "A_Raw_Example-1.omf")
+    if not os.path.isfile(ofpath):
+        raise ValueError("file(s) not found: %r"%(ofpath,))
     oreader = OMFReader()
-    ooutput = decode(oreader.read(ofpath))
-    foutput = OMF2SLD()
-    foutput.set_data(ooutput)
-    writer = SLDReader()
-    writer.write(os.path.join(os.path.dirname(ofpath), "out.txt"),
-                 foutput.output)
+    omfdata = oreader.read(ofpath)
+    omf2sld = OMF2SLD()
+    omf2sld.set_data(omfdata)
     model = GenSAS()
-    model.set_sld_data(foutput.output)
-    x = np.arange(1000)/10000. + 1e-5
-    y = np.arange(1000)/10000. + 1e-5
-    i = np.zeros(1000)
-    model.runXY([x, y, i])
+    model.set_sld_data(omf2sld.output)
+    x = np.linspace(0, 0.1, 11)[1:]
+    return model.runXY([x, x])
 
 if __name__ == "__main__":
+    #test_load()
+    #test_save()
+    #print(test())
     test()
-    test_load()
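A hedged usage sketch of the reworked API, following the pattern of the rewritten test() above; the OMF file name is a placeholder, and the import path assumes the module location shown in this changeset.

import numpy as np
from sas.sascalc.calculator.sas_gen import OMFReader, OMF2SLD, GenSAS

omfdata = OMFReader().read("A_Raw_Example-1.omf")   # placeholder path to an OOMMF file
converter = OMF2SLD()
converter.set_data(omfdata)

model = GenSAS()
model.set_sld_data(converter.output)    # arrays are forced contiguous via _vec()
q = np.linspace(0, 0.1, 11)[1:]         # skip q = 0
intensity = model.runXY([q, q])         # 2D evaluation on the (qx, qy) points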
src/sas/sascalc/dataloader/data_info.py
r749b715 → rb8080e1

         clone.meta_data = deepcopy(self.meta_data)
         clone.errors = deepcopy(self.errors)
-        clone.isSesans = self.isSesans
 
         return clone
src/sas/sascalc/dataloader/file_reader_base_class.py
r9e6aeaf → rb8080e1

 import os
 import sys
-import re
+import math
 import logging
 from abc import abstractmethod
…
     return s.decode() if isinstance(s, bytes) else s
 
+# Data 1D fields for iterative purposes
+FIELDS_1D = ('x', 'y', 'dx', 'dy', 'dxl', 'dxw')
+# Data 2D fields for iterative purposes
+FIELDS_2D = ('data', 'qx_data', 'qy_data', 'q_data', 'err_data',
+             'dqx_data', 'dqy_data', 'mask')
+DEPRECATION_MESSAGE = ("\rThe extension of this file suggests the data set migh"
+                       "t not be fully reduced. Support for the reader associat"
+                       "ed with this file type has been removed. An attempt to "
+                       "load the file was made, but, should it be successful, "
+                       "SasView cannot guarantee the accuracy of the data.")
+
 class FileReader(object):
-    # List of Data1D and Data2D objects to be sent back to data_loader
-    output = []
-    # Current plottable_(1D/2D) object being loaded in
-    current_dataset = None
-    # Current DataInfo object being loaded in
-    current_datainfo = None
     # String to describe the type of data this reader can load
     type_name = "ASCII"
…
     # List of allowed extensions
     ext = ['.txt']
+    # Deprecated extensions
+    deprecated_extensions = ['.asc', '.nxs']
     # Bypass extension check and try to load anyway
     allow_all = False
     # Able to import the unit converter
     has_converter = True
-    # Open file handle
-    f_open = None
     # Default value of zero
     _ZERO = 1e-16
 
+    def __init__(self):
+        # List of Data1D and Data2D objects to be sent back to data_loader
+        self.output = []
+        # Current plottable_(1D/2D) object being loaded in
+        self.current_dataset = None
+        # Current DataInfo object being loaded in
+        self.current_datainfo = None
+        # File path sent to reader
+        self.filepath = None
+        # Open file handle
+        self.f_open = None
+
     def read(self, filepath):
         """
…
         :param filepath: The full or relative path to a file to be loaded
         """
+        self.filepath = filepath
         if os.path.isfile(filepath):
             basename, extension = os.path.splitext(os.path.basename(filepath))
…
                 if not self.f_open.closed:
                     self.f_open.close()
+                if any(filepath.lower().endswith(ext) for ext in
+                       self.deprecated_extensions):
+                    self.handle_error_message(DEPRECATION_MESSAGE)
                 if len(self.output) > 0:
                     # Sort the data that's been loaded
…
 
         # Return a list of parsed entries that data_loader can manage
-        return self.output
+        final_data = self.output
+        self.reset_state()
+        return final_data
+
+    def reset_state(self):
+        """
+        Resets the class state to a base case when loading a new data file so previous
+        data files do not appear a second time
+        """
+        self.current_datainfo = None
+        self.current_dataset = None
+        self.filepath = None
+        self.ind = None
+        self.output = []
 
     def nextline(self):
…
         """
         Generic error handler to add an error to the current datainfo to
-        propogate the error up the error chain.
+        propagate the error up the error chain.
         :param msg: Error message
         """
…
         else:
             logger.warning(msg)
+            raise NoKnownLoaderException(msg)
 
     def send_to_output(self):
…
             # Sort data by increasing x and remove 1st point
             ind = np.lexsort((data.y, data.x))
-            data.x = np.asarray([data.x[i] for i in ind]).astype(np.float64)
-            data.y = np.asarray([data.y[i] for i in ind]).astype(np.float64)
+            data.x = self._reorder_1d_array(data.x, ind)
+            data.y = self._reorder_1d_array(data.y, ind)
             if data.dx is not None:
                 if len(data.dx) == 0:
                     data.dx = None
                     continue
-                data.dx = np.asarray([data.dx[i] for i in ind]).astype(np.float64)
+                data.dx = self._reorder_1d_array(data.dx, ind)
             if data.dxl is not None:
-                data.dxl = np.asarray([data.dxl[i] for i in ind]).astype(np.float64)
+                data.dxl = self._reorder_1d_array(data.dxl, ind)
             if data.dxw is not None:
-                data.dxw = np.asarray([data.dxw[i] for i in ind]).astype(np.float64)
+                data.dxw = self._reorder_1d_array(data.dxw, ind)
             if data.dy is not None:
                 if len(data.dy) == 0:
                     data.dy = None
                     continue
-                data.dy = np.asarray([data.dy[i] for i in ind]).astype(np.float64)
+                data.dy = self._reorder_1d_array(data.dy, ind)
             if data.lam is not None:
-                data.lam = np.asarray([data.lam[i] for i in ind]).astype(np.float64)
+                data.lam = self._reorder_1d_array(data.lam, ind)
             if data.dlam is not None:
-                data.dlam = np.asarray([data.dlam[i] for i in ind]).astype(np.float64)
+                data.dlam = self._reorder_1d_array(data.dlam, ind)
+            data = self._remove_nans_in_data(data)
             if len(data.x) > 0:
                 data.xmin = np.min(data.x)
…
                 data.ymin = np.min(data.y)
                 data.ymax = np.max(data.y)
+
+    @staticmethod
+    def _reorder_1d_array(array, ind):
+        """
+        Reorders a 1D array based on the indices passed as ind
+        :param array: Array to be reordered
+        :param ind: Indices used to reorder array
+        :return: reordered array
+        """
+        array = np.asarray(array, dtype=np.float64)
+        return array[ind]
+
+    @staticmethod
+    def _remove_nans_in_data(data):
+        """
+        Remove data points where nan is loaded
+        :param data: 1D or 2D data object
+        :return: data with nan points removed
+        """
+        if isinstance(data, Data1D):
+            fields = FIELDS_1D
+        elif isinstance(data, Data2D):
+            fields = FIELDS_2D
+        else:
+            return data
+        # Make array of good points - all others will be removed
+        good = np.isfinite(getattr(data, fields[0]))
+        for name in fields[1:]:
+            array = getattr(data, name)
+            if array is not None:
+                # Update good points only if not already changed
+                good &= np.isfinite(array)
+        if not np.all(good):
+            for name in fields:
+                array = getattr(data, name)
+                if array is not None:
+                    setattr(data, name, array[good])
+        return data
 
     def sort_two_d_data(self):
…
                 dataset.x_bins = dataset.qx_data[:int(n_cols)]
                 dataset.data = dataset.data.flatten()
+                dataset = self._remove_nans_in_data(dataset)
                 if len(dataset.data) > 0:
                     dataset.xmin = np.min(dataset.qx_data)
…
     def splitline(line):
         """
-        Splits a line into pieces based on common delimeters
+        Splits a line into pieces based on common delimiters
         :param line: A single line of text
         :return: list of values
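A standalone sketch of the masking idea behind the new _remove_nans_in_data() helper: build one boolean mask across all populated columns and keep only the rows that are finite everywhere (illustrative arrays only).

import numpy as np

x = np.array([0.01, 0.02, np.nan, 0.04])
y = np.array([10.0, np.nan, 3.0, 1.0])
dy = np.array([0.5, 0.4, 0.3, 0.2])

good = np.isfinite(x)          # start from the first field
for column in (y, dy):         # fold in every other populated field
    good &= np.isfinite(column)

x, y, dy = x[good], y[good], dy[good]   # only rows 0 and 3 survive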
src/sas/sascalc/dataloader/loader.py
rdc8d1c2 → rb8080e1

             ascii_loader = ascii_reader.Reader()
             return ascii_loader.read(path)
+        except NoKnownLoaderException:
+            pass  # Try the Cansas XML reader
         except DefaultReaderException:
             pass  # Loader specific error to try the cansas XML reader
…
             cansas_loader = cansas_reader.Reader()
             return cansas_loader.read(path)
+        except NoKnownLoaderException:
+            pass  # Try the NXcanSAS reader
         except DefaultReaderException:
             pass  # Loader specific error to try the NXcanSAS reader
src/sas/sascalc/dataloader/readers/abs_reader.py
r1efbc190 → rb8080e1

     type_name = "IGOR 1D"
     # Wildcards
-    type = ["IGOR 1D files (*.abs)|*.abs"]
+    type = ["IGOR 1D files (*.abs)|*.abs", "IGOR 1D USANS files (*.cor)|*.cor"]
     # List of allowed extensions
-    ext = ['.abs']
+    ext = ['.abs', '.cor']
 
     def get_file_contents(self):
…
         self.current_datainfo = DataInfo()
         self.current_datainfo.filename = filepath
-        self.reset_data_list(len(lines))
         detector = Detector()
         data_line = 0
…
 
                 try:
-                    _x = float(toks[0])
+                    _x = float(toks[4])
                     _y = float(toks[1])
                     _dy = float(toks[2])
…
                     self.current_dataset.y[data_line] = _y
                     self.current_dataset.dy[data_line] = _dy
-                    self.current_dataset.dx[data_line] = _dx
+                    if _dx > 0:
+                        self.current_dataset.dx[data_line] = _dx
+                    else:
+                        if data_line == 0:
+                            self.current_dataset.dx = None
+                            self.current_dataset.dxl = np.zeros(len(lines))
+                            self.current_dataset.dxw = np.zeros(len(lines))
+                        self.current_dataset.dxl[data_line] = abs(_dx)
+                        self.current_dataset.dxw[data_line] = 0
                     data_line += 1
 
…
                     pass
 
+            # SANS Data:
             # The 6 columns are | Q (1/A) | I(Q) (1/cm) | std. dev.
             # I(Q) (1/cm) | sigmaQ | meanQ | ShadowFactor|
-            if line.count("The 6 columns") > 0:
+            # USANS Data:
+            # EMP LEVEL: <value> ; BKG LEVEL: <value>
+            if line.startswith("The 6 columns") or line.startswith("EMP LEVEL"):
                 is_data_started = True
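The new branch above encodes a resolution convention for the combined .abs/.cor reader: a positive value in the resolution column is a pinhole dQ, while a negative value marks slit-smeared (USANS) data and its magnitude is stored as a slit length. A simplified, whole-column sketch of that convention (the helper name is illustrative, not part of the reader):

import numpy as np

def split_resolution(dq):
    """Return (dx, dxl, dxw) following the sign convention used above."""
    dq = np.asarray(dq, dtype=float)
    if np.all(dq > 0):
        return dq, None, None          # pinhole smearing: keep dQ as dx
    dxl = np.abs(dq)                   # slit smearing: |dQ| becomes the slit length
    dxw = np.zeros_like(dq)            # slit width set to zero
    return None, dxl, dxw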
src/sas/sascalc/dataloader/readers/associations.py
r574adc7 → rb8080e1

     ".dat": "red2d_reader",
     ".abs": "abs_reader",
+    ".cor": "abs_reader",
     ".sans": "danse_reader",
     ".pdh": "anton_paar_saxs_reader"
src/sas/sascalc/dataloader/readers/cansas_reader.py
r2b538cd → rb8080e1

         data files do not appear a second time
         """
-        self.current_datainfo = None
-        self.current_dataset = None
-        self.current_data1d = None
+        super(Reader, self).reset_state()
         self.data = []
         self.process = Process()
…
         self.names = []
         self.cansas_defaults = {}
-        self.output = []
         self.ns_list = None
         self.logging = []
…
 
     def read(self, xml_file, schema_path="", invalid=True):
-        if schema_path != "" or invalid != True:
+        if schema_path != "" or not invalid:
             # read has been called from self.get_file_contents because xml file doens't conform to schema
             _, self.extension = os.path.splitext(os.path.basename(xml_file))
…
                                    pos, "z", datainfo.sample.position.z,
                                    {"unit": datainfo.sample.position_unit})
-        if written == True:
+        if written:
             self.append(pos, sample)
 
…
                                    ori, "yaw", datainfo.sample.orientation.z,
                                    {"unit": datainfo.sample.orientation_unit})
-        if written == True:
+        if written:
             self.append(ori, sample)
 
…
                                    size, "z", datainfo.source.beam_size.z,
                                    {"unit": datainfo.source.beam_size_unit})
-        if written == True:
+        if written:
             self.append(size, source)
 
…
                                        size, "z", aperture.size.z,
                                        {"unit": aperture.size_unit})
-                if written == True:
+                if written:
                     self.append(size, apert)
 
…
             written = written | self.write_node(det, "SDD", item.distance,
                                                 {"unit": item.distance_unit})
-            if written == True:
+            if written:
                 self.append(det, instr)
 
…
             written = written | self.write_node(off, "z", item.offset.z,
                                                 {"unit": item.offset_unit})
-            if written == True:
+            if written:
                 self.append(off, det)
 
…
                                                 item.orientation.z,
                                                 {"unit": item.orientation_unit})
-            if written == True:
+            if written:
                 self.append(ori, det)
 
…
                                                 item.beam_center.z,
                                                 {"unit": item.beam_center_unit})
-            if written == True:
+            if written:
                 self.append(center, det)
 
…
             written = written | self.write_node(pix, "z", item.pixel_size.z,
                                                 {"unit": item.pixel_size_unit})
-            if written == True:
+            if written:
                 self.append(pix, det)
             self.write_node(det, "slit_length", item.slit_length,
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rc416a17 → rb8080e1

 import sys
 
-from sas.sascalc.dataloader.data_info import plottable_1D, plottable_2D,\
+from ..data_info import plottable_1D, plottable_2D,\
     Data1D, Data2D, DataInfo, Process, Aperture, Collimation, \
     TransmissionSpectrum, Detector
-from sas.sascalc.dataloader.data_info import combine_data_info_with_plottable
-
-
-class Reader():
+from ..data_info import combine_data_info_with_plottable
+from ..loader_exceptions import FileContentsException, DefaultReaderException
+from ..file_reader_base_class import FileReader, decode
+
+def h5attr(node, key, default=None):
+    return decode(node.attrs.get(key, default))
+
+class Reader(FileReader):
     """
     A class for reading in CanSAS v2.0 data files. The existing iteration opens
…
     Any number of SASdata sets may be present in a SASentry and the data within
     can be either 1D I(Q) or 2D I(Qx, Qy).
+
     Also supports reading NXcanSAS formatted HDF5 files
…
     # Raw file contents to be processed
     raw_data = None
-    # Data info currently being read in
-    current_datainfo = None
-    # SASdata set currently being read in
-    current_dataset = None
     # List of plottable1D objects that should be linked to the current_datainfo
     data1d = None
…
     # Flag to bypass extension check
     allow_all = True
-    # List of files to return
-    output = None
-
-    def read(self, filename):
+
+    def get_file_contents(self):
         """
         This is the general read method that all SasView data_loaders must have.
…
         """
         # Reinitialize when loading a new data file to reset all class variables
-        self.reset_class_variables()
+        self.reset_state()
+
+        filename = self.f_open.name
+        self.f_open.close() # IO handled by h5py
+
         # Check that the file exists
         if os.path.isfile(filename):
…
             if extension in self.ext or self.allow_all:
                 # Load the data file
-                self.raw_data = h5py.File(filename, 'r')
-                # Read in all child elements of top level SASroot
-                self.read_children(self.raw_data, [])
-                # Add the last data set to the list of outputs
-                self.add_data_set()
-                # Close the data file
-                self.raw_data.close()
-        # Return data set(s)
-        return self.output
-
-    def reset_class_variables(self):
+                try:
+                    self.raw_data = h5py.File(filename, 'r')
+                except Exception as e:
+                    if extension not in self.ext:
+                        msg = "CanSAS2.0 HDF5 Reader could not load file {}".format(basename + extension)
+                        raise DefaultReaderException(msg)
+                    raise FileContentsException(e.message)
+                try:
+                    # Read in all child elements of top level SASroot
+                    self.read_children(self.raw_data, [])
+                    # Add the last data set to the list of outputs
+                    self.add_data_set()
+                except Exception as exc:
+                    raise FileContentsException(exc.message)
+                finally:
+                    # Close the data file
+                    self.raw_data.close()
+
+        for dataset in self.output:
+            if isinstance(dataset, Data1D):
+                if dataset.x.size < 5:
+                    self.output = []
+                    raise FileContentsException("Fewer than 5 data points found.")
+
+    def reset_state(self):
         """
         Create the reader object and define initial states for class variables
         """
-        self.current_datainfo = None
-        self.current_dataset = None
+        super(Reader, self).reset_state()
         self.data1d = []
         self.data2d = []
…
         self.errors = set()
         self.logging = []
-        self.output = []
         self.parent_class = u''
         self.detector = Detector()
…
             # Get all information for the current key
             value = data.get(key)
-            if value.attrs.get(u'canSAS_class') is not None:
-                class_name = value.attrs.get(u'canSAS_class')
-            else:
-                class_name = value.attrs.get(u'NX_class')
+            class_name = h5attr(value, u'canSAS_class')
+            if class_name is None:
+                class_name = h5attr(value, u'NX_class')
             if class_name is not None:
                 class_prog = re.compile(class_name)
…
 
             if isinstance(value, h5py.Group):
+                # Set parent class before recursion
                 self.parent_class = class_name
                 parent_list.append(key)
…
                 # Recursion step to access data within the group
                 self.read_children(value, parent_list)
+                # Reset parent class when returning from recursive method
+                self.parent_class = class_name
                 self.add_intermediate()
                 parent_list.remove(key)
…
                         self.current_dataset.x = data_set.flatten()
                     continue
+                elif key == u'Qdev':
+                    self.current_dataset.dx = data_set.flatten()
+                    continue
+                elif key == u'dQw':
+                    self.current_dataset.dxw = data_set.flatten()
+                    continue
+                elif key == u'dQl':
+                    self.current_dataset.dxl = data_set.flatten()
+                    continue
                 elif key == u'Qy':
                     self.current_dataset.yaxis("Q_y", unit)
…
 
             for data_point in data_set:
+                if isinstance(data_point, np.ndarray):
+                    if data_point.dtype.char == 'S':
+                        data_point = decode(bytes(data_point))
+                else:
+                    data_point = decode(data_point)
                 # Top Level Meta Data
                 if key == u'definition':
…
                 elif key == u'run':
                     self.current_datainfo.run.append(data_point)
+                    try:
+                        run_name = h5attr(value, 'name')
+                        run_dict = {data_point: run_name}
+                        self.current_datainfo.run_name = run_dict
+                    except Exception:
+                        pass
                 elif key == u'title':
                     self.current_datainfo.title = data_point
…
         Data1D and Data2D objects
         """
-
         # Type cast data arrays to float64
         if len(self.current_datainfo.trans_spectrum) > 0:
…
         # Type cast data arrays to float64 and find min/max as appropriate
         for dataset in self.data2d:
-            dataset.data = dataset.data.astype(np.float64)
-            dataset.err_data = dataset.err_data.astype(np.float64)
-            if dataset.qx_data is not None:
-                dataset.xmin = np.min(dataset.qx_data)
-                dataset.xmax = np.max(dataset.qx_data)
-                dataset.qx_data = dataset.qx_data.astype(np.float64)
-            if dataset.dqx_data is not None:
-                dataset.dqx_data = dataset.dqx_data.astype(np.float64)
-            if dataset.qy_data is not None:
-                dataset.ymin = np.min(dataset.qy_data)
-                dataset.ymax = np.max(dataset.qy_data)
-                dataset.qy_data = dataset.qy_data.astype(np.float64)
-            if dataset.dqy_data is not None:
-                dataset.dqy_data = dataset.dqy_data.astype(np.float64)
-            if dataset.q_data is not None:
-                dataset.q_data = dataset.q_data.astype(np.float64)
             zeros = np.ones(dataset.data.size, dtype=bool)
             try:
…
                 dataset.x_bins = dataset.qx_data[:n_cols]
                 dataset.data = dataset.data.flatten()
-
-            final_dataset = combine_data_info_with_plottable(
-                dataset, self.current_datainfo)
-            self.output.append(final_dataset)
+            self.current_dataset = dataset
+            self.send_to_output()
 
         for dataset in self.data1d:
-            if dataset.x is not None:
-                dataset.x = dataset.x.astype(np.float64)
-                dataset.xmin = np.min(dataset.x)
-                dataset.xmax = np.max(dataset.x)
-            if dataset.y is not None:
-                dataset.y = dataset.y.astype(np.float64)
-                dataset.ymin = np.min(dataset.y)
-                dataset.ymax = np.max(dataset.y)
-            if dataset.dx is not None:
-                dataset.dx = dataset.dx.astype(np.float64)
-            if dataset.dxl is not None:
-                dataset.dxl = dataset.dxl.astype(np.float64)
-            if dataset.dxw is not None:
-                dataset.dxw = dataset.dxw.astype(np.float64)
-            if dataset.dy is not None:
-                dataset.dy = dataset.dy.astype(np.float64)
-            final_dataset = combine_data_info_with_plottable(
-                dataset, self.current_datainfo)
-            self.output.append(final_dataset)
+            self.current_dataset = dataset
+            self.send_to_output()
 
     def add_data_set(self, key=""):
…
         :return: unit for the value passed to the method
         """
-        unit = value.attrs.get(u'units')
+        unit = h5attr(value, u'units')
         if unit is None:
-            unit = value.attrs.get(u'unit')
+            unit = h5attr(value, u'unit')
         # Convert the unit formats
         if unit == "1/A":
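The two small helpers introduced above are worth seeing together: h5py attributes can come back as bytes under Python 3, so every attribute read is routed through decode() (defined in file_reader_base_class.py). A minimal sketch:

def decode(s):
    # bytes -> str on Python 3, pass through anything already decoded
    return s.decode() if isinstance(s, bytes) else s

def h5attr(node, key, default=None):
    # uniform, version-safe access to an HDF5 attribute
    return decode(node.attrs.get(key, default))

# usage, as in read_children():
#   class_name = h5attr(value, u'canSAS_class')
#   if class_name is None:
#       class_name = h5attr(value, u'NX_class')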
src/sas/sascalc/dataloader/readers/danse_reader.py
rcee5c78 → rb8080e1

         # Store all data
         # Store wavelength
-        if has_converter == True and self.current_datainfo.source.wavelength_unit != 'A':
+        if has_converter and self.current_datainfo.source.wavelength_unit != 'A':
             conv = Converter('A')
             wavelength = conv(wavelength,
…
 
         # Store distance
-        if has_converter == True and detector.distance_unit != 'm':
+        if has_converter and detector.distance_unit != 'm':
             conv = Converter('m')
             distance = conv(distance, units=detector.distance_unit)
…
 
         # Store pixel size
-        if has_converter == True and detector.pixel_size_unit != 'mm':
+        if has_converter and detector.pixel_size_unit != 'mm':
             conv = Converter('mm')
             pixel = conv(pixel, units=detector.pixel_size_unit)
…
         x_vals = np.tile(x_vals, (size_y, 1)).flatten()
         y_vals = np.tile(y_vals, (size_x, 1)).T.flatten()
-        if (np.all(self.current_dataset.err_data is None)
+        if (np.all(self.current_dataset.err_data == None)
                 or np.any(self.current_dataset.err_data <= 0)):
             new_err_data = np.sqrt(np.abs(self.current_dataset.data))
src/sas/sascalc/dataloader/readers/sesans_reader.py
r849094a → rb8080e1

 from ..file_reader_base_class import FileReader
 from ..data_info import plottable_1D, DataInfo
-from ..loader_exceptions import FileContentsException, DataReaderException
+from ..loader_exceptions import FileContentsException
 
 # Check whether we have a converter available
…
 try:
     from sas.sascalc.data_util.nxsunit import Converter
-except:
+except ImportError:
     has_converter = False
 _ZERO = 1e-16
…
         line = self.nextline()
         params = {}
-        while not line.startswith("BEGIN_DATA"):
+        while line and not line.startswith("BEGIN_DATA"):
             terms = line.split()
             if len(terms) >= 2:
…
             raise FileContentsException("Wavelength has no units")
         if params["SpinEchoLength_unit"] != params["Wavelength_unit"]:
-            raise FileContentsException("The spin echo data has rudely used "
-                                        "different units for the spin echo length "
-                                        "and the wavelength. While sasview could "
-                                        "handle this instance, it is a violation "
-                                        "of the file format and will not be "
-                                        "handled by other software.")
+            raise FileContentsException(
+                "The spin echo data has rudely used "
+                "different units for the spin echo length "
+                "and the wavelength. While sasview could "
+                "handle this instance, it is a violation "
+                "of the file format and will not be "
+                "handled by other software.")
 
         headers = self.nextline().split()
…
 
         if not data.size:
-            raise FileContentsException("{} is empty".format(path))
+            raise FileContentsException("{} is empty".format(self.filepath))
         x = data[:, headers.index("SpinEchoLength")]
         if "SpinEchoLength_error" in headers:
src/sas/sascalc/file_converter/otoko_loader.py
r0b1a677 → rb8080e1

 
     Given the paths of two header files, this function will load each axis in
-    turn. If loading is successfull then an instance of the OTOKOData class
+    turn. If loading is successful then an instance of the OTOKOData class
     will be returned, else an exception will be raised.
 
src/sas/sascalc/fit/AbstractFitEngine.py
racd9c311 → rb8080e1

     def get_params(self, fitparams):
         """
-        return a list of value of paramter to fit
+        return a list of value of parameter to fit
 
-        :param fitparams: list of paramaters name to fit
+        :param fitparams: list of parameters name to fit
 
         """
src/sas/sascalc/fit/MultiplicationModel.py
r574adc7 → rb8080e1

         try:
             multiplicity = p_model.multiplicity
-        except:
+        except AttributeError:
             multiplicity = 1
         ## functional multiplicity of the model
…
         self.non_fittable = p_model.non_fittable
         self.multiplicity_info = []
-        self.fun_list = {}
+        self.fun_list = []
         if self.non_fittable > 1:
             try:
…
                 self.fun_list = p_model.fun_list
                 self.is_multiplicity_model = True
-            except:
+            except AttributeError:
                 pass
         else:
src/sas/sascalc/fit/pagestate.py
r9e6aeaf → rb8080e1

 ################################################################################
 import time
+import re
 import os
 import sys
…
             name = value.split(':', 1)[1].strip()
             file_value = "File name:" + name
+            #Truncating string so print doesn't complain of being outside margins
+            if sys.platform != "win32":
+                MAX_STRING_LENGHT = 50
+                if len(file_value) > MAX_STRING_LENGHT:
+                    file_value = "File name:.."+file_value[-MAX_STRING_LENGHT+10:]
             file_name = CENTRE % file_value
             if len(title) == 0:
…
         html_str, text_str, title = self._get_report_string()
         # Allow 2 figures to append
-        image_links = [FEET_2%fig for fig in fig_urls]
-
+        #Constraining image width for OSX and linux, so print doesn't complain of being outside margins
+        if sys.platform == "win32":
+            image_links = [FEET_2%fig for fig in fig_urls]
+        else:
+            image_links = [FEET_2_unix%fig for fig in fig_urls]
         # final report html strings
         report_str = html_str + ELINE.join(image_links)
         report_str += FEET_3
         return report_str, text_str
 
…
         if node.get('version'):
             # Get the version for model conversion purposes
-            self.version = tuple(int(e) for e in
-                                 str.split(node.get('version'), "."))
+            x = re.sub('[^\d.]', '', node.get('version'))
+            self.version = tuple(int(e) for e in str.split(x, "."))
             # The tuple must be at least 3 items long
             while len(self.version) < 3:
…
 """
 FEET_2 = \
-"""<img src="%s" ></img>
+"""<img src="%s"></img>
+"""
+FEET_2_unix = \
+"""<img src="%s" width="540"></img>
 """
 FEET_3 = \
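A small sketch of the version-parsing hardening above: strip anything that is not a digit or a dot before splitting, so a tag such as "4.2.0-beta" still yields a numeric tuple. The padding value used here is illustrative; the changeset only shows that the tuple is extended to three items.

import re

def parse_version(text):
    cleaned = re.sub(r'[^\d.]', '', text)            # "4.2.0-beta" -> "4.2.0"
    version = tuple(int(e) for e in cleaned.split('.') if e)
    while len(version) < 3:                          # pad to at least 3 items
        version += (0,)
    return version

print(parse_version("4.2.0-beta"))   # (4, 2, 0)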
src/sas/sascalc/fit/qsmearing.py
r50fcb09 → rb8080e1

     #print "data1D.dx[0]",data1D.dx[0],data1D.dxl[0]
     # If we found resolution smearing data, return a QSmearer
-    if _found_resolution == True:
+    if _found_resolution:
         return pinhole_smear(data, model)
 
…
             break
     # If we found slit smearing data, return a slit smearer
-    if _found_slit == True:
+    if _found_slit:
         return slit_smear(data, model)
     return None
src/sas/sascalc/pr/c_extensions/Cinvertor.c
rd04ac05 → rb8080e1

 
 const char get_peaks_doc[] =
-    "Returns the number of peaks in the output P(r) distrubution\n"
+    "Returns the number of peaks in the output P(r) distribution\n"
     "for the given set of coefficients.\n"
     " @param args: c-parameters\n"
src/sas/sascalc/pr/fit/AbstractFitEngine.py
r574adc7 → rb8080e1

     def get_params(self, fitparams):
         """
-        return a list of value of paramter to fit
+        return a list of value of parameter to fit
 
-        :param fitparams: list of paramaters name to fit
+        :param fitparams: list of parameters name to fit
 
         """
src/sas/sascalc/pr/invertor.py
r6da860a → rb8080e1

         elif name == 'est_bck':
             value = self.get_est_bck()
-            if value == 1:
-                return True
-            else:
-                return False
+            return value == 1
         elif name in self.__dict__:
             return self.__dict__[name]
…
 
         # If we need to fit the background, add a term
-        if self.est_bck == True:
+        if self.est_bck:
             nfunc_0 = nfunc
             nfunc += 1
…
             cov = np.linalg.pinv(inv_cov)
             err = math.fabs(chi2 / float(npts - nfunc)) * cov
-        except Exception as ex:
+        except:
             # We were not able to estimate the errors
             # Return an empty error matrix
-            logger.error(ex)
+            logger.error(sys.exc_value)
 
         # Keep a copy of the last output
-        if self.est_bck == False:
+        if not self.est_bck:
             self.out = c
             self.cov = err
…
 
         """
-        from sas.sascalc.pr.num_term import NTermEstimator
+        from .num_term import NTermEstimator
         estimator = NTermEstimator(self.clone())
         try:
             return estimator.num_terms(isquit_func)
-        except Exception as ex:
+        except:
             # If we fail, estimate alpha and return the default
             # number of terms
             best_alpha, _, _ = self.estimate_alpha(self.nfunc)
-            logger.warning("Invertor.estimate_numterms: %s" % ex)
+            logger.warning("Invertor.estimate_numterms: %s" % sys.exc_value)
             return self.nfunc, best_alpha, "Could not estimate number of terms"
…
             return best_alpha, message, elapsed
 
-        except Exception as ex:
-            message = "Invertor.estimate_alpha: %s" % ex
+        except:
+            message = "Invertor.estimate_alpha: %s" % sys.exc_value
             return 0, message, elapsed
…
             file.write("#slit_width=%g\n" % self.slit_width)
             file.write("#background=%g\n" % self.background)
-            if self.est_bck == True:
+            if self.est_bck:
                 file.write("#has_bck=1\n")
             else:
…
                     elif line.startswith('#has_bck='):
                         toks = line.split('=')
-                        if int(toks[1]) == 1:
-                            self.est_bck = True
-                        else:
-                            self.est_bck = False
+                        self.est_bck = int(toks[1]) == 1
 
                 # Now read in the parameters
…
                             self.cov[i][i] = float(toks2[1])
 
-            except Exception as ex:
-                msg = "Invertor.from_file: corrupted file\n%s" % ex
+            except:
+                msg = "Invertor.from_file: corrupted file\n%s" % sys.exc_value
                 raise RuntimeError(msg)
         else:
src/sas/sascalc/pr/num_term.py
r8f83719f → rb8080e1

         medi = 0
         for i in range(dv):
-            if odd == True:
+            if odd:
                 medi = osc[int(med)]
             else:
…
                 new_osc3.append(self.osc_list[i])
 
-        if flag9 == True:
+        if flag9:
             self.dataset = new_osc1
-        elif flag8 == True:
+        elif flag8:
             self.dataset = new_osc2
         else:
…
             div = len(nts)
             tem = float(div) / 2.0
-            odd = self.is_odd(div)
-            if odd == True:
+            if self.is_odd(div):
                 nt = nts[int(tem)]
             else:
…
             return nt, self.alpha_list[nt - 10], self.mess_list[nt - 10]
         except:
-            #TODO: check the logic above and make sure it doesn't 
+            #TODO: check the logic above and make sure it doesn't
             # rely on the try-except.
             return self.nterm_min, self.invertor.alpha, ''
…
             data_y = np.append(data_y, test_y)
             data_err = np.append(data_err, err)
-        except Exception as ex:
-            logger.error(ex)
+        except:
+            logger.error(sys.exc_value)
 
     return data_x, data_y, data_err