Changeset 8938502 in sasview for src/sas/sascalc
- Timestamp: Apr 10, 2017 11:01:14 AM
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, costrafo411, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 4b5f2657
- Parents: 5d1e040 (diff), d26f025 (diff)

Note: this is a merge changeset; the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.

- Location: src/sas/sascalc
- Files: 22 edited
Legend:
- Unchanged context lines are shown without a prefix
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
src/sas/sascalc/calculator/kiessig_calculator.py
    rb699768 → r235f514

        return None
        # check if delta_q is zero
    -   if dq == 0.0 or dq == None:
    +   if dq == 0.0 or dq is None:
        return None
        else:
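Background note (not part of the changeset): the pattern applied throughout these diffs follows PEP 8, which recommends testing against singletons such as None with identity ("is" / "is not") rather than equality ("==" / "!="), because equality can be redefined by a class while identity cannot. A minimal illustrative sketch, using a hypothetical class:

    # Illustrative sketch only -- not code from this changeset.
    class AlwaysEqual(object):
        """Pathological but legal: __eq__ that matches anything."""
        def __eq__(self, other):
            return True

    x = AlwaysEqual()
    print(x == None)   # True  -- the overridden __eq__ decides the answer
    print(x is None)   # False -- x is not the None object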
src/sas/sascalc/calculator/resolution_calculator.py
    r9c0f3c17 → r7432acb

        a_value *= 1.0E-16
        # if lamda is give (broad meanning of A) return 2* lamda^2 * A
    -   if lamda != None:
    +   if lamda is not None:
        a_value *= (4 * lamda * lamda)
        return a_value
src/sas/sascalc/calculator/sas_gen.py
    r9c0f3c17 → r7432acb

        :Param volume: pixel volume [float]
        """
    -   if self.data_vol == None:
    +   if self.data_vol is None:
        raise
        self.data_vol = volume
    …
        pos_z = self.data_z
        len_x = len(pos_x)
    -   if self.is_avg == None:
    +   if self.is_avg is None:
        len_x *= -1
        pos_x, pos_y, pos_z = transform_center(pos_x, pos_y, pos_z)
    …
        self.sld_n = np.zeros(length)

    -   if omfdata.mx == None:
    +   if omfdata.mx is None:
        self.mx = np.zeros(length)
    -   if omfdata.my == None:
    +   if omfdata.my is None:
        self.my = np.zeros(length)
    -   if omfdata.mz == None:
    +   if omfdata.mz is None:
        self.mz = np.zeros(length)
    …
        output.set_pix_type('pixel')
        output.set_pixel_symbols('pixel')
    -   if vol_pix != None:
    +   if vol_pix is not None:
        output.set_pixel_volumes(vol_pix)
        return output
    …
        :Param data: MagSLD data object
        """
    -   if path == None:
    +   if path is None:
        raise ValueError, "Missing the file path."
    -   if data == None:
    +   if data is None:
        raise ValueError, "Missing the data to save."
        x_val = data.pos_x
    …
        length = len(x_val)
        sld_n = data.sld_n
    -   if sld_n == None:
    +   if sld_n is None:
        sld_n = np.zeros(length)
        sld_mx = data.sld_mx
    -   if sld_mx == None:
    +   if sld_mx is None:
        sld_mx = np.zeros(length)
        sld_my = np.zeros(length)
    …
        self.sld_theta = None
        self.pix_symbol = None
    -   if sld_mx != None and sld_my != None and sld_mz != None:
    +   if sld_mx is not None and sld_my is not None and sld_mz is not None:
        self.set_sldms(sld_mx, sld_my, sld_mz)
        self.set_nodes()
    …
        :Params pixel: str; pixel or atomic symbol, or array of strings
        """
    -   if self.sld_n == None:
    +   if self.sld_n is None:
        return
        if symbol.__class__.__name__ == 'str':
    …
        :Params pixel: str; pixel or atomic symbol, or array of strings
        """
    -   if self.sld_n == None:
    +   if self.sld_n is None:
        return
        if vol.__class__.__name__ == 'ndarray':
src/sas/sascalc/data_util/calcthread.py
    r64ca561 → r7432acb

        def update(self, **kwargs):
        """Update GUI with the lastest results from the current work unit."""
    -   if self.updatefn != None and clock() > self._time_for_update:
    +   if self.updatefn is not None and clock() > self._time_for_update:
        self._lock.acquire()
        self._time_for_update = clock() + self._delay
    …
        def complete(self, **kwargs):
        """Update the GUI with the completed results from a work unit."""
    -   if self.completefn != None:
    +   if self.completefn is not None:
        self.completefn(**kwargs)
        sleep(self.yieldtime)
src/sas/sascalc/data_util/qsmearing.py
    re962d85 → r8938502

        # This checks for 2D data (does not throw exception because fail is common)
        if data.__class__.__name__ not in ['Data1D', 'Theory1D']:
    -   if data == None:
    +   if data is None:
        return None
    -   elif data.dqx_data == None or data.dqy_data == None:
    +   elif data.dqx_data is None or data.dqy_data is None:
        return None
        return PySmear2D(data)
src/sas/sascalc/dataloader/data_info.py
    r959eb01 → r7432acb

        # create zero vector
        dy_other = other.dy
    -   if other.dy == None or (len(other.dy) != len(other.y)):
    +   if other.dy is None or (len(other.dy) != len(other.y)):
        dy_other = np.zeros(len(other.y))

        # Check that we have errors, otherwise create zero vector
        dy = self.dy
    -   if self.dy == None or (len(self.dy) != len(self.y)):
    +   if self.dy is None or (len(self.dy) != len(self.y)):
        dy = np.zeros(len(self.y))
    …
        dy, dy_other = self._validity_check(other)
        result = self.clone_without_data(len(self.x))
    -   if self.dxw == None:
    +   if self.dxw is None:
        result.dxw = None
        else:
        result.dxw = np.zeros(len(self.x))
    -   if self.dxl == None:
    +   if self.dxl is None:
        result.dxl = None
        else:
    …
        self._validity_check_union(other)
        result = self.clone_without_data(len(self.x) + len(other.x))
    -   if self.dy == None or other.dy is None:
    +   if self.dy is None or other.dy is None:
        result.dy = None
        else:
        result.dy = np.zeros(len(self.x) + len(other.x))
    -   if self.dx == None or other.dx is None:
    +   if self.dx is None or other.dx is None:
        result.dx = None
        else:
        result.dx = np.zeros(len(self.x) + len(other.x))
    -   if self.dxw == None or other.dxw is None:
    +   if self.dxw is None or other.dxw is None:
        result.dxw = None
        else:
        result.dxw = np.zeros(len(self.x) + len(other.x))
    -   if self.dxl == None or other.dxl is None:
    +   if self.dxl is None or other.dxl is None:
        result.dxl = None
        else:
    …
        result.y = np.append(self.y, other.y)
        result.y = result.y[ind]
    -   if result.dy != None:
    +   if result.dy is not None:
        result.dy = np.append(self.dy, other.dy)
        result.dy = result.dy[ind]
    …
        # Check that the scales match
        err_other = other.err_data
    -   if other.err_data == None or \
    +   if other.err_data is None or \
        (len(other.err_data) != len(other.data)):
        err_other = np.zeros(len(other.data))
    …
        # Check that we have errors, otherwise create zero vector
        err = self.err_data
    -   if self.err_data == None or \
    +   if self.err_data is None or \
        (len(self.err_data) != len(self.data)):
        err = np.zeros(len(other.data))
    …
        dy, dy_other = self._validity_check(other)
        result = self.clone_without_data(np.size(self.data))
    -   if self.dqx_data == None or self.dqy_data == None:
    +   if self.dqx_data is None or self.dqy_data is None:
        result.dqx_data = None
        result.dqy_data = None
    …
        result.ymin = self.ymin
        result.ymax = self.ymax
    -   if self.dqx_data == None or self.dqy_data == None or \
    -   other.dqx_data == None or other.dqy_data == None:
    +   if self.dqx_data is None or self.dqy_data is None or \
    +   other.dqx_data is None or other.dqy_data is None:
        result.dqx_data = None
        result.dqy_data = None
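The change matters most in files like this one, where the attributes being tested (dy, dx, dqx_data, err_data, ...) may hold numpy arrays: comparing an array to None with "==" is evaluated elementwise, so using the result in an if statement is fragile, while an identity check always yields a single boolean. A standalone sketch (not the module's code):

    # Illustrative sketch only -- behaviour of "== None" vs. "is None" on numpy arrays.
    import numpy as np

    dy = np.array([0.1, 0.2, 0.3])
    print(dy is None)     # False -- safe, always a plain bool
    print(dy == None)     # elementwise on current numpy: array([False, False, False])
                          # (older numpy returned False and issued a FutureWarning)
    # if dy == None: ...  # would raise "truth value of an array ... is ambiguous"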
src/sas/sascalc/dataloader/manipulations.py
    r959eb01 → r7432acb

        y[i_q] += frac * data[npts]

    -   if err_data == None or err_data[npts] == 0.0:
    +   if err_data is None or err_data[npts] == 0.0:
        if data[npts] < 0:
        data[npts] = -data[npts]
    …
        continue
        y += frac * data[npts]
    -   if err_data == None or err_data[npts] == 0.0:
    +   if err_data is None or err_data[npts] == 0.0:
        if data[npts] < 0:
        data[npts] = -data[npts]
    …

        # Get the dq for resolution averaging
    -   if data2D.dqx_data != None and data2D.dqy_data != None:
    +   if data2D.dqx_data is not None and data2D.dqy_data is not None:
        # The pinholes and det. pix contribution present
        # in both direction of the 2D which must be subtracted when
    …

        #q_data_max = numpy.max(q_data)
    -   if len(data2D.q_data) == None:
    +   if len(data2D.q_data) is None:
        msg = "Circular averaging: invalid q_data: %g" % data2D.q_data
        raise RuntimeError, msg
    …
        # Take dqs from data to get the q_average
        x[i_q] += frac * q_value
    -   if err_data == None or err_data[npt] == 0.0:
    +   if err_data is None or err_data[npt] == 0.0:
        if data_n < 0:
        data_n = -data_n
    …
        else:
        err_y[i_q] += frac * frac * err_data[npt] * err_data[npt]
    -   if dq_data != None:
    +   if dq_data is not None:
        # To be consistent with dq calculation in 1d reduction,
        # we need just the averages (not quadratures) because
    …
        err_y[n] = -err_y[n]
        err_y[n] = math.sqrt(err_y[n])
    -   #if err_x != None:
    +   #if err_x is not None:
        #    err_x[n] = math.sqrt(err_x[n])
    …
        idx = (numpy.isfinite(y)) & (numpy.isfinite(x))

    -   if err_x != None:
    +   if err_x is not None:
        d_x = err_x[idx] / y_counts[idx]
        else:
    …
        phi_bins[i_phi] += frac * data[npt]

    -   if err_data == None or err_data[npt] == 0.0:
    +   if err_data is None or err_data[npt] == 0.0:
        if data_n < 0:
        data_n = -data_n
    …

        # Get the dq for resolution averaging
    -   if data2D.dqx_data != None and data2D.dqy_data != None:
    +   if data2D.dqx_data is not None and data2D.dqy_data is not None:
        # The pinholes and det. pix contribution present
        # in both direction of the 2D which must be subtracted when
    …
        y[i_bin] += frac * data_n
        x[i_bin] += frac * q_value
    -   if err_data[n] == None or err_data[n] == 0.0:
    +   if err_data[n] is None or err_data[n] == 0.0:
        if data_n < 0:
        data_n = -data_n
    …
        y_err[i_bin] += frac * frac * err_data[n] * err_data[n]

    -   if dq_data != None:
    +   if dq_data is not None:
        # To be consistent with dq calculation in 1d reduction,
        # we need just the averages (not quadratures) because
    …
        y_err[y_err == 0] = numpy.average(y_err)
        idx = (numpy.isfinite(y) & numpy.isfinite(y_err))
    -   if x_err != None:
    +   if x_err is not None:
        d_x = x_err[idx] / y_counts[idx]
        else:
src/sas/sascalc/dataloader/readers/ascii_reader.py
    r959eb01 → r235f514

        if new_lentoks > 2:
        _dy = float(toks[2])
    -   has_error_dy = False if _dy == None else True
    +   has_error_dy = False if _dy is None else True

        # If a 4th row is present, consider it dx
        if new_lentoks > 3:
        _dx = float(toks[3])
    -   has_error_dx = False if _dx == None else True
    +   has_error_dx = False if _dx is None else True

        # Delete the previously stored lines of data candidates if
src/sas/sascalc/dataloader/readers/cansas_reader.py
    r3c903ea → r7432acb

        :param data1d: presumably a Data1D object
        """
    -   if self.current_dataset == None:
    +   if self.current_dataset is None:
        x_vals = np.empty(0)
        y_vals = np.empty(0)
    …
        # Write the file
        file_ref = open(filename, 'w')
    -   if self.encoding == None:
    +   if self.encoding is None:
        self.encoding = "UTF-8"
        doc.write(file_ref, encoding=self.encoding,
    …
        :param entry_node: lxml node ElementTree object to be appended to
        """
    -   if datainfo.run == None or datainfo.run == []:
    +   if datainfo.run is None or datainfo.run == []:
        datainfo.run.append(RUN_NAME_DEFAULT)
        datainfo.run_name[RUN_NAME_DEFAULT] = RUN_NAME_DEFAULT
    …
        self.write_node(point, "T", spectrum.transmission[i],
        {'unit': spectrum.transmission_unit})
    -   if spectrum.transmission_deviation != None \
    +   if spectrum.transmission_deviation is not None \
        and len(spectrum.transmission_deviation) >= i:
        self.write_node(point, "Tdev",
    …
        str(datainfo.source.name))
        self.append(source, instr)
    -   if datainfo.source.radiation == None or datainfo.source.radiation == '':
    +   if datainfo.source.radiation is None or datainfo.source.radiation == '':
        datainfo.source.radiation = "neutron"
        self.write_node(source, "radiation", datainfo.source.radiation)
    …
        :param instr: lxml node ElementTree object to be appended to
        """
    -   if datainfo.collimation == [] or datainfo.collimation == None:
    +   if datainfo.collimation == [] or datainfo.collimation is None:
        coll = Collimation()
        datainfo.collimation.append(coll)
    …
        :param inst: lxml instrument node to be appended to
        """
    -   if datainfo.detector == None or datainfo.detector == []:
    +   if datainfo.detector is None or datainfo.detector == []:
        det = Detector()
        det.name = ""
    …
        local_unit = None
        exec "local_unit = storage.%s_unit" % toks[0]
    -   if local_unit != None and units.lower() != local_unit.lower():
    +   if local_unit is not None and units.lower() != local_unit.lower():
        if HAS_CONVERTER == True:
        try:
src/sas/sascalc/dataloader/readers/danse_reader.py
    r959eb01 → r235f514


        x_vals.append(qx)
    -   if xmin == None or qx < xmin:
    +   if xmin is None or qx < xmin:
        xmin = qx
    -   if xmax == None or qx > xmax:
    +   if xmax is None or qx > xmax:
        xmax = qx

    …

        y_vals.append(qy)
    -   if ymin == None or qy < ymin:
    +   if ymin is None or qy < ymin:
        ymin = qy
    -   if ymax == None or qy > ymax:
    +   if ymax is None or qy > ymax:
        ymax = qy

src/sas/sascalc/dataloader/readers/xml_reader.py
    r463e7ffc → r235f514

        :param name: The name of the element to be created
        """
    -   if attrib == None:
    +   if attrib is None:
        attrib = {}
        return etree.Element(name, attrib, nsmap)
    …
        """
        text = str(text)
    -   if attrib == None:
    +   if attrib is None:
        attrib = {}
        elem = E(elementname, attrib, text)
src/sas/sascalc/file_converter/cansas_writer.py
    r0b1a677 → r7432acb

        # Write the file
        file_ref = open(filename, 'w')
    -   if self.encoding == None:
    +   if self.encoding is None:
        self.encoding = "UTF-8"
        doc.write(file_ref, encoding=self.encoding,
    …
        self.write_node(point, "I", datainfo.y[i],
        {'unit': datainfo.y_unit})
    -   if datainfo.dy != None and len(datainfo.dy) > i:
    +   if datainfo.dy is not None and len(datainfo.dy) > i:
        self.write_node(point, "Idev", datainfo.dy[i],
        {'unit': datainfo.y_unit})
    -   if datainfo.dx != None and len(datainfo.dx) > i:
    +   if datainfo.dx is not None and len(datainfo.dx) > i:
        self.write_node(point, "Qdev", datainfo.dx[i],
        {'unit': datainfo.x_unit})
    -   if datainfo.dxw != None and len(datainfo.dxw) > i:
    +   if datainfo.dxw is not None and len(datainfo.dxw) > i:
        self.write_node(point, "dQw", datainfo.dxw[i],
        {'unit': datainfo.x_unit})
    -   if datainfo.dxl != None and len(datainfo.dxl) > i:
    +   if datainfo.dxl is not None and len(datainfo.dxl) > i:
        self.write_node(point, "dQl", datainfo.dxl[i],
        {'unit': datainfo.x_unit})
src/sas/sascalc/fit/AbstractFitEngine.py
    r9a5097c → r7432acb

        if qmin == 0.0 and not np.isfinite(self.y[qmin]):
        self.qmin = min(self.x[self.x != 0])
    -   elif qmin != None:
    +   elif qmin is not None:
        self.qmin = qmin
    -   if qmax != None:
    +   if qmax is not None:
        self.qmax = qmax
        # Determine the range needed in unsmeared-Q to cover
    …
        self._last_unsmeared_bin = len(self.x) - 1

    -   if self.smearer != None:
    +   if self.smearer is not None:
        self._first_unsmeared_bin, self._last_unsmeared_bin = \
        self.smearer.get_bin_range(self.qmin, self.qmax)
    …

        ## fitting range
    -   if qmin == None:
    +   if qmin is None:
        self.qmin = 1e-16
    -   if qmax == None:
    +   if qmax is None:
        self.qmax = math.sqrt(x_max * x_max + y_max * y_max)
        ## new error image for fitting purpose
    -   if self.err_data == None or self.err_data == []:
    +   if self.err_data is None or self.err_data == []:
        self.res_err_data = np.ones(len(self.data))
        else:
    …
        Set smearer
        """
    -   if smearer == None:
    +   if smearer is None:
        return
        self.smearer = smearer
    …
        if qmin == 0.0:
        self.qmin = 1e-16
    -   elif qmin != None:
    +   elif qmin is not None:
        self.qmin = qmin
    -   if qmax != None:
    +   if qmax is not None:
        self.qmax = qmax
        self.radius = np.sqrt(self.qx_data**2 + self.qy_data**2)
    …
        return the residuals
        """
    -   if self.smearer != None:
    +   if self.smearer is not None:
        fn.set_index(self.idx)
        gn = fn.get_value()
    …
        """
        """
    -   if self.pvec == None and self.model is None and self.param_list is None:
    +   if self.pvec is None and self.model is None and self.param_list is None:
        return "No results"
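A related subtlety in this file: the guard on err_data keeps the explicit comparisons ("is None or == []") rather than collapsing to a plain truth test, because a multi-element numpy array cannot be used directly as a boolean. A standalone sketch (not the module's code; the length check below is an illustrative substitute for the module's comparison against []):

    # Illustrative sketch only -- why "if not err_data:" is not a drop-in replacement.
    import numpy as np

    err_data = np.array([0.02, 0.03])
    try:
        if not err_data:          # ambiguous for arrays with more than one element
            pass
    except ValueError as exc:
        print(exc)                # "The truth value of an array ... is ambiguous"

    # An explicit form stays well defined:
    if err_data is None or len(err_data) == 0:
        err_data = np.ones(2)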
src/sas/sascalc/fit/Loader.py
    r959eb01 → rac07a3a

        def set_values(self):
        """ Store the values loaded from file in local variables"""
    -   if not self.filename == None:
    +   if self.filename is not None:
        input_f = open(self.filename, 'r')
        buff = input_f.read()
src/sas/sascalc/fit/MultiplicationModel.py
    r9a5097c → r7432acb

        """
        value = self.params['volfraction']
    -   if value != None:
    +   if value is not None:
        factor = self.p_model.calculate_VR()
    -   if factor == None or factor == NotImplemented or factor == 0.0:
    +   if factor is None or factor == NotImplemented or factor == 0.0:
        val = value
        else:
    …
        effective_radius = self.p_model.calculate_ER()
        #Reset the effective_radius of s_model just before the run
    -   if effective_radius != None and effective_radius != NotImplemented:
    +   if effective_radius is not None and effective_radius != NotImplemented:
        self.s_model.setParam('radius_effective', effective_radius)

src/sas/sascalc/invariant/invariant.py
    r959eb01 → r7432acb


        ##power is given only for function = power_law
    -   if power != None:
    +   if power is not None:
        sigma2 = linearized_data.dy * linearized_data.dy
        a = -(power)
    …
        self._data = self._get_data(data)
        # get the dxl if the data is smeared: This is done only once on init.
    -   if self._data.dxl != None and self._data.dxl.all() > 0:
    +   if self._data.dxl is not None and self._data.dxl.all() > 0:
        # assumes constant dxl
        self._smeared = self._data.dxl[0]
    …

        result_data = LoaderData1D(x=q, y=iq, dy=diq)
    -   if self._smeared != None:
    +   if self._smeared is not None:
        result_data.dxl = self._smeared * np.ones(len(q))
        return result_data
src/sas/sascalc/pr/fit/AbstractFitEngine.py
    r9a5097c → r7432acb

        if qmin == 0.0 and not np.isfinite(self.y[qmin]):
        self.qmin = min(self.x[self.x != 0])
    -   elif qmin != None:
    +   elif qmin is not None:
        self.qmin = qmin
    -   if qmax != None:
    +   if qmax is not None:
        self.qmax = qmax
        # Determine the range needed in unsmeared-Q to cover
    …
        self._last_unsmeared_bin = len(self.x) - 1

    -   if self.smearer != None:
    +   if self.smearer is not None:
        self._first_unsmeared_bin, self._last_unsmeared_bin = \
        self.smearer.get_bin_range(self.qmin, self.qmax)
    …

        ## fitting range
    -   if qmin == None:
    +   if qmin is None:
        self.qmin = 1e-16
    -   if qmax == None:
    +   if qmax is None:
        self.qmax = math.sqrt(x_max * x_max + y_max * y_max)
        ## new error image for fitting purpose
    -   if self.err_data == None or self.err_data == []:
    +   if self.err_data is None or self.err_data == []:
        self.res_err_data = np.ones(len(self.data))
        else:
    …
        Set smearer
        """
    -   if smearer == None:
    +   if smearer is None:
        return
        self.smearer = smearer
    …
        if qmin == 0.0:
        self.qmin = 1e-16
    -   elif qmin != None:
    +   elif qmin is not None:
        self.qmin = qmin
    -   if qmax != None:
    +   if qmax is not None:
        self.qmax = qmax
        self.radius = np.sqrt(self.qx_data**2 + self.qy_data**2)
    …
        return the residuals
        """
    -   if self.smearer != None:
    +   if self.smearer is not None:
        fn.set_index(self.idx)
        # Get necessary data from self.data and set the data for smearing
    …
        """
        """
    -   if self.pvec == None and self.model is None and self.param_list is None:
    +   if self.pvec is None and self.model is None and self.param_list is None:
        return "No results"
src/sas/sascalc/pr/fit/Loader.py
    r959eb01 → rac07a3a

        def set_values(self):
        """ Store the values loaded from file in local variables"""
    -   if not self.filename == None:
    +   if self.filename is not None:
        input_f = open(self.filename, 'r')
        buff = input_f.read()
src/sas/sascalc/pr/invertor.py
    r9c0f3c17 → r45dffa69

        return self.set_dmax(value)
        elif name == 'q_min':
    -   if value == None:
    +   if value is None:
        return self.set_qmin(-1.0)
        return self.set_qmin(value)
        elif name == 'q_max':
    -   if value == None:
    +   if value is None:
        return self.set_qmax(-1.0)
        return self.set_qmax(value)
    …
        Check q-value against user-defined range
        """
    -   if not self.q_min == None and q < self.q_min:
    +   if self.q_min is not None and q < self.q_min:
        return False
    -   if not self.q_max == None and q > self.q_max:
    +   if self.q_max is not None and q > self.q_max:
        return False
        return True
    …
        file.write("#has_bck=0\n")
        file.write("#alpha_estimate=%g\n" % self.suggested_alpha)
    -   if not self.out == None:
    +   if self.out is not None:
        if len(self.out) == len(self.cov):
        for i in range(len(self.out)):
src/sas/sascalc/pr/num_term.py
    r959eb01 → r7432acb

        self.alpha_list = []
        for k in range(self.nterm_min, self.nterm_max, 1):
    -   if self.isquit_func != None:
    +   if self.isquit_func is not None:
        self.isquit_func()
        best_alpha, message, _ = inver.estimate_alpha(k)
    …
        scale = None
        min_err = 0.0
    -   if not path == None:
    +   if path is not None:
        input_f = open(path, 'r')
        buff = input_f.read()
    …
        err = float(toks[2])
        else:
    -   if scale == None:
    +   if scale is None:
        scale = 0.05 * math.sqrt(test_y)
        #scale = 0.05/math.sqrt(y)
src/sas/sascalc/realspace/VolumeCanvas.py
    r959eb01 → r235f514

        """
        # If the handle is not provided, create one
    -   if id == None:
    +   if id is None:
        id = shapeDesc.params["type"]+str(self.shapecount)

    …
        """
        # If the handle is not provided, create one
    -   if id == None:
    +   if id is None:
        id = "shape"+str(self.shapecount)

    …
        # If this is the first simulation call, we need to generate the
        # space points
    -   if self.points == None:
    +   if self.points is None:
        self._create_modelObject()

        # Protect against empty model
    -   if self.points == None:
    +   if self.points is None:
        return 0

src/sas/sascalc/dataloader/readers/sesans_reader.py
    r9a5097c → rf6c2555

        """
        SESANS reader (based on ASCII reader)

        Reader for .ses or .sesans file format
    -
    -   Jurrian Bakker
    +
    +   Jurrian Bakker
        """
        import numpy as np
    …
        _ZERO = 1e-16

    +
        class Reader:
        """
        Class to load sesans files (6 columns).
        """
    -   ##File type
    +   # File type
        type_name = "SESANS"

    -   ##Wildcards
    +   # Wildcards
        type = ["SESANS files (*.ses)|*.ses",
        "SESANS files (*..sesans)|*.sesans"]
    -   ##List of allowed extensions
    +   # List of allowed extensions
        ext = ['.ses', '.SES', '.sesans', '.SESANS']

    -   ##Flag to bypass extension check
    +   # Flag to bypass extension check
        allow_all = True

        def read(self, path):
    -
    -   # print "reader triggered"
    -
        """
        Load data file

        :param path: file path

        :return: SESANSData1D object, or None

        :raise RuntimeError: when the file can't be opened
        :raise ValueError: when the length of the data vectors are inconsistent
    …
        basename = os.path.basename(path)
        _, extension = os.path.splitext(basename)
    -   if self.allow_all or extension.lower() in self.ext:
    -   try:
    -   # Read in binary mode since GRASP frequently has no-ascii
    -   # characters that brakes the open operation
    -   input_f = open(path,'rb')
    -   except:
    -   raise RuntimeError, "sesans_reader: cannot open %s" % path
    -   buff = input_f.read()
    -   lines = buff.splitlines()
    -   x = np.zeros(0)
    -   y = np.zeros(0)
    -   dy = np.zeros(0)
    -   lam = np.zeros(0)
    -   dlam = np.zeros(0)
    -   dx = np.zeros(0)
    -
    -   #temp. space to sort data
    -   tx = np.zeros(0)
    -   ty = np.zeros(0)
    -   tdy = np.zeros(0)
    -   tlam = np.zeros(0)
    -   tdlam = np.zeros(0)
    -   tdx = np.zeros(0)
    -   output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam, isSesans=True)
    -   self.filename = output.filename = basename
    +   if not (self.allow_all or extension.lower() in self.ext):
    +   raise RuntimeError(
    +   "{} has an unrecognized file extension".format(path))
    +   else:
    +   raise RuntimeError("{} is not a file".format(path))
    +   with open(path, 'r') as input_f:
    +   line = input_f.readline()
    +   params = {}
    +   while not line.startswith("BEGIN_DATA"):
    +   terms = line.split()
    +   if len(terms) >= 2:
    +   params[terms[0]] = " ".join(terms[1:])
    +   line = input_f.readline()
    +   self.params = params

    -   paramnames=[]
    -   paramvals=[]
    -   zvals=[]
    -   dzvals=[]
    -   lamvals=[]
    -   dlamvals=[]
    -   Pvals=[]
    -   dPvals=[]
    +   if "FileFormatVersion" not in self.params:
    +   raise RuntimeError("SES file missing FileFormatVersion")
    +   if float(self.params["FileFormatVersion"]) >= 2.0:
    +   raise RuntimeError("SASView only supports SES version 1")

    -   for line in lines:
    -   # Initial try for CSV (split on ,)
    -   line=line.strip()
    -   toks = line.split('\t')
    -   if len(toks)==2:
    -   paramnames.append(toks[0])
    -   paramvals.append(toks[1])
    -   if len(toks)>5:
    -   zvals.append(toks[0])
    -   dzvals.append(toks[3])
    -   lamvals.append(toks[4])
    -   dlamvals.append(toks[5])
    -   Pvals.append(toks[1])
    -   dPvals.append(toks[2])
    -   else:
    -   continue
    +   if "SpinEchoLength_unit" not in self.params:
    +   raise RuntimeError("SpinEchoLength has no units")
    +   if "Wavelength_unit" not in self.params:
    +   raise RuntimeError("Wavelength has no units")
    +   if params["SpinEchoLength_unit"] != params["Wavelength_unit"]:
    +   raise RuntimeError("The spin echo data has rudely used "
    +   "different units for the spin echo length "
    +   "and the wavelength. While sasview could "
    +   "handle this instance, it is a violation "
    +   "of the file format and will not be "
    +   "handled by other software.")

    -   x=[]
    -   y=[]
    -   lam=[]
    -   dx=[]
    -   dy=[]
    -   dlam=[]
    -   lam_header = lamvals[0].split()
    -   data_conv_z = None
    -   default_z_unit = "A"
    -   data_conv_P = None
    -   default_p_unit = " " # Adjust unit for axis (L^-3)
    -   lam_unit = lam_header[1].replace("[","").replace("]","")
    -   if lam_unit == 'AA':
    -   lam_unit = 'A'
    -   varheader=[zvals[0],dzvals[0],lamvals[0],dlamvals[0],Pvals[0],dPvals[0]]
    -   valrange=range(1, len(zvals))
    -   for i in valrange:
    -   x.append(float(zvals[i]))
    -   y.append(float(Pvals[i]))
    -   lam.append(float(lamvals[i]))
    -   dy.append(float(dPvals[i]))
    -   dx.append(float(dzvals[i]))
    -   dlam.append(float(dlamvals[i]))
    +   headers = input_f.readline().split()

    -   x,y,lam,dy,dx,dlam = [
    -   np.asarray(v, 'double')
    -   for v in (x,y,lam,dy,dx,dlam)
    -   ]
    +   self._insist_header(headers, "SpinEchoLength")
    +   self._insist_header(headers, "Depolarisation")
    +   self._insist_header(headers, "Depolarisation_error")
    +   self._insist_header(headers, "Wavelength")

    -   input_f.close()
    +   data = np.loadtxt(input_f)

    -   output.x, output.x_unit = self._unit_conversion(x, lam_unit, default_z_unit)
    -   output.y = y
    -   output.y_unit = r'\AA^{-2} cm^{-1}' # output y_unit added
    -   output.dx, output.dx_unit = self._unit_conversion(dx, lam_unit, default_z_unit)
    -   output.dy = dy
    -   output.lam, output.lam_unit = self._unit_conversion(lam, lam_unit, default_z_unit)
    -   output.dlam, output.dlam_unit = self._unit_conversion(dlam, lam_unit, default_z_unit)
    -
    -   output.xaxis(r"\rm{z}", output.x_unit)
    -   output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", output.y_unit) # Adjust label to ln P/(lam^2 t), remove lam column refs
    +   if data.shape[1] != len(headers):
    +   raise RuntimeError(
    +   "File has {} headers, but {} columns".format(
    +   len(headers),
    +   data.shape[1]))

    -   # Store loading process information
    -   output.meta_data['loader'] = self.type_name
    -   #output.sample.thickness = float(paramvals[6])
    -   output.sample.name = paramvals[1]
    -   output.sample.ID = paramvals[0]
    -   zaccept_unit_split = paramnames[7].split("[")
    -   zaccept_unit = zaccept_unit_split[1].replace("]","")
    -   if zaccept_unit.strip() == r'\AA^-1' or zaccept_unit.strip() == r'\A^-1':
    -   zaccept_unit = "1/A"
    -   output.sample.zacceptance=(float(paramvals[7]),zaccept_unit)
    -   output.vars = varheader
    +   if data.size < 1:
    +   raise RuntimeError("{} is empty".format(path))
    +   x = data[:, headers.index("SpinEchoLength")]
    +   if "SpinEchoLength_error" in headers:
    +   dx = data[:, headers.index("SpinEchoLength_error")]
    +   else:
    +   dx = x*0.05
    +   lam = data[:, headers.index("Wavelength")]
    +   if "Wavelength_error" in headers:
    +   dlam = data[:, headers.index("Wavelength_error")]
    +   else:
    +   dlam = lam*0.05
    +   y = data[:, headers.index("Depolarisation")]
    +   dy = data[:, headers.index("Depolarisation_error")]

    -   if len(output.x) < 1:
    -   raise RuntimeError, "%s is empty" % path
    -   return output
    +   lam_unit = self._unit_fetch("Wavelength")
    +   x, x_unit = self._unit_conversion(x, "A",
    +   self._unit_fetch(
    +   "SpinEchoLength"))
    +   dx, dx_unit = self._unit_conversion(
    +   dx, lam_unit,
    +   self._unit_fetch("SpinEchoLength"))
    +   dlam, dlam_unit = self._unit_conversion(
    +   dlam, lam_unit,
    +   self._unit_fetch("Wavelength"))
    +   y_unit = self._unit_fetch("Depolarisation")

    -   else:
    -   raise RuntimeError, "%s is not a file" % path
    -   return None
    +   output = Data1D(x=x, y=y, lam=lam, dy=dy, dx=dx, dlam=dlam,
    +   isSesans=True)

    -   def _unit_conversion(self, value, value_unit, default_unit):
    -   if has_converter == True and value_unit != default_unit:
    -   data_conv_q = Converter(value_unit)
    -   value = data_conv_q(value, units=default_unit)
    +   output.y_unit = y_unit
    +   output.x_unit = x_unit
    +   output.source.wavelength_unit = lam_unit
    +   output.source.wavelength = lam
    +   self.filename = output.filename = basename
    +   output.xaxis(r"\rm{z}", x_unit)
    +   # Adjust label to ln P/(lam^2 t), remove lam column refs
    +   output.yaxis(r"\rm{ln(P)/(t \lambda^2)}", y_unit)
    +   # Store loading process information
    +   output.meta_data['loader'] = self.type_name
    +   output.sample.name = params["Sample"]
    +   output.sample.ID = params["DataFileTitle"]
    +   output.sample.thickness = self._unit_conversion(
    +   float(params["Thickness"]), "cm",
    +   self._unit_fetch("Thickness"))[0]
    +
    +   output.sample.zacceptance = (
    +   float(params["Theta_zmax"]),
    +   self._unit_fetch("Theta_zmax"))
    +
    +   output.sample.yacceptance = (
    +   float(params["Theta_ymax"]),
    +   self._unit_fetch("Theta_ymax"))
    +   return output
    +
    +   @staticmethod
    +   def _insist_header(headers, name):
    +   if name not in headers:
    +   raise RuntimeError(
    +   "Missing {} column in spin echo data".format(name))
    +
    +   @staticmethod
    +   def _unit_conversion(value, value_unit, default_unit):
    +   """
    +   Performs unit conversion on a measurement.
    +
    +   :param value: The magnitude of the measurement
    +   :param value_unit: a string containing the final desired unit
    +   :param default_unit: string with the units of the original measurement
    +   :return: The magnitude of the measurement in the new units
    +   """
    +   # (float, string, string) -> float
    +   if has_converter and value_unit != default_unit:
    +   data_conv_q = Converter(default_unit)
    +   value = data_conv_q(value, units=value_unit)
        new_unit = default_unit
        else:
        new_unit = value_unit
        return value, new_unit
    +
    +   def _unit_fetch(self, unit):
    +   return self.params[unit+"_unit"]
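For orientation (not part of the changeset), a hypothetical usage sketch of the rewritten reader; "example.ses" is an assumed path to a SES version 1 file carrying the header keys and data columns checked above (FileFormatVersion, SpinEchoLength_unit, Wavelength_unit, Sample, DataFileTitle, Thickness, Theta_zmax, Theta_ymax, a BEGIN_DATA marker, and SpinEchoLength / Depolarisation / Depolarisation_error / Wavelength columns):

    # Hypothetical usage sketch only -- the file name and its contents are assumed.
    from sas.sascalc.dataloader.readers.sesans_reader import Reader

    reader = Reader()
    data = reader.read("example.ses")     # Data1D with isSesans=True
    print(data.x, data.y)                 # spin echo length vs. ln(P)/(t lambda^2)
    print(data.sample.zacceptance)        # (value, unit) tuple taken from the file header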
Note: See TracChangeset for help on using the changeset viewer.