Changes in / [ce0a245:2a399ca] in sasview
- Files: 18 edited
Legend: Unmodified | Added | Removed
docs/sphinx-docs/source/conf.py
r6394851 r959eb01 39 39 'sphinx.ext.viewcode'] 40 40 41 #set mathjax path42 mathjax_path="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-MML-AM_CHTML"43 44 41 # Add any paths that contain templates here, relative to this directory. 45 42 templates_path = ['_templates'] … … 65 62 version = '4.1' 66 63 # The full version, including alpha/beta/rc tags. 67 release = '4.1. 2'64 release = '4.1.0' 68 65 69 66 # The language for content autogenerated by Sphinx. Refer to documentation -
sasview/README.txt
r6394851 r311d00a 4 4 1- Features 5 5 =========== 6 - New in Version 4.1.27 --------------------8 This point release is a bug-fix release addressing:9 10 - Fixes #984: PDF Reports Generate Empty PDFs11 - Fixes a path typo12 - 64 bit and 32 bit Windows executables now available13 14 It is recommended that all users upgrade to this version15 16 - New in Version 4.1.117 --------------------18 This point release is a bug-fix release addressing:19 20 - Fixes #948: Mathjax CDN is going away21 - Fixes #938: Cannot read canSAS1D file output by SasView22 - Fixes #960: Save project throws error if empty fit page23 - Fixes #929: Problem deleting data in first fit page24 - Fixes #918: Test folders not bundled with release25 - Fixes an issue with the live discovery of plugin models26 - Fixes an issue with the NXcanSAS data loader27 - Updated tutorials for SasView 4.x.y28 29 6 - New in Version 4.1.0 30 7 ------------------ -
sasview/__init__.py
r6394851 r463e7ffc 1 __version__ = "4.1 .2"1 __version__ = "4.1" 2 2 __build__ = "GIT_COMMIT" 3 3 -
sasview/local_config.py
rd908932 ra1b8fee 47 47 '''This work benefited from the use of the SasView application, originally developed under NSF Award DMR-0520547. SasView also contains code developed with funding from the EU Horizon 2020 programme under the SINE2020 project Grant No 654000.''' 48 48 _acknowledgement_citation = \ 49 '''M. Doucet et al. SasView Version 4.1 .2, Zenodo, 10.5281/zenodo.825675'''49 '''M. Doucet et al. SasView Version 4.1, Zenodo, 10.5281/zenodo.438138''' 50 50 51 51 _acknowledgement = \ -
sasview/sasview.spec
r945f45d re42c8e9d 138 138 'sasmodels.core', 139 139 'pyopencl', 140 'tinycc', 141 'xhtml2pdf' 140 'tinycc' 142 141 ] 143 142 -
sasview/setup_exe.py
r3563e06 ra1b8fee 179 179 test_1d_dir = os.path.join(path, "test\\1d_data") 180 180 test_2d_dir = os.path.join(path, "test\\2d_data") 181 test_sesans_dir = os.path.join(path, "test\\sesans_data")182 test_convertible_dir = os.path.join(path, "test\\convertible_files")183 181 test_save_dir = os.path.join(path, "test\\save_states") 184 test_coord_dir = os.path.join(path, "test\\coordinate_data") 185 test_image_dir = os.path.join(path, "test\\image_data") 186 test_other_dir = os.path.join(path, "test\\other_files") 182 test_upcoming_dir = os.path.join(path, "test\\upcoming_formats") 187 183 188 184 matplotlibdatadir = matplotlib.get_data_path() … … 273 269 # Copying the images directory to the distribution directory. 274 270 for f in findall(images_dir): 275 data_files.append(("images", [f])) 271 if not ".svn" in f: 272 data_files.append(("images", [f])) 276 273 277 274 # Copying the HTML help docs 278 275 for f in findall(media_dir): 279 data_files.append(("media", [f])) 276 if not ".svn" in f: 277 data_files.append(("media", [f])) 280 278 281 279 # Copying the sample data user data 282 280 for f in findall(test_1d_dir): 283 data_files.append(("test\\1d_data", [f])) 281 if not ".svn" in f: 282 data_files.append(("test\\1d_data", [f])) 283 284 # Copying the sample data user data 284 285 for f in findall(test_2d_dir): 285 data_files.append(("test\\2d_data", [f])) 286 if not ".svn" in f: 287 data_files.append(("test\\2d_data", [f])) 288 289 # Copying the sample data user data 286 290 for f in findall(test_save_dir): 287 data_files.append(("test\\save_states", [f])) 288 for f in findall(test_sesans_dir): 289 data_files.append(("test\\sesans_data", [f])) 290 for f in findall(test_convertible_dir): 291 data_files.append(("test\\convertible_files", [f])) 292 for f in findall(test_coord_dir): 293 data_files.append(("test\\coordinate_data", [f])) 294 for f in findall(test_image_dir): 295 data_files.append(("test\\image_data", [f])) 296 for f in findall(test_other_dir): 297 
data_files.append(("test\\other_files", [f])) 291 if not ".svn" in f: 292 data_files.append(("test\\save_states", [f])) 293 294 # Copying the sample data user data 295 for f in findall(test_upcoming_dir): 296 if not ".svn" in f: 297 data_files.append(("test\\upcoming_formats", [f])) 298 298 299 299 # Copying opencl include files -
src/sas/sascalc/dataloader/file_reader_base_class.py
rae69c690 ra78a02f 115 115 data.y = np.asarray([data.y[i] for i in ind]).astype(np.float64) 116 116 if data.dx is not None: 117 if len(data.dx) == 0:118 data.dx = None119 continue120 117 data.dx = np.asarray([data.dx[i] for i in ind]).astype(np.float64) 121 118 if data.dxl is not None: … … 124 121 data.dxw = np.asarray([data.dxw[i] for i in ind]).astype(np.float64) 125 122 if data.dy is not None: 126 if len(data.dy) == 0:127 data.dy = None128 continue129 123 data.dy = np.asarray([data.dy[i] for i in ind]).astype(np.float64) 130 124 if data.lam is not None: … … 191 185 self.output = [] 192 186 193 def remove_empty_q_values(self, has_error_dx=False, has_error_dy=False, 194 has_error_dxl=False, has_error_dxw=False): 187 def remove_empty_q_values(self, has_error_dx=False, has_error_dy=False): 195 188 """ 196 189 Remove any point where Q == 0 … … 199 192 self.current_dataset.x = self.current_dataset.x[x != 0] 200 193 self.current_dataset.y = self.current_dataset.y[x != 0] 201 if has_error_dy: 202 self.current_dataset.dy = self.current_dataset.dy[x != 0] 203 if has_error_dx: 204 self.current_dataset.dx = self.current_dataset.dx[x != 0] 205 if has_error_dxl: 206 self.current_dataset.dxl = self.current_dataset.dxl[x != 0] 207 if has_error_dxw: 208 self.current_dataset.dxw = self.current_dataset.dxw[x != 0] 194 self.current_dataset.dy = self.current_dataset.dy[x != 0] if \ 195 has_error_dy else np.zeros(len(self.current_dataset.y)) 196 self.current_dataset.dx = self.current_dataset.dx[x != 0] if \ 197 has_error_dx else np.zeros(len(self.current_dataset.x)) 209 198 210 199 def reset_data_list(self, no_lines=0): … … 215 204 x = np.zeros(no_lines) 216 205 y = np.zeros(no_lines) 206 dy = np.zeros(no_lines) 217 207 dx = np.zeros(no_lines) 218 dy = np.zeros(no_lines)219 208 self.current_dataset = plottable_1D(x, y, dx, dy) 220 209 -
src/sas/sascalc/dataloader/readers/cansas_reader.py
rae69c690 ra78a02f 130 130 self.current_datainfo.meta_data[PREPROCESS] = self.processing_instructions 131 131 self._parse_entry(entry) 132 self.data_cleanup() 132 has_error_dx = self.current_dataset.dx is not None 133 has_error_dy = self.current_dataset.dy is not None 134 self.remove_empty_q_values(has_error_dx=has_error_dx, 135 has_error_dy=has_error_dy) 136 self.send_to_output() # Combine datasets with DataInfo 137 self.current_datainfo = DataInfo() # Reset DataInfo 133 138 except FileContentsException as fc_exc: 134 139 # File doesn't meet schema - try loading with a less strict schema … … 149 154 self.load_file_and_schema(xml_file) # Reload strict schema so we can find where error are in file 150 155 invalid_xml = self.find_invalid_xml() 151 if invalid_xml != "": 152 invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml 153 raise DataReaderException(invalid_xml) # Handled by base class 156 invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml 157 raise DataReaderException(invalid_xml) # Handled by base class 154 158 except FileContentsException as fc_exc: 155 159 msg = "CanSAS Reader could not load the file {}".format(xml_file) … … 275 279 # I and Q points 276 280 elif tagname == 'I' and isinstance(self.current_dataset, plottable_1D): 277 self.current_dataset.yaxis("Intensity", unit) 281 unit_list = unit.split("|") 282 if len(unit_list) > 1: 283 self.current_dataset.yaxis(unit_list[0].strip(), 284 unit_list[1].strip()) 285 else: 286 self.current_dataset.yaxis("Intensity", unit) 278 287 self.current_dataset.y = np.append(self.current_dataset.y, data_point) 279 288 elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_1D): 280 289 self.current_dataset.dy = np.append(self.current_dataset.dy, data_point) 281 290 elif tagname == 'Q': 282 self.current_dataset.xaxis("Q", unit) 291 unit_list = unit.split("|") 292 if len(unit_list) > 1: 293 self.current_dataset.xaxis(unit_list[0].strip(), 294 unit_list[1].strip()) 
295 else: 296 self.current_dataset.xaxis("Q", unit) 283 297 self.current_dataset.x = np.append(self.current_dataset.x, data_point) 284 298 elif tagname == 'Qdev': 285 299 self.current_dataset.dx = np.append(self.current_dataset.dx, data_point) 286 300 elif tagname == 'dQw': 287 self.current_dataset.dxw = np.append(self.current_dataset.dxw, data_point) 301 if self.current_dataset.dxw is None: 302 self.current_dataset.dxw = np.empty(0) 303 self.current_dataset.dxw = np.append(self.current_dataset.dxw, data_point) 288 304 elif tagname == 'dQl': 305 if self.current_dataset.dxl is None: 306 self.current_dataset.dxl = np.empty(0) 289 307 self.current_dataset.dxl = np.append(self.current_dataset.dxl, data_point) 290 308 elif tagname == 'Qmean': … … 294 312 elif tagname == 'Sesans': 295 313 self.current_datainfo.isSesans = bool(data_point) 296 self.current_dataset.xaxis(attr.get('x_axis'),297 attr.get('x_unit'))298 self.current_dataset.yaxis(attr.get('y_axis'),299 attr.get('y_unit'))300 314 elif tagname == 'yacceptance': 301 315 self.current_datainfo.sample.yacceptance = (data_point, unit) … … 498 512 for error in self.errors: 499 513 self.current_datainfo.errors.add(error) 500 self.data_cleanup() 501 self.sort_one_d_data() 502 self.sort_two_d_data() 503 self.reset_data_list() 514 self.errors.clear() 515 self.send_to_output() 504 516 empty = None 505 517 return self.output[0], empty 506 507 def data_cleanup(self):508 """509 Clean up the data sets and refresh everything510 :return: None511 """512 has_error_dx = self.current_dataset.dx is not None513 has_error_dxl = self.current_dataset.dxl is not None514 has_error_dxw = self.current_dataset.dxw is not None515 has_error_dy = self.current_dataset.dy is not None516 self.remove_empty_q_values(has_error_dx=has_error_dx,517 has_error_dxl=has_error_dxl,518 has_error_dxw=has_error_dxw,519 has_error_dy=has_error_dy)520 self.send_to_output() # Combine datasets with DataInfo521 self.current_datainfo = DataInfo() # Reset DataInfo522 
518 523 519 def _is_call_local(self): … … 646 642 value_unit = local_unit 647 643 except KeyError: 648 # Do not throw an error for loading Sesans data in cansas xml 649 # This is a temporary fix. 650 if local_unit != "A" and local_unit != 'pol': 651 err_msg = "CanSAS reader: unexpected " 652 err_msg += "\"{0}\" unit [{1}]; " 653 err_msg = err_msg.format(tagname, local_unit) 654 err_msg += "expecting [{0}]".format(default_unit) 644 err_msg = "CanSAS reader: unexpected " 645 err_msg += "\"{0}\" unit [{1}]; " 646 err_msg = err_msg.format(tagname, local_unit) 647 err_msg += "expecting [{0}]".format(default_unit) 655 648 value_unit = local_unit 656 649 except: … … 682 675 di_exists = True 683 676 if dqw_exists and not dql_exists: 684 array_size = self.current_dataset.dxw.size 685 self.current_dataset.dxl = np.zeros(array_size) 677 array_size = self.current_dataset.dxw.size - 1 678 self.current_dataset.dxl = np.append(self.current_dataset.dxl, 679 np.zeros([array_size])) 686 680 elif dql_exists and not dqw_exists: 687 array_size = self.current_dataset.dxl.size 688 self.current_dataset.dxw = np.zeros(array_size) 681 array_size = self.current_dataset.dxl.size - 1 682 self.current_dataset.dxw = np.append(self.current_dataset.dxw, 683 np.zeros([array_size])) 689 684 elif not dql_exists and not dqw_exists and not dq_exists: 690 array_size = self.current_dataset.x.size 685 array_size = self.current_dataset.x.size - 1 691 686 self.current_dataset.dx = np.append(self.current_dataset.dx, 692 687 np.zeros([array_size])) 693 688 if not di_exists: 694 array_size = self.current_dataset.y.size 689 array_size = self.current_dataset.y.size - 1 695 690 self.current_dataset.dy = np.append(self.current_dataset.dy, 696 691 np.zeros([array_size])) … … 862 857 node.append(point) 863 858 self.write_node(point, "Q", datainfo.x[i], 864 {'unit': datainfo. 
x_unit})859 {'unit': datainfo._xaxis + " | " + datainfo._xunit}) 865 860 if len(datainfo.y) >= i: 866 861 self.write_node(point, "I", datainfo.y[i], 867 {'unit': datainfo. y_unit})862 {'unit': datainfo._yaxis + " | " + datainfo._yunit}) 868 863 if datainfo.dy is not None and len(datainfo.dy) > i: 869 864 self.write_node(point, "Idev", datainfo.dy[i], 870 {'unit': datainfo. y_unit})865 {'unit': datainfo._yaxis + " | " + datainfo._yunit}) 871 866 if datainfo.dx is not None and len(datainfo.dx) > i: 872 867 self.write_node(point, "Qdev", datainfo.dx[i], 873 {'unit': datainfo. x_unit})868 {'unit': datainfo._xaxis + " | " + datainfo._xunit}) 874 869 if datainfo.dxw is not None and len(datainfo.dxw) > i: 875 870 self.write_node(point, "dQw", datainfo.dxw[i], 876 {'unit': datainfo. x_unit})871 {'unit': datainfo._xaxis + " | " + datainfo._xunit}) 877 872 if datainfo.dxl is not None and len(datainfo.dxl) > i: 878 873 self.write_node(point, "dQl", datainfo.dxl[i], 879 {'unit': datainfo. x_unit})874 {'unit': datainfo._xaxis + " | " + datainfo._xunit}) 880 875 if datainfo.isSesans: 881 sesans_attrib = {'x_axis': datainfo._xaxis, 882 'y_axis': datainfo._yaxis, 883 'x_unit': datainfo.x_unit, 884 'y_unit': datainfo.y_unit} 885 sesans = self.create_element("Sesans", attrib=sesans_attrib) 876 sesans = self.create_element("Sesans") 886 877 sesans.text = str(datainfo.isSesans) 887 entry_node.append(sesans)888 self.write_node( entry_node, "yacceptance", datainfo.sample.yacceptance[0],878 node.append(sesans) 879 self.write_node(node, "yacceptance", datainfo.sample.yacceptance[0], 889 880 {'unit': datainfo.sample.yacceptance[1]}) 890 self.write_node( entry_node, "zacceptance", datainfo.sample.zacceptance[0],881 self.write_node(node, "zacceptance", datainfo.sample.zacceptance[0], 891 882 {'unit': datainfo.sample.zacceptance[1]}) 892 883 -
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
rc9ecd1b rdcb91cf 140 140 141 141 if isinstance(value, h5py.Group): 142 # Set parent class before recursion143 142 self.parent_class = class_name 144 143 parent_list.append(key) … … 151 150 # Recursion step to access data within the group 152 151 self.read_children(value, parent_list) 153 # Reset parent class when returning from recursive method154 self.parent_class = class_name155 152 self.add_intermediate() 156 153 parent_list.remove(key) -
src/sas/sascalc/dataloader/readers/xml_reader.py
rfafe52a rfafe52a 134 134 first_error = schema.assertValid(self.xmldoc) 135 135 except etree.DocumentInvalid as err: 136 # Suppress errors for <'any'> elements137 if "##other" in str(err):138 return first_error139 136 first_error = str(err) 140 137 return first_error -
src/sas/sascalc/invariant/invariant.py
rb1f20d1 r7432acb 610 610 # Data boundaries for fitting 611 611 qmin = self._data.x[0] 612 qmax = self._data.x[ int(self._low_extrapolation_npts - 1)]612 qmax = self._data.x[self._low_extrapolation_npts - 1] 613 613 614 614 # Extrapolate the low-Q data … … 649 649 # Data boundaries for fitting 650 650 x_len = len(self._data.x) - 1 651 qmin = self._data.x[ int(x_len - (self._high_extrapolation_npts - 1))]652 qmax = self._data.x[ int(x_len)]651 qmin = self._data.x[x_len - (self._high_extrapolation_npts - 1)] 652 qmax = self._data.x[x_len] 653 653 654 654 # fit the data with a model to get the appropriate parameters … … 688 688 if npts_in is None: 689 689 npts_in = self._low_extrapolation_npts 690 q_end = self._data.x[max(0, int(npts_in - 1))]690 q_end = self._data.x[max(0, npts_in - 1)] 691 691 692 692 if q_start >= q_end: … … 714 714 # Get extrapolation range 715 715 if npts_in is None: 716 npts_in = int(self._high_extrapolation_npts)716 npts_in = self._high_extrapolation_npts 717 717 _npts = len(self._data.x) 718 q_start = self._data.x[min(_npts, int(_npts - npts_in))]718 q_start = self._data.x[min(_npts, _npts - npts_in)] 719 719 720 720 if q_start >= q_end: -
src/sas/sasgui/guiframe/config.py
rd908932 ra1b8fee 48 48 '''This work benefited from the use of the SasView application, originally developed under NSF Award DMR-0520547. SasView also contains code developed with funding from the EU Horizon 2020 programme under the SINE2020 project Grant No 654000.''' 49 49 _acknowledgement_citation = \ 50 '''M. Doucet et al. SasView Version 4.1 .2, Zenodo, 10.5281/zenodo.825675'''50 '''M. Doucet et al. SasView Version 4.1, Zenodo, 10.5281/zenodo.438138''' 51 51 52 52 _acknowledgement = \ -
src/sas/sasgui/guiframe/documentation_window.py
rd7ee5866 r959eb01 75 75 logger.error("Could not find Sphinx documentation at %s \ 76 76 -- has it been built?", file_path) 77 #Commenting following 5 lines, so default browser is forced 78 #This is due to CDN mathjax discontinuation of service, intenal help 79 #browser should be back with qt version 80 #Note added by Wojtek Potrzebowski, July 4th 2017 81 # elif WX_SUPPORTS_HTML2: 82 # # Complete HTML/CSS support! 83 # self.view = html.WebView.New(self) 84 # self.view.LoadURL(url) 85 # self.Show() 77 elif WX_SUPPORTS_HTML2: 78 # Complete HTML/CSS support! 79 self.view = html.WebView.New(self) 80 self.view.LoadURL(url) 81 self.Show() 86 82 else: 87 83 logger.error("No html2 support, popping up a web browser") -
src/sas/sasgui/perspectives/fitting/fitpage.py
r79e6a33 r79e6a33 1243 1243 wx.PostEvent(self.parent, new_event) 1244 1244 # update list of plugins if new plugin is available 1245 custom_model = CUSTOM_MODEL 1245 1246 mod_cat = self.categorybox.GetStringSelection() 1246 if mod_cat == CUSTOM_MODEL: 1247 temp_id = self.model.id 1247 if mod_cat == custom_model: 1248 1248 temp = self.parent.update_model_list() 1249 for v in self.parent.model_dictionary.values():1250 if v.id == temp_id:1251 self.model = v()1252 break1253 1249 if temp: 1254 1250 self.model_list_box = temp -
src/sas/sasgui/perspectives/fitting/fitpanel.py
rc9ecd1b r6f9abd3 92 92 # state must be cloned 93 93 state = page.get_state().clone() 94 # data_list only populated with real data 95 # Fake object in data from page.get_data() if model is selected 96 if len(page.data_list) is not 0 and page.model is not None: 94 if data is not None or page.model is not None: 97 95 new_doc = self._manager.state_reader.write_toXML(data, 98 96 state, 99 97 batch_state) 100 # Fit #2 through #n are append to first fit101 98 if doc is not None and hasattr(doc, "firstChild"): 102 # Only append if properly formed new_doc 103 if new_doc is not None and hasattr(new_doc, "firstChild"): 104 child = new_doc.firstChild.firstChild 105 doc.firstChild.appendChild(child) 106 # First fit defines the main document 99 child = new_doc.firstChild.firstChild 100 doc.firstChild.appendChild(child) 107 101 else: 108 102 doc = new_doc … … 401 395 temp_data = page.get_data() 402 396 if temp_data is not None and temp_data.id in data: 403 self.close_page_with_data(temp_data) 397 self.SetSelection(pos) 398 self.on_close_page(event=None) 399 temp = self.GetSelection() 400 self.DeletePage(temp) 404 401 if self.sim_page is not None: 405 402 if len(self.sim_page.model_list) == 0: … … 407 404 self.SetSelection(pos) 408 405 self.on_close_page(event=None) 409 self.DeletePage(pos) 406 temp = self.GetSelection() 407 self.DeletePage(temp) 410 408 self.sim_page = None 411 409 self.batch_on = False -
src/sas/sasgui/perspectives/fitting/models.py
r632fda9 r632fda9 21 21 from sas.sasgui.guiframe.CategoryInstaller import CategoryInstaller 22 22 from sasmodels.sasview_model import load_custom_model, load_standard_models 23 from sas.sasgui.perspectives.fitting.fitpage import CUSTOM_MODEL24 23 25 24 logger = logging.getLogger(__name__) … … 267 266 temp = {} 268 267 if self.is_changed(): 269 temp = _find_models() 270 self.last_time_dir_modified = time.time() 271 return temp 268 return _find_models() 272 269 logger.info("plugin model : %s" % str(temp)) 273 270 return temp … … 326 323 if os.path.isdir(plugin_dir): 327 324 temp = os.path.getmtime(plugin_dir) 328 if self.last_time_dir_modified <temp:325 if self.last_time_dir_modified != temp: 329 326 is_modified = True 330 327 self.last_time_dir_modified = temp … … 337 334 new models were added else return empty dictionary 338 335 """ 339 self.plugins = []340 336 new_plugins = self.findModels() 341 if new_plugins: 342 for name, plug in new_plugins.items(): 343 self.stored_plugins[name] = plug 344 self.plugins.append(plug) 345 self.model_dictionary[name] = plug 346 self.model_combobox.set_list(CUSTOM_MODEL, self.plugins) 337 if len(new_plugins) > 0: 338 for name, plug in new_plugins.iteritems(): 339 if name not in self.stored_plugins.keys(): 340 self.stored_plugins[name] = plug 341 self.plugins.append(plug) 342 self.model_dictionary[name] = plug 343 self.model_combobox.set_list("Plugin Models", self.plugins) 347 344 return self.model_combobox.get_list() 348 345 else: -
src/sas/sasgui/perspectives/fitting/pagestate.py
rda9b239 r959eb01 617 617 value = "" 618 618 content = line.split(":") 619 if line == '' or len(content) == 1:620 continue621 619 name = content[0] 622 620 try: -
test/sasdataloader/test/utest_abs_reader.py
rae69c690 ra78a02f 333 333 self.assertEqual(self.data.x[1], 0.03) 334 334 self.assertAlmostEquals(self.data.y[1], 1001.0) 335 self.assertEqual(self.data.dx[0], 0.0) 335 336 self.assertEqual(self.data.dxl[1], 0.005) 336 337 self.assertEqual(self.data.dxw[1], 0.001)
Note: See TracChangeset for help on using the changeset viewer.