- Timestamp:
- Sep 14, 2017 12:28:11 PM (7 years ago)
- Branches:
- master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children:
- 590b5c2
- Parents:
- 4660990 (diff), 7b3f154 (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent.
- Location:
- src/sas
- Files:
- 3 added
- 16 edited
src/sas/sasgui/guiframe/local_perspectives/plotting/plotting.py
(r235f514 → r2d9526d)

 import wx
 import sys
+from copy import deepcopy
 from sas.sasgui.guiframe.events import EVT_NEW_PLOT
 from sas.sasgui.guiframe.events import EVT_PLOT_QRANGE
…
                 action_check = True
             else:
+                if action_string == 'update':
+                    # Update all existing plots of data with this ID
+                    for data in event.plots:
+                        for panel in self.plot_panels.values():
+                            if data.id in panel.plots.keys():
+                                plot_exists = True
+                                # Pass each panel it's own copy of the data
+                                # that's being updated, otherwise things like
+                                # colour and line thickness are unintentionally
+                                # synced across panels
+                                self.update_panel(deepcopy(data), panel)
+                    return
+
                 group_id = event.group_id
-                if group_id in self.plot_panels.keys():
+                if group_id in self.plot_panels:
                     #remove data from panel
                     if action_string == 'remove':
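The deepcopy in the new 'update' branch matters because every panel would otherwise hold a reference to the same plottable object. A minimal standalone sketch (plain Python, not part of this changeset; the panel dicts are hypothetical stand-ins) of the aliasing problem the added comment describes:

    from copy import deepcopy

    # Hypothetical stand-ins for two plot panels that display the "same" curve.
    curve = {"id": 1, "colour": "blue"}
    panel_a = {"curve": curve}
    panel_b = {"curve": curve}          # shares the object, so styling gets synced

    panel_a["curve"]["colour"] = "red"
    print(panel_b["curve"]["colour"])   # "red": the change leaked into panel B

    panel_b = {"curve": deepcopy(curve)}   # give each panel its own copy instead
    panel_a["curve"]["colour"] = "green"
    print(panel_b["curve"]["colour"])   # still "red", panels stay independent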
src/sas/sasgui/perspectives/fitting/fitting.py
(r489f53a → r2d9526d)

         @param unsmeared_error: data error, rescaled to unsmeared model
         """
-
         number_finite = np.count_nonzero(np.isfinite(y))
         np.nan_to_num(y)
…
                               data_description=model.name,
                               data_id=str(page_id) + " " + data.name)
+        plots_to_update = []  # List of plottables that have changed since last calculation
+        # Create the new theories
         if unsmeared_model is not None:
-            self.create_theory_1D(x, unsmeared_model, page_id, model, data, state,
+            unsmeared_model_plot = self.create_theory_1D(x, unsmeared_model,
+                                   page_id, model, data, state,
                                   data_description=model.name + " unsmeared",
                                   data_id=str(page_id) + " " + data.name + " unsmeared")
+            plots_to_update.append(unsmeared_model_plot)

         if unsmeared_data is not None and unsmeared_error is not None:
-            self.create_theory_1D(x, unsmeared_data, page_id, model, data, state,
+            unsmeared_data_plot = self.create_theory_1D(x, unsmeared_data,
+                                   page_id, model, data, state,
                                   data_description="Data unsmeared",
                                   data_id="Data " + data.name + " unsmeared",
                                   dy=unsmeared_error)
-        # Comment this out until we can get P*S models with correctly populated parameters
-        #if sq_model is not None and pq_model is not None:
-        #    self.create_theory_1D(x, sq_model, page_id, model, data, state,
-        #                          data_description=model.name + " S(q)",
-        #                          data_id=str(page_id) + " " + data.name + " S(q)")
-        #    self.create_theory_1D(x, pq_model, page_id, model, data, state,
-        #                          data_description=model.name + " P(q)",
-        #                          data_id=str(page_id) + " " + data.name + " P(q)")
+            plots_to_update.append(unsmeared_data_plot)
+        if sq_model is not None and pq_model is not None:
+            sq_id = str(page_id) + " " + data.name + " S(q)"
+            sq_plot = self.create_theory_1D(x, sq_model, page_id, model, data, state,
+                                            data_description=model.name + " S(q)",
+                                            data_id=sq_id)
+            plots_to_update.append(sq_plot)
+            pq_id = str(page_id) + " " + data.name + " P(q)"
+            pq_plot = self.create_theory_1D(x, pq_model, page_id, model, data, state,
+                                            data_description=model.name + " P(q)",
+                                            data_id=pq_id)
+            plots_to_update.append(pq_plot)
+        # Update the P(Q), S(Q) and unsmeared theory plots if they exist
+        wx.PostEvent(self.parent, NewPlotEvent(plots=plots_to_update,
+                                               action='update'))

         current_pg = self.fit_panel.get_page_by_id(page_id)
src/sas/sasgui/perspectives/fitting/media/fitting_help.rst
(r5295cf5 → r05b0bf6)

 .. ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ

+.. _Batch_Fit_Mode:
+
 Batch Fit Mode
 --------------
…

 Example: radius [2 : 5] , radius [10 : 25]
-
-.. ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
-
-.. note:: This help document was last changed by Steve King, 10Oct2016
+
+.. ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
+
+Combined Batch Fit Mode
+-----------------------
+
+The purpose of the Combined Batch Fit is to allow running two or more batch
+fits in sequence without overwriting the output table of results. This may be
+of interest for example if one is fitting a series of data sets where there is
+a shape change occurring in the series that requires changing the model part
+way through the series; for example a sphere to rod transition. Indeed the
+regular batch mode does not allow for multiple models and requires all the
+files in the series to be fit with single model and set of parameters. While
+it is of course possible to just run part of the series as a batch fit using
+model one followed by running another batch fit on the rest of the series with
+model two (and/or model three etc), doing so will overwrite the table of
+outputs from the previous batch fit(s). This may not be desirable if one is
+interested in comparing the parameters: for example the sphere radius of set
+one and the cylinder radius of set two.
+
+Method
+^^^^^^
+
+In order to use the *Combined Batch Fit*, first load all the data needed as
+described in :ref:`Loading_data`. Next start up two or more *BatchPage* fits
+following the instructions in :ref:`Batch_Fit_Mode` but **DO NOT PRESS FIT**.
+At this point the *Combine Batch Fit* menu item under the *Fitting menu* should
+be active (if there is one or no *BatchPage* the menu item will be greyed out
+and inactive). Clicking on *Combine Batch Fit* will bring up a new panel,
+similar to the *Const & Simult Fit* panel. In this case there will be a
+checkbox for each *BatchPage* instead of each *FitPage* that should be included
+in the fit. Once all are selected, click the Fit button on
+the *BatchPage* to run each batch fit in *sequence*
+
+.. image:: combine_batch_page.png
+
+The batch table will then pop up at the end as for the case of the simple Batch
+Fitting with the following caveats:
+
+.. note::
+   The order matters. The parameters in the table will be taken from the model
+   used in the first *BatchPage* of the list. Any parameters from the
+   second and later *BatchPage* s that have the same name as a parameter in the
+   first will show up allowing for plotting of that parameter across the
+   models. The other parameters will not be available in the grid.
+.. note::
+   a corralary of the above is that currently models created as a sum|multiply
+   model will not work as desired because the generated model parameters have a
+   p#_ appended to the beginning and thus radius and p1_radius will not be
+   recognized as the same parameter.
+
+.. image:: combine_batch_grid.png
+
+In the example shown above the data is a time series with a shifting peak.
+The first part of the series was fitted using the *broad_peak* model, while
+the rest of the data were fit using the *gaussian_peak* model. Unfortunately the
+time is not listed in the file but the file name contains the information. As
+described in :ref:`Grid_Window`, a column can be added manually, in this case
+called time, and the peak position plotted against time.
+
+.. image:: combine_batch_plot.png
+
+Note the discontinuity in the peak position. This reflects the fact that the
+Gaussian fit is a rather poor model for the data and is not actually
+finding the peak.
+
+.. ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
+
+.. note:: This help document was last changed by Paul Butler, 10 September
+   2017
src/sas/sasgui/perspectives/fitting/model_thread.py
(r7432acb → r0f9ea1c)

                           (self.data.qy_data * self.data.qy_data))

-        # For theory, qmax is based on 1d qmax 
+        # For theory, qmax is based on 1d qmax
         # so that must be mulitified by sqrt(2) to get actual max for 2d
         index_model = (self.qmin <= radius) & (radius <= self.qmax)
…
             self.data.qy_data[index_model]
         ])
-        output = np.zeros(len(self.data.qx_data))
+        # Initialize output to NaN so masked elements do not get plotted.
+        output = np.empty_like(self.data.qx_data)
         # output default is None
         # This method is to distinguish between masked
         #point(nan) and data point = 0.
-        output = output / output
+        output[:] = np.NaN
         # set value for self.mask==True, else still None to Plottools
         output[index_model] = value
…
         output[index] = self.model.evalDistribution(self.data.x[index])

+        x=self.data.x[index]
+        y=output[index]
         sq_values = None
         pq_values = None
-        s_model = None
-        p_model = None
         if isinstance(self.model, MultiplicationModel):
             s_model = self.model.s_model
             p_model = self.model.p_model
-        elif hasattr(self.model, "get_composition_models"):
-            p_model, s_model = self.model.get_composition_models()
-
-        if p_model is not None and s_model is not None:
-            sq_values = np.zeros((len(self.data.x)))
-            pq_values = np.zeros((len(self.data.x)))
-            sq_values[index] = s_model.evalDistribution(self.data.x[index])
-            pq_values[index] = p_model.evalDistribution(self.data.x[index])
+            sq_values = s_model.evalDistribution(x)
+            pq_values = p_model.evalDistribution(x)
+        elif hasattr(self.model, "calc_composition_models"):
+            results = self.model.calc_composition_models(x)
+            if results is not None:
+                pq_values, sq_values = results

         elapsed = time.time() - self.starttime

-        self.complete(x=self.data.x[index], y=output[index],
+        self.complete(x=x, y=y,
                       page_id=self.page_id,
                       state=self.state,
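The switch from a zero-filled to a NaN-filled output array makes masked q points disappear from the plot instead of showing up as zero intensity. A small numpy sketch of the idea (illustrative only; the power-law stands in for model.evalDistribution):

    import numpy as np

    qx = np.linspace(0.001, 0.5, 10)
    mask = qx < 0.3                  # points the model is actually evaluated on

    # NaN everywhere by default, so unevaluated (masked-out) points are not
    # mistaken for measured zeros and most plotting tools simply skip them.
    output = np.empty_like(qx)
    output[:] = np.nan
    output[mask] = qx[mask] ** -2    # stand-in for the model evaluation

    print(output)                    # finite values inside the mask, NaN outside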
src/sas/sasgui/perspectives/fitting/simfitpage.py
(r959eb01 → ra9f9ca4)

 """
-Simultaneous fit page
+Simultaneous or Batch fit page
 """
+# Note that this is used for both Simultaneous/Constrained fit AND for
+# combined batch fit. This is done through setting of the batch_on parameter.
+# There are the a half dozen or so places where an if statement is used as in
+# if not batch_on:
+#     xxxx
+# else:
+#     xxxx
+# This is just wrong but dont have time to fix this go. Proper approach would be
+# to strip all parts of the code that depend on batch_on and create the top
+# level class from which a contrained/simultaneous fit page and a combined
+# batch page inherit.
+#
+# 04/09/2017 --PDB
+
 import sys
 from collections import namedtuple
…
         # General Help button
         self.btHelp = wx.Button(self, wx.ID_HELP, 'HELP')
-        self.btHelp.SetToolTipString("Simultaneous/Constrained Fitting help.")
+        if self.batch_on:
+            self.btHelp.SetToolTipString("Combined Batch Fitting help.")
+        else:
+            self.btHelp.SetToolTipString("Simultaneous/Constrained Fitting help.")
         self.btHelp.Bind(wx.EVT_BUTTON, self._on_help)

…
         """
         _TreeLocation = "user/sasgui/perspectives/fitting/fitting_help.html"
-        _PageAnchor = "#simultaneous-fit-mode"
-        _doc_viewer = DocumentationWindow(self, self.ID_DOC, _TreeLocation,
-                                          _PageAnchor,
-                                          "Simultaneous/Constrained Fitting Help")
+        if not self.batch_on:
+            _PageAnchor = "#simultaneous-fit-mode"
+            _doc_viewer = DocumentationWindow(self, self.ID_DOC, _TreeLocation,
+                                              _PageAnchor,
+                                              "Simultaneous/Constrained Fitting Help")
+        else:
+            _PageAnchor = "#combined-batch-fit-mode"
+            _doc_viewer = DocumentationWindow(self, self.ID_DOC, _TreeLocation,
+                                              _PageAnchor,
+                                              "Combined Batch Fit Help")

     def set_manager(self, manager):
src/sas/sasgui/plottools/plottables.py
(r45dffa69 → r2d9526d)

     def replace(self, plottable):
         """Replace an existing plottable from the graph"""
-        selected_color = None
+        # If the user has set a custom color, ensure the new plot is the same color
+        selected_color = plottable.custom_color
         selected_plottable = None
         for p in self.plottables.keys():
             if plottable.id == p.id:
                 selected_plottable = p
-                selected_color = self.plottables[p]
+                if selected_color is None:
+                    selected_color = self.plottables[p]
                 break
-        if …
+        if selected_plottable is not None and selected_color is not None:
             del self.plottables[selected_plottable]
+            plottable.custom_color = selected_color
             self.plottables[plottable] = selected_color
src/sas/sascalc/dataloader/file_reader_base_class.py
(ra78a02f → r4660990)

         data.y = np.asarray([data.y[i] for i in ind]).astype(np.float64)
         if data.dx is not None:
+            if len(data.dx) == 0:
+                data.dx = None
+                continue
             data.dx = np.asarray([data.dx[i] for i in ind]).astype(np.float64)
         if data.dxl is not None:
…
             data.dxw = np.asarray([data.dxw[i] for i in ind]).astype(np.float64)
         if data.dy is not None:
+            if len(data.dy) == 0:
+                data.dy = None
+                continue
             data.dy = np.asarray([data.dy[i] for i in ind]).astype(np.float64)
         if data.lam is not None:
…
             x = np.zeros(no_lines)
             y = np.zeros(no_lines)
+            dx = np.zeros(no_lines)
             dy = np.zeros(no_lines)
-            dx = np.zeros(no_lines)
             self.current_dataset = plottable_1D(x, y, dx, dy)

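The new guards treat a zero-length dx/dy array the same as "no uncertainties supplied", so the reordering step below it cannot hit an IndexError. A tiny illustrative check along the same lines (standalone sketch, not the reader itself; the function name is hypothetical):

    import numpy as np

    def reorder_uncertainty(dy, ind):
        """Return uncertainty values reordered by ind, or None if nothing was supplied."""
        if dy is None or len(dy) == 0:
            return None                  # treat an empty array as "not provided"
        return np.asarray([dy[i] for i in ind]).astype(np.float64)

    ind = [2, 0, 1]
    print(reorder_uncertainty(np.array([0.1, 0.2, 0.3]), ind))  # reordered values
    print(reorder_uncertainty(np.array([]), ind))               # None, no crash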
src/sas/sascalc/dataloader/readers/cansas_reader.py
(ra78a02f → rc54c965)

             self.current_datainfo.meta_data[PREPROCESS] = self.processing_instructions
             self._parse_entry(entry)
-            has_error_dx = self.current_dataset.dx is not None
-            has_error_dy = self.current_dataset.dy is not None
-            self.remove_empty_q_values(has_error_dx=has_error_dx,
-                                       has_error_dy=has_error_dy)
-            self.send_to_output() # Combine datasets with DataInfo
-            self.current_datainfo = DataInfo() # Reset DataInfo
+            self.data_cleanup()
         except FileContentsException as fc_exc:
             # File doesn't meet schema - try loading with a less strict schema
…
             self.load_file_and_schema(xml_file) # Reload strict schema so we can find where error are in file
             invalid_xml = self.find_invalid_xml()
-            invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml
-            raise DataReaderException(invalid_xml) # Handled by base class
+            if invalid_xml != "":
+                invalid_xml = INVALID_XML.format(basename + self.extension) + invalid_xml
+                raise DataReaderException(invalid_xml) # Handled by base class
         except FileContentsException as fc_exc:
             msg = "CanSAS Reader could not load the file {}".format(xml_file)
…
         # I and Q points
         elif tagname == 'I' and isinstance(self.current_dataset, plottable_1D):
-            unit_list = unit.split("|")
-            if len(unit_list) > 1:
-                self.current_dataset.yaxis(unit_list[0].strip(),
-                                           unit_list[1].strip())
-            else:
-                self.current_dataset.yaxis("Intensity", unit)
+            self.current_dataset.yaxis("Intensity", unit)
             self.current_dataset.y = np.append(self.current_dataset.y, data_point)
         elif tagname == 'Idev' and isinstance(self.current_dataset, plottable_1D):
             self.current_dataset.dy = np.append(self.current_dataset.dy, data_point)
         elif tagname == 'Q':
-            unit_list = unit.split("|")
-            if len(unit_list) > 1:
-                self.current_dataset.xaxis(unit_list[0].strip(),
-                                           unit_list[1].strip())
-            else:
-                self.current_dataset.xaxis("Q", unit)
+            self.current_dataset.xaxis("Q", unit)
             self.current_dataset.x = np.append(self.current_dataset.x, data_point)
         elif tagname == 'Qdev':
…
         elif tagname == 'Sesans':
             self.current_datainfo.isSesans = bool(data_point)
+            self.current_dataset.xaxis(attr.get('x_axis'),
+                                       attr.get('x_unit'))
+            self.current_dataset.yaxis(attr.get('y_axis'),
+                                       attr.get('y_unit'))
         elif tagname == 'yacceptance':
             self.current_datainfo.sample.yacceptance = (data_point, unit)
…
         for error in self.errors:
             self.current_datainfo.errors.add(error)
-        self.errors.clear()
-        self.send_to_output()
+        self.data_cleanup()
+        self.sort_one_d_data()
+        self.sort_two_d_data()
+        self.reset_data_list()
         empty = None
         return self.output[0], empty
+
+    def data_cleanup(self):
+        """
+        Clean up the data sets and refresh everything
+        :return: None
+        """
+        has_error_dx = self.current_dataset.dx is not None
+        has_error_dy = self.current_dataset.dy is not None
+        self.remove_empty_q_values(has_error_dx=has_error_dx,
+                                   has_error_dy=has_error_dy)
+        self.send_to_output()  # Combine datasets with DataInfo
+        self.current_datainfo = DataInfo()  # Reset DataInfo

     def _is_call_local(self):
…
                     value_unit = local_unit
                 except KeyError:
-                    err_msg = "CanSAS reader: unexpected "
-                    err_msg += "\"{0}\" unit [{1}]; "
-                    err_msg = err_msg.format(tagname, local_unit)
-                    err_msg += "expecting [{0}]".format(default_unit)
+                    # Do not throw an error for loading Sesans data in cansas xml
+                    # This is a temporary fix.
+                    if local_unit != "A" and local_unit != 'pol':
+                        err_msg = "CanSAS reader: unexpected "
+                        err_msg += "\"{0}\" unit [{1}]; "
+                        err_msg = err_msg.format(tagname, local_unit)
+                        err_msg += "expecting [{0}]".format(default_unit)
                     value_unit = local_unit
                 except:
…
             node.append(point)
             self.write_node(point, "Q", datainfo.x[i],
-                            {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+                            {'unit': datainfo.x_unit})
             if len(datainfo.y) >= i:
                 self.write_node(point, "I", datainfo.y[i],
-                                {'unit': datainfo._yaxis + " | " + datainfo._yunit})
+                                {'unit': datainfo.y_unit})
             if datainfo.dy is not None and len(datainfo.dy) > i:
                 self.write_node(point, "Idev", datainfo.dy[i],
-                                {'unit': datainfo._yaxis + " | " + datainfo._yunit})
+                                {'unit': datainfo.y_unit})
             if datainfo.dx is not None and len(datainfo.dx) > i:
                 self.write_node(point, "Qdev", datainfo.dx[i],
-                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+                                {'unit': datainfo.x_unit})
             if datainfo.dxw is not None and len(datainfo.dxw) > i:
                 self.write_node(point, "dQw", datainfo.dxw[i],
-                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+                                {'unit': datainfo.x_unit})
             if datainfo.dxl is not None and len(datainfo.dxl) > i:
                 self.write_node(point, "dQl", datainfo.dxl[i],
-                                {'unit': datainfo._xaxis + " | " + datainfo._xunit})
+                                {'unit': datainfo.x_unit})
         if datainfo.isSesans:
-            sesans = self.create_element("Sesans")
+            sesans_attrib = {'x_axis': datainfo._xaxis,
+                             'y_axis': datainfo._yaxis,
+                             'x_unit': datainfo.x_unit,
+                             'y_unit': datainfo.y_unit}
+            sesans = self.create_element("Sesans", attrib=sesans_attrib)
             sesans.text = str(datainfo.isSesans)
-            node.append(sesans)
-            self.write_node(node, "yacceptance", datainfo.sample.yacceptance[0],
+            entry_node.append(sesans)
+            self.write_node(entry_node, "yacceptance", datainfo.sample.yacceptance[0],
                             {'unit': datainfo.sample.yacceptance[1]})
-            self.write_node(node, "zacceptance", datainfo.sample.zacceptance[0],
+            self.write_node(entry_node, "zacceptance", datainfo.sample.zacceptance[0],
                             {'unit': datainfo.sample.zacceptance[1]})

src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
(rdcb91cf → rcd57c7d4)

         if isinstance(value, h5py.Group):
+            # Set parent class before recursion
             self.parent_class = class_name
             parent_list.append(key)
…
             # Recursion step to access data within the group
             self.read_children(value, parent_list)
+            # Reset parent class when returning from recursive method
+            self.parent_class = class_name
             self.add_intermediate()
             parent_list.remove(key)
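The added line restores self.parent_class after the recursive call returns, so siblings processed after a nested group are not tagged with the nested group's name. A small standalone sketch of that save-and-restore pattern around recursion (illustrative only, not the HDF5 reader; the TreeWalker class is hypothetical):

    class TreeWalker(object):
        """Walk a nested dict, remembering which group each leaf value belongs to."""

        def __init__(self):
            self.parent_class = None
            self.visited = []

        def read_children(self, group, group_name):
            # Set the parent before descending into this group...
            self.parent_class = group_name
            for key, value in group.items():
                if isinstance(value, dict):
                    self.read_children(value, key)
                    # ...and reset it when the recursive call returns, so later
                    # siblings are not attributed to the nested group.
                    self.parent_class = group_name
                else:
                    self.visited.append((self.parent_class, key))

    walker = TreeWalker()
    walker.read_children({"sasentry": {"sasdata": {"I": 1.0}, "title": "run42"}}, "root")
    print(walker.visited)   # [('sasdata', 'I'), ('sasentry', 'title')]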
src/sas/sascalc/dataloader/readers/xml_reader.py
(rfafe52a → rcd57c7d4)

             first_error = schema.assertValid(self.xmldoc)
         except etree.DocumentInvalid as err:
+            # Suppress errors for <'any'> elements
+            if "##other" in str(err):
+                return first_error
             first_error = str(err)
         return first_error
src/sas/sascalc/invariant/invariant.py
(r7432acb → rb1f20d1)

         # Data boundaries for fitting
         qmin = self._data.x[0]
-        qmax = self._data.x[self._low_extrapolation_npts - 1]
+        qmax = self._data.x[int(self._low_extrapolation_npts - 1)]

         # Extrapolate the low-Q data
…
         # Data boundaries for fitting
         x_len = len(self._data.x) - 1
-        qmin = self._data.x[x_len - (self._high_extrapolation_npts - 1)]
-        qmax = self._data.x[x_len]
+        qmin = self._data.x[int(x_len - (self._high_extrapolation_npts - 1))]
+        qmax = self._data.x[int(x_len)]

         # fit the data with a model to get the appropriate parameters
…
         if npts_in is None:
             npts_in = self._low_extrapolation_npts
-        q_end = self._data.x[max(0, npts_in - 1)]
+        q_end = self._data.x[max(0, int(npts_in - 1))]

         if q_start >= q_end:
…
         # Get extrapolation range
         if npts_in is None:
-            npts_in = self._high_extrapolation_npts
+            npts_in = int(self._high_extrapolation_npts)
         _npts = len(self._data.x)
-        q_start = self._data.x[min(_npts, _npts - npts_in)]
+        q_start = self._data.x[min(_npts, int(_npts - npts_in))]

         if q_start >= q_end:
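The int() casts matter because numpy refuses float-valued indices: if the number of extrapolation points is stored as a float, indexing x with it raises an IndexError. A minimal demonstration with illustrative values:

    import numpy as np

    x = np.linspace(0.001, 0.3, 50)
    npts = 4.0                     # e.g. a point count that arrived as a float

    try:
        q_end = x[npts - 1]        # float index: numpy raises IndexError
    except IndexError as exc:
        print("float index rejected:", exc)

    q_end = x[int(npts - 1)]       # explicit cast restores the intended lookup
    print(q_end)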
src/sas/sasgui/guiframe/config.py
(ra1b8fee → rce2819b)

 '''This work benefited from the use of the SasView application, originally developed under NSF Award DMR-0520547. SasView also contains code developed with funding from the EU Horizon 2020 programme under the SINE2020 project Grant No 654000.'''
 _acknowledgement_citation = \
-'''M. Doucet et al. SasView Version 4.1, Zenodo, 10.5281/zenodo.438138'''
+'''M. Doucet et al. SasView Version 4.1.2, Zenodo, 10.5281/zenodo.825675'''

 _acknowledgement = \
src/sas/sasgui/guiframe/documentation_window.py
(r959eb01 → r6a455cd3)

             logger.error("Could not find Sphinx documentation at %s \
 -- has it been built?", file_path)
-        elif WX_SUPPORTS_HTML2:
-            # Complete HTML/CSS support!
-            self.view = html.WebView.New(self)
-            self.view.LoadURL(url)
-            self.Show()
+        #Commenting following 5 lines, so default browser is forced
+        #This is due to CDN mathjax discontinuation of service, intenal help
+        #browser should be back with qt version
+        #Note added by Wojtek Potrzebowski, July 4th 2017
+        # elif WX_SUPPORTS_HTML2:
+        #     # Complete HTML/CSS support!
+        #     self.view = html.WebView.New(self)
+        #     self.view.LoadURL(url)
+        #     self.Show()
         else:
             logger.error("No html2 support, popping up a web browser")
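With the wx.html2 branch commented out, help requests fall through to the plain web-browser path. A rough standalone sketch of that kind of fallback, assuming only the standard library (the function, path and anchor here are hypothetical; the real class builds its URL from the Sphinx build tree):

    import os
    import webbrowser

    def show_help(file_path, anchor=""):
        """Open a local Sphinx HTML page in the system default browser, if it exists."""
        if not os.path.exists(file_path):
            print("Could not find Sphinx documentation at %s -- has it been built?"
                  % file_path)
            return
        # Hand a file:// URL plus optional #anchor to the default browser.
        webbrowser.open("file://" + os.path.abspath(file_path) + anchor)

    # Hypothetical call mirroring the fitting help anchors used elsewhere in this changeset.
    show_help("docs/sphinx-docs/build/html/user/sasgui/perspectives/fitting/fitting_help.html",
              "#combined-batch-fit-mode")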
src/sas/sasgui/perspectives/fitting/fitpage.py
(red2276f → r6a455cd3)

             wx.PostEvent(self.parent, new_event)
         # update list of plugins if new plugin is available
-        custom_model = CUSTOM_MODEL
         mod_cat = self.categorybox.GetStringSelection()
-        if mod_cat == custom_model:
+        if mod_cat == CUSTOM_MODEL:
+            temp_id = self.model.id
             temp = self.parent.update_model_list()
+            for v in self.parent.model_dictionary.values():
+                if v.id == temp_id:
+                    self.model = v()
+                    break
             if temp:
                 self.model_list_box = temp
src/sas/sasgui/perspectives/fitting/fitpanel.py
(r67b0a99 → rc9ecd1b)

             # state must be cloned
             state = page.get_state().clone()
-            if data is not None or page.model is not None:
+            # data_list only populated with real data
+            # Fake object in data from page.get_data() if model is selected
+            if len(page.data_list) is not 0 and page.model is not None:
                 new_doc = self._manager.state_reader.write_toXML(data,
                                                                  state,
                                                                  batch_state)
+                # Fit #2 through #n are append to first fit
                 if doc is not None and hasattr(doc, "firstChild"):
-                    child = new_doc.firstChild.firstChild
-                    doc.firstChild.appendChild(child)
+                    # Only append if properly formed new_doc
+                    if new_doc is not None and hasattr(new_doc, "firstChild"):
+                        child = new_doc.firstChild.firstChild
+                        doc.firstChild.appendChild(child)
+                # First fit defines the main document
                 else:
                     doc = new_doc
…
             temp_data = page.get_data()
             if temp_data is not None and temp_data.id in data:
-                self.SetSelection(pos)
-                self.on_close_page(event=None)
-                temp = self.GetSelection()
-                self.DeletePage(temp)
+                self.close_page_with_data(temp_data)
         if self.sim_page is not None:
             if len(self.sim_page.model_list) == 0:
…
                 self.SetSelection(pos)
                 self.on_close_page(event=None)
-                temp = self.GetSelection()
-                self.DeletePage(temp)
+                self.DeletePage(pos)
                 self.sim_page = None
                 self.batch_on = False
src/sas/sasgui/perspectives/fitting/models.py
(rb1c2011 → rb682c6a)

 from sas.sasgui.guiframe.CategoryInstaller import CategoryInstaller
 from sasmodels.sasview_model import load_custom_model, load_standard_models
+from sas.sasgui.perspectives.fitting.fitpage import CUSTOM_MODEL

 logger = logging.getLogger(__name__)
…
         temp = {}
         if self.is_changed():
-            return _find_models()
+            temp = _find_models()
+            self.last_time_dir_modified = time.time()
+            return temp
         logger.info("plugin model : %s" % str(temp))
         return temp
…
         if os.path.isdir(plugin_dir):
             temp = os.path.getmtime(plugin_dir)
-            if self.last_time_dir_modified != temp:
+            if self.last_time_dir_modified < temp:
                 is_modified = True
             self.last_time_dir_modified = temp
…
         new models were added else return empty dictionary
         """
+        self.plugins = []
         new_plugins = self.findModels()
-        if len(new_plugins) > 0:
-            for name, plug in new_plugins.iteritems():
-                if name not in self.stored_plugins.keys():
-                    self.stored_plugins[name] = plug
-                    self.plugins.append(plug)
-                    self.model_dictionary[name] = plug
-            self.model_combobox.set_list("Plugin Models", self.plugins)
+        if new_plugins:
+            for name, plug in new_plugins.items():
+                self.stored_plugins[name] = plug
+                self.plugins.append(plug)
+                self.model_dictionary[name] = plug
+            self.model_combobox.set_list(CUSTOM_MODEL, self.plugins)
             return self.model_combobox.get_list()
         else:
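The reload check now asks whether the plugin directory is strictly newer than the last recorded modification time, rather than merely different from it. An illustrative mtime-based watcher along those lines (standalone sketch, simplified; the PluginWatcher class is hypothetical):

    import os

    class PluginWatcher(object):
        """Reload plugin models only when the plugin directory has changed on disk."""

        def __init__(self, plugin_dir):
            self.plugin_dir = plugin_dir
            self.last_time_dir_modified = 0

        def is_changed(self):
            """Return True once per directory modification, not repeatedly."""
            if os.path.isdir(self.plugin_dir):
                mtime = os.path.getmtime(self.plugin_dir)
                if self.last_time_dir_modified < mtime:   # strictly newer, not just different
                    self.last_time_dir_modified = mtime
                    return True
            return False

    watcher = PluginWatcher(".")
    print(watcher.is_changed())   # True the first time (0 < current mtime)
    print(watcher.is_changed())   # False until the directory is modified again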