Changeset 186d678 in sasview for src/sas/qtgui/Utilities
- Timestamp:
- Oct 31, 2018 6:08:16 AM (6 years ago)
- Branches:
- ESS_GUI, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc
- Children:
- 48df831
- Parents:
- b1b71ad (diff), 04e1c80 (diff)
Note: this is a merge changeset; the changes displayed below correspond to the merge itself.
Use the (diff) links above to see all the changes relative to each parent. - Location:
- src/sas/qtgui/Utilities
- Files:
-
- 2 edited
Legend:
- Unmodified
- Added
- Removed
-
src/sas/qtgui/Utilities/ResultPanel.py
r8748751 r0c83303 55 55 sys.modules['bumps.gui.plot_view'] = PlotView 56 56 57 def onPlotResults(self, results ):57 def onPlotResults(self, results, optimizer="Unknown"): 58 58 # Clear up previous results 59 59 for view in (self.convergenceView, self.correlationView, 60 60 self.uncertaintyView, self.traceView): 61 61 view.close() 62 # close all tabs. REMEMBER TO USE REVERSED RANGE!!! 63 for index in reversed(range(self.count())): 64 self.removeTab(index) 62 65 63 66 result = results[0][0] 64 67 filename = result.data.sas_data.filename 65 current_ time = datetime.datetime.now().strftime("%I:%M%p, %B %d, %Y")66 self.setWindowTitle(self.window_name + " - " + filename + " - " + current_ time)67 if hasattr(result, 'convergence') :68 current_optimizer = optimizer 69 self.setWindowTitle(self.window_name + " - " + filename + " - " + current_optimizer) 70 if hasattr(result, 'convergence') and len(result.convergence) > 0: 68 71 best, pop = result.convergence[:, 0], result.convergence[:, 1:] 69 72 self.convergenceView.update(best, pop) … … 89 92 for view in (self.correlationView, self.uncertaintyView, self.traceView): 90 93 view.close() 94 # no tabs in the widget - possibly LM optimizer. Mark "closed" 95 if self.count()==0: 96 self.close() 91 97 92 98 def closeEvent(self, event): -
src/sas/qtgui/Utilities/GuiUtils.py
raed159f rb1b71ad 11 11 import webbrowser 12 12 import urllib.parse 13 import json 14 from io import BytesIO 13 15 14 16 import numpy as np … … 26 28 from sas.qtgui.Plotting.PlotterData import Data1D 27 29 from sas.qtgui.Plotting.PlotterData import Data2D 30 from sas.qtgui.Plotting.Plottables import Plottable 31 from sas.sascalc.dataloader.data_info import Sample, Source, Vector 32 from sas.qtgui.Plotting.Plottables import View 33 from sas.qtgui.Plotting.Plottables import PlottableTheory1D 34 from sas.qtgui.Plotting.Plottables import PlottableFit1D 35 from sas.qtgui.Plotting.Plottables import Text 36 from sas.qtgui.Plotting.Plottables import Chisq 37 from sas.qtgui.MainWindow.DataState import DataState 38 28 39 from sas.sascalc.dataloader.loader import Loader 29 40 from sas.qtgui.Utilities import CustomDir … … 257 268 sendDataToGridSignal = QtCore.pyqtSignal(list) 258 269 259 # Action Save Analysis triggered260 saveAnalysisSignal = QtCore.pyqtSignal()261 262 270 # Mask Editor requested 263 271 maskEditorSignal = QtCore.pyqtSignal(Data2D) … … 287 295 resultPlotUpdateSignal = QtCore.pyqtSignal(list) 288 296 289 def updateModelItemWithPlot(item, update_data, name="" ):297 def updateModelItemWithPlot(item, update_data, name="", checkbox_state=None): 290 298 """ 291 299 Adds a checkboxed row named "name" to QStandardItem … … 312 320 # Force redisplay 313 321 return 314 315 322 # Create the new item 316 323 checkbox_item = createModelItemWithPlot(update_data, name) 317 324 325 if checkbox_state is not None: 326 checkbox_item.setCheckState(checkbox_state) 318 327 # Append the new row to the main item 319 328 item.appendRow(checkbox_item) … … 566 575 if isinstance(data.process, list) and data.process: 567 576 for process in data.process: 577 if process is None: 578 continue 568 579 process_date = process.date 569 580 process_date_item = QtGui.QStandardItem("Date: " + process_date) … … 1140 1151 return result 1141 1152 1153 def saveData(fp, data): 1154 """ 1155 save content 
of data to fp (a .write()-supporting file-like object) 1156 """ 1157 1158 def add_type(dict, type): 1159 dict['__type__'] = type.__name__ 1160 return dict 1161 1162 def jdefault(o): 1163 """ 1164 objects that can't otherwise be serialized need to be converted 1165 """ 1166 # tuples and sets (TODO: default JSONEncoder converts tuples to lists, create custom Encoder that preserves tuples) 1167 if isinstance(o, (tuple, set)): 1168 content = { 'data': list(o) } 1169 return add_type(content, type(o)) 1170 1171 # "simple" types 1172 if isinstance(o, (Sample, Source, Vector)): 1173 return add_type(o.__dict__, type(o)) 1174 if isinstance(o, (Plottable, View)): 1175 return add_type(o.__dict__, type(o)) 1176 1177 # DataState 1178 if isinstance(o, (Data1D, Data2D)): 1179 # don't store parent 1180 content = o.__dict__.copy() 1181 #content.pop('parent') 1182 return add_type(content, type(o)) 1183 1184 # ndarray 1185 if isinstance(o, np.ndarray): 1186 buffer = BytesIO() 1187 np.save(buffer, o) 1188 buffer.seek(0) 1189 content = { 'data': buffer.read().decode('latin-1') } 1190 return add_type(content, type(o)) 1191 1192 # not supported 1193 logging.info("data cannot be serialized to json: %s" % type(o)) 1194 return None 1195 1196 json.dump(data, fp, indent=2, sort_keys=True, default=jdefault) 1197 1198 def readDataFromFile(fp): 1199 ''' 1200 ''' 1201 supported = [ 1202 tuple, set, 1203 Sample, Source, Vector, 1204 Plottable, Data1D, Data2D, PlottableTheory1D, PlottableFit1D, Text, Chisq, View, 1205 DataState, np.ndarray] 1206 1207 lookup = dict((cls.__name__, cls) for cls in supported) 1208 1209 class TooComplexException(Exception): 1210 pass 1211 1212 def simple_type(cls, data, level): 1213 class Empty(object): 1214 def __init__(self): 1215 for key, value in data.items(): 1216 setattr(self, key, generate(value, level)) 1217 1218 # create target object 1219 o = Empty() 1220 o.__class__ = cls 1221 1222 return o 1223 1224 def construct(type, data, level): 1225 try: 1226 cls = 
lookup[type] 1227 except KeyError: 1228 logging.info('unknown type: %s' % type) 1229 return None 1230 1231 # tuples and sets 1232 if cls in (tuple, set): 1233 # convert list to tuple/set 1234 return cls(generate(data['data'], level)) 1235 1236 # "simple" types 1237 if cls in (Sample, Source, Vector): 1238 return simple_type(cls, data, level) 1239 if issubclass(cls, Plottable) or (cls == View): 1240 return simple_type(cls, data, level) 1241 1242 # DataState 1243 if cls == DataState: 1244 o = simple_type(cls, data, level) 1245 o.parent = None # TODO: set to ??? 1246 return o 1247 1248 # ndarray 1249 if cls == np.ndarray: 1250 buffer = BytesIO() 1251 buffer.write(data['data'].encode('latin-1')) 1252 buffer.seek(0) 1253 return np.load(buffer) 1254 1255 logging.info('not implemented: %s, %s' % (type, cls)) 1256 return None 1257 1258 def generate(data, level): 1259 if level > 16: # recursion limit (arbitrary number) 1260 raise TooComplexException() 1261 else: 1262 level += 1 1263 1264 if isinstance(data, dict): 1265 try: 1266 type = data['__type__'] 1267 except KeyError: 1268 # if dictionary doesn't have __type__ then it is assumed to be just an ordinary dictionary 1269 o = {} 1270 for key, value in data.items(): 1271 o[key] = generate(value, level) 1272 return o 1273 1274 return construct(type, data, level) 1275 1276 if isinstance(data, list): 1277 return [generate(item, level) for item in data] 1278 1279 return data 1280 1281 new_stored_data = {} 1282 for id, data in json.load(fp).items(): 1283 try: 1284 new_stored_data[id] = generate(data, 0) 1285 except TooComplexException: 1286 logging.info('unable to load %s' % id) 1287 1288 return new_stored_data 1289 1290 def readProjectFromSVS(filepath): 1291 """ 1292 Read old SVS file and convert to the project dictionary 1293 """ 1294 from sas.sascalc.dataloader.readers.cansas_reader import Reader as CansasReader 1295 from sas.sascalc.fit.pagestate import Reader 1296 1297 loader = Loader() 1298 
loader.associate_file_reader('.svs', Reader) 1299 temp = loader.load(filepath) 1300 state_reader = Reader() 1301 data_svs, state_svs = state_reader.read(filepath) 1302 1303 output = [] 1304 if isinstance(temp, list) and isinstance(state_svs, list): 1305 for item, state in zip(temp, state_svs): 1306 output.append([item, state]) 1307 else: 1308 output[temp, state_svs] 1309 return output 1310 1311 def convertFromSVS(datasets): 1312 """ 1313 Read in properties from SVS and convert into a simple dict 1314 """ 1315 content = {} 1316 for dataset in datasets: 1317 # we already have data - interested only in properties 1318 #[[item_1, state_1], [item_2, state_2],...] 1319 data = dataset[0] 1320 params = dataset[1] 1321 content[params.data_id] = {} 1322 content[params.data_id]['fit_data'] = [data, {'checked': 2}, []] 1323 param_dict = {} 1324 param_dict['fitpage_category'] = [params.categorycombobox] 1325 param_dict['fitpage_model'] = [params.formfactorcombobox] 1326 param_dict['fitpage_structure'] = [params.structurecombobox] 1327 param_dict['2D_params'] = [str(params.is_2D)] 1328 param_dict['chainfit_params'] = ["False"] 1329 param_dict['data_id'] = [params.data_id] 1330 param_dict['data_name'] = [params.data_name] 1331 param_dict['is_data'] = [str(params.is_data)] 1332 param_dict['magnetic_params'] = [str(params.magnetic_on)] 1333 param_dict['model_name'] = [params.formfactorcombobox] 1334 param_dict['polydisperse_params'] = [str(params.enable_disp)] 1335 param_dict['q_range_max'] = [str(params.qmax)] 1336 param_dict['q_range_min'] = [str(params.qmin)] 1337 # Smearing is a bit trickier. 4.x has multiple keywords, 1338 # one for each combobox option 1339 if params.enable_smearer: 1340 if params.slit_smearer: 1341 w = 1 1342 elif params.pinhole_smearer: 1343 w = 2 1344 else: 1345 w = 0 1346 param_dict['smearing'] = [str(w)] 1347 # weighting is a bit trickier. 4.x has multiple keywords, 1348 # one for each radio box. 
1349 if params.dI_noweight: 1350 w = 2 1351 elif params.dI_didata: 1352 w = 3 1353 elif params.dI_sqrdata: 1354 w = 4 1355 elif params.dI_idata: 1356 w = 5 1357 else: 1358 w = 2 1359 param_dict['weighting'] = [str(w)] 1360 1361 # 4.x multi_factor is really the multiplicity 1362 if params.multi_factor is not None: 1363 param_dict['multiplicity'] = [str(int(params.multi_factor))] 1364 1365 # playing with titles 1366 data.filename = params.file 1367 data.title = params.data_name 1368 data.name = params.data_name 1369 1370 # main parameters 1371 for p in params.parameters: 1372 p_name = p[1] 1373 param_dict[p_name] = [str(p[0]), str(p[2]), None, str(p[5][1]), str(p[6][1])] 1374 # orientation parameters 1375 if params.is_2D: 1376 for p in params.orientation_params: 1377 p_name = p[1] 1378 param_dict[p_name] = [str(p[0]), str(p[2]), None, str(p[5][1]), str(p[6][1])] 1379 1380 # disperse parameters 1381 if params.enable_disp: 1382 for p in params.fittable_param: 1383 p_name = p[1] 1384 param_dict[p_name] = [str(p[0]), str(p[2]), None, str(35), str(3)] 1385 1386 # magnetic parameters 1387 1388 content[params.data_id]['fit_params'] = param_dict 1389 return content 1142 1390 1143 1391 def enum(*sequential, **named):
Note: See TracChangeset for help on using the changeset viewer.