- Timestamp: Sep 17, 2017 11:52:57 PM (7 years ago)
- Branches: master, ESS_GUI, ESS_GUI_Docs, ESS_GUI_batch_fitting, ESS_GUI_bumps_abstraction, ESS_GUI_iss1116, ESS_GUI_iss879, ESS_GUI_iss959, ESS_GUI_opencl, ESS_GUI_ordering, ESS_GUI_sync_sascalc, magnetic_scatt, release-4.2.2, ticket-1009, ticket-1094-headless, ticket-1242-2d-resolution, ticket-1243, ticket-1249, ticket885, unittest-saveload
- Children: 13374be
- Parents: ae69c690 (diff), cfd27dd (diff)
- Location: src/sas
- Files: 14 deleted, 14 edited

Note: this is a merge changeset, the changes displayed below correspond to the merge itself. Use the (diff) links above to see all the changes relative to each parent.
src/sas/sasgui/perspectives/calculator/model_editor.py
Changes (r07ec714 → r23359ccb):

- Removed the `self.factor = 'scale_factor'` attribute set in the editor's __init__.
- Renamed the misspelled method `fill_oprator_combox()` to `fill_operator_combox()` and updated its caller.
- The operator handler drops the `f_oper` bookkeeping and no longer stores `self.factor` (the '*' branch's factor string also changed from 'BackGround' to 'background'); the explanation label is now built directly::

    self._operator = operator
    self.explanation = (" Plugin_model = scale_factor * (model_1 {} "
                        "model_2) + background").format(operator)
    self.explanationctr.SetLabel(self.explanation)

- `write_string(self, fname, name1, name2)` was renamed to `write_string(self, fname, model1_name, model2_name)`. The old body substituted values line by line into SUM_TEMPLATE (toggling commented imports for custom models, inserting find_model() calls, the default value and the '+'/'*' operators); the new body is a single format-and-write::

    desc_line = ''
    if description.strip() != '':
        # Sasmodels generates a description for us. If the user provides
        # their own description, add a line to overwrite the sasmodels one
        desc_line = "\nmodel_info.description = '{}'".format(description)
    name = os.path.splitext(os.path.basename(self.fname))[0]
    output = SUM_TEMPLATE.format(name=name, model1=model1_name,
                                 model2=model2_name, operator=self._operator,
                                 desc_line=desc_line)
    with open(self.fname, 'w') as out_f:
        out_f.write(output)

- The old SUM_TEMPLATE, which generated a complete Model1DPlugin subclass (ordered parameter and dispersion dictionaries, p1_/p2_ parameter prefixes, setParam/getParam helpers, run/runXY/evalDistribution methods and a __main__ self-test), was deleted. The new template delegates model construction to sasmodels::

    SUM_TEMPLATE = """
    from sasmodels.core import load_model_info
    from sasmodels.sasview_model import make_model_from_info

    model_info = load_model_info('{model1}{operator}{model2}')
    model_info.name = '{name}'{desc_line}
    Model = make_model_from_info(model_info)
    """
src/sas/sasgui/perspectives/fitting/media/fitting_help.rst
Changes (r05b0bf6 → rca383a0):

- After the paragraph ending "...the :ref:`Advanced_Plugin_Editor`.", new documentation was added::

    **SasView version 4.2** made it possible to specify whether a plugin created with
    the *New Plugin Model* dialog is actually a form factor P(Q) or a structure factor
    S(Q). To do this, simply add one or other of the following lines under the *import*
    statements.

    For a form factor::

        form_factor = True

    or for a structure factor::

        structure_factor = True

    If the plugin is a structure factor it is *also* necessary to add two variables to
    the parameter list::

        parameters = [
            ['radius_effective', '', 1, [0.0, numpy.inf], 'volume', ''],
            ['volfraction', '', 1, [0.0, 1.0], '', ''],
            [...],

    and to the declarations of the functions Iq and Iqxy::

        def Iq(x, radius_effective, volfraction, ...):

        def Iqxy(x, y, radius_effective, volfraction, ...):

    Such a plugin should then be available in the S(Q) drop-down box on a FitPage (once
    a P(Q) model has been selected).

- In the Sum|Multi(p1,p2) section, the multiply example was corrected from
  "Plugin Model = scale_factor * model_1 /* model_2 + background" to::

    Plugin Model = scale_factor * (model1 * model2) + background

- "click the *Apply* button to generate the model" became "click the *Apply* button to generate and test the model".

- The note about re-selecting an edited plugin was extended::

    Any changes to a plugin model generated in this way only become effective *after* it is re-selected
    from the plugin models drop-down menu on the FitPage. If the model is not listed you can force a
    recompilation of the plugins by selecting *Fitting* > *Plugin Model Operations* > *Load Plugin Models*.

- New documentation describes the simplified plugin structure introduced in SasView 4.2::

    **SasView version 4.2** introduced a much simplified and more extensible structure for plugin models
    generated through the Easy Sum/Multi Editor. For example, the code for a combination of a sphere model
    with a power law model now looks like this::

        from sasmodels.core import load_model_info
        from sasmodels.sasview_model import make_model_from_info

        model_info = load_model_info('sphere+power_law')
        model_info.name = 'MyPluginModel'
        model_info.description = 'sphere + power_law'
        Model = make_model_from_info(model_info)

    To change the models or operators contributing to this plugin it is only necessary to edit the string
    in the brackets after *load_model_info*, though it would also be a good idea to update the model name
    and description too!

    The model specification string can handle multiple models and combinations of operators (+ or *) which
    are processed according to normal conventions. Thus 'model1+model2*model3' would be valid and would
    multiply model2 by model3 before adding model1. In this example, parameters in the *FitPage* would be
    prefixed A (for model2), B (for model3) and C (for model1). Whilst this might appear a little
    confusing, unless you were creating a plugin model from multiple instances of the same model the parameter
    assignments ought to be obvious when you load the plugin.

    If you need to include another plugin model in the model specification string, just prefix the name of
    that model with *custom*. For instance::

        sphere+custom.MyPluginModel

    To create a P(Q)*S(Q) model use the @ symbol instead of * like this::

        sphere@hardsphere

    This streamlined approach to building complex plugin models from existing library models, or models
    available on the *Model Marketplace*, also permits the creation of P(Q)*S(Q) plugin models, something
    that was not possible in earlier versions of SasView.
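As a concrete illustration of the structure-factor convention described in that documentation, here is a minimal, hypothetical S(Q) plugin skeleton; the model name and the trivial Iq body are placeholders, not part of the changeset::

    import numpy

    name = "illustrative_structure_factor"
    title = "Skeleton S(Q) plugin"
    description = "Placeholder structure factor used to show the required layout"

    structure_factor = True  # mark this plugin as an S(Q) model

    # A structure factor plugin must expose these two parameters
    parameters = [
        ['radius_effective', '', 1, [0.0, numpy.inf], 'volume', ''],
        ['volfraction', '', 1, [0.0, 1.0], '', ''],
    ]

    def Iq(x, radius_effective, volfraction):
        # Placeholder: a real S(Q) would depend on both parameters
        return numpy.ones_like(x)

    def Iqxy(x, y, radius_effective, volfraction):
        return Iq(numpy.sqrt(x**2 + y**2), radius_effective, volfraction)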
src/sas/sasgui/perspectives/fitting/media/plugin.rst
Changes (r72100ee → re081946):

- After the bullet "* By writing a model from scratch outside of SasView (only recommended for code monkeys!)" and before the "Overview" heading, a pointer was added::

    **What follows below is quite technical. If you just want a helping hand to get
    started creating your own models see** :ref:`Adding_your_own_models`.
src/sas/sascalc/dataloader/file_reader_base_class.py
Changes (ra78a02f → rae69c690):

- When re-sorting loaded data, zero-length dx and dy arrays are now treated as "no resolution/uncertainty data": they are set to None and the sort step for that array is skipped::

    if data.dx is not None:
        if len(data.dx) == 0:
            data.dx = None
            continue
        data.dx = np.asarray([data.dx[i] for i in ind]).astype(np.float64)
    ...
    if data.dy is not None:
        if len(data.dy) == 0:
            data.dy = None
            continue
        data.dy = np.asarray([data.dy[i] for i in ind]).astype(np.float64)

- remove_empty_q_values() gained has_error_dxl and has_error_dxw flags. Instead of zero-filling dx/dy when no uncertainties are present, it now filters only the arrays that actually exist::

    def remove_empty_q_values(self, has_error_dx=False, has_error_dy=False,
                              has_error_dxl=False, has_error_dxw=False):
        """
        Remove any point where Q == 0
        """
        x = self.current_dataset.x
        self.current_dataset.x = self.current_dataset.x[x != 0]
        self.current_dataset.y = self.current_dataset.y[x != 0]
        if has_error_dy:
            self.current_dataset.dy = self.current_dataset.dy[x != 0]
        if has_error_dx:
            self.current_dataset.dx = self.current_dataset.dx[x != 0]
        if has_error_dxl:
            self.current_dataset.dxl = self.current_dataset.dxl[x != 0]
        if has_error_dxw:
            self.current_dataset.dxw = self.current_dataset.dxw[x != 0]

- In reset_data_list() the dx array is now created before dy (ordering change only).
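A standalone sketch of the same masking pattern, assuming plain numpy arrays in place of the reader's dataset attributes::

    import numpy as np

    def remove_empty_q_values(x, y, dx=None, dxl=None, dxw=None, dy=None):
        """Drop every point where Q == 0, slicing only the arrays that exist."""
        mask = x != 0
        x, y = x[mask], y[mask]
        dx = dx[mask] if dx is not None else None
        dxl = dxl[mask] if dxl is not None else None
        dxw = dxw[mask] if dxw is not None else None
        dy = dy[mask] if dy is not None else None
        return x, y, dx, dxl, dxw, dy

    # Example: the point at Q == 0 is removed from every array that is present
    x = np.array([0.0, 0.01, 0.02])
    y = np.array([1.0, 2.0, 3.0])
    dy = np.array([0.1, 0.2, 0.3])
    print(remove_empty_q_values(x, y, dy=dy))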
src/sas/sascalc/dataloader/readers/cansas_reader.py
Changes (ra78a02f → rae69c690):

- The inline "check dx/dy, remove empty Q values, send to output, reset DataInfo" sequence after _parse_entry() was replaced by a call to a new data_cleanup() method, which also accounts for the slit-smearing columns::

    def data_cleanup(self):
        """
        Clean up the data sets and refresh everything
        :return: None
        """
        has_error_dx = self.current_dataset.dx is not None
        has_error_dxl = self.current_dataset.dxl is not None
        has_error_dxw = self.current_dataset.dxw is not None
        has_error_dy = self.current_dataset.dy is not None
        self.remove_empty_q_values(has_error_dx=has_error_dx,
                                   has_error_dxl=has_error_dxl,
                                   has_error_dxw=has_error_dxw,
                                   has_error_dy=has_error_dy)
        self.send_to_output()  # Combine datasets with DataInfo
        self.current_datainfo = DataInfo()  # Reset DataInfo

- A DataReaderException for invalid XML is now raised only if find_invalid_xml() actually returned something.
- The "unit|label" splitting for the I and Q tags was removed; the axes are now set directly with yaxis("Intensity", unit) and xaxis("Q", unit).
- The dQw and dQl branches no longer initialise dxw/dxl to np.empty(0) before appending.
- The Sesans tag now also sets the x and y axes from the element's x_axis/x_unit/y_axis/y_unit attributes.
- Unexpected-unit errors are suppressed for the units "A" and "pol" (a temporary fix so SESANS data stored in canSAS XML loads without warnings).
- When only one of dQl/dQw is present, the missing column is now a zero array of the full dataset length (previously one element short); the same off-by-one fix was applied to the zero-filled dx and dy fallbacks.
- At the end of parsing, the reader now calls data_cleanup(), sort_one_d_data(), sort_two_d_data() and reset_data_list() instead of clearing its error list and calling send_to_output() directly.
- In the writer, the Q/I/Idev/Qdev/dQw/dQl units are written from datainfo.x_unit and datainfo.y_unit instead of the "_xaxis | _xunit" composites; the Sesans element now carries x_axis/y_axis/x_unit/y_unit attributes; and the Sesans, yacceptance and zacceptance nodes are appended to the entry node.
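A short sketch of the slit-smearing fix described above, using plain numpy arrays for illustration::

    import numpy as np

    # When a file provides only dQw (or only dQl), the missing column used to be
    # padded with size-1 zeros, leaving it one element shorter than the data.
    # It is now simply a zero array of the full length.
    dxw = np.array([0.001, 0.002, 0.003])   # slit-width resolution read from the file
    dxl = np.zeros(dxw.size)                # slit-length column filled with zeros
    assert dxl.size == dxw.size             # lengths now agree with the Q array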
src/sas/sascalc/dataloader/readers/cansas_reader_HDF5.py
Changes (rdcb91cf → rcd57c7d4):

- In read_children(), self.parent_class is now restored after the recursive descent into a group, so items processed after the recursion see the correct class name::

    if isinstance(value, h5py.Group):
        # Set parent class before recursion
        self.parent_class = class_name
        parent_list.append(key)
        ...
        # Recursion step to access data within the group
        self.read_children(value, parent_list)
        # Reset parent class when returning from recursive method
        self.parent_class = class_name
        self.add_intermediate()
        parent_list.remove(key)
src/sas/sascalc/dataloader/readers/xml_reader.py
Changes (rfafe52a → rcd57c7d4):

- Schema validation errors raised for <any> elements are now suppressed::

    try:
        first_error = schema.assertValid(self.xmldoc)
    except etree.DocumentInvalid as err:
        # Suppress errors for <'any'> elements
        if "##other" in str(err):
            return first_error
        first_error = str(err)
    return first_error
src/sas/sascalc/invariant/invariant.py
Changes (r7432acb → rb1f20d1):

- The extrapolation point counts (_low_extrapolation_npts, _high_extrapolation_npts) are now cast to int before being used as numpy array indices::

    qmax = self._data.x[int(self._low_extrapolation_npts - 1)]
    ...
    qmin = self._data.x[int(x_len - (self._high_extrapolation_npts - 1))]
    qmax = self._data.x[int(x_len)]
    ...
    q_end = self._data.x[max(0, int(npts_in - 1))]
    ...
    npts_in = int(self._high_extrapolation_npts)
    _npts = len(self._data.x)
    q_start = self._data.x[min(_npts, int(_npts - npts_in))]
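The reason for the casts, shown as a minimal numpy sketch (values and array sizes are illustrative)::

    import numpy as np

    x = np.linspace(0.001, 0.1, 50)
    npts = 10.0                      # extrapolation counts often arrive as floats
    try:
        qmax = x[npts - 1]           # rejected on numpy versions that enforce integer indices
    except IndexError:
        qmax = x[int(npts - 1)]      # the changeset's fix: cast to int first
    print(qmax)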
src/sas/sasgui/guiframe/config.py
Changes (ra1b8fee → rce2819b):

- The acknowledgement citation was updated from "M. Doucet et al. SasView Version 4.1, Zenodo, 10.5281/zenodo.438138" to::

    _acknowledgement_citation = \
        '''M. Doucet et al. SasView Version 4.1.2, Zenodo, 10.5281/zenodo.825675'''
src/sas/sasgui/guiframe/documentation_window.py
Changes (r959eb01 → r6a455cd3):

- The wx.html2 WebView branch used to display Sphinx help inside the application was commented out, so the default external browser is always used::

    #Commenting following 5 lines, so default browser is forced
    #This is due to CDN mathjax discontinuation of service, intenal help
    #browser should be back with qt version
    #Note added by Wojtek Potrzebowski, July 4th 2017
    # elif WX_SUPPORTS_HTML2:
    #     # Complete HTML/CSS support!
    #     self.view = html.WebView.New(self)
    #     self.view.LoadURL(url)
    #     self.Show()
src/sas/sasgui/perspectives/fitting/fitpage.py
Changes (red2276f → r6a455cd3):

- When the plugin model category is selected, the page now remembers the current model's id before refreshing the plugin list and re-instantiates the matching model class afterwards, so an edited plugin takes effect on the open FitPage (the intermediate `custom_model = CUSTOM_MODEL` alias was dropped)::

    mod_cat = self.categorybox.GetStringSelection()
    if mod_cat == CUSTOM_MODEL:
        temp_id = self.model.id
        temp = self.parent.update_model_list()
        for v in self.parent.model_dictionary.values():
            if v.id == temp_id:
                self.model = v()
                break
        if temp:
            self.model_list_box = temp
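A generic sketch of that re-selection pattern, with a toy registry standing in for the FitPage's model dictionary (class names are invented for illustration)::

    class OldPlugin(object):
        id = 'my_plugin'
        version = 1

    class ReloadedPlugin(object):
        id = 'my_plugin'
        version = 2

    def refresh_current_model(current_model, model_dictionary):
        """Swap the live model instance for one built from the reloaded class."""
        for cls in model_dictionary.values():
            if cls.id == current_model.id:
                return cls()          # re-instantiate from the reloaded class
        return current_model          # keep the old instance if nothing matches

    current = refresh_current_model(OldPlugin(), {'my_plugin': ReloadedPlugin})
    print(current.version)  # -> 2: the edited plugin definition is now in use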
src/sas/sasgui/perspectives/fitting/fitpanel.py
Changes (r67b0a99 → rc9ecd1b):

- When saving a fit project, a page is only serialised if it holds real data (page.data_list is non-empty) and a model; fits after the first are appended to the first document only when the newly generated XML is properly formed::

    # data_list only populated with real data
    # Fake object in data from page.get_data() if model is selected
    if len(page.data_list) is not 0 and page.model is not None:
        new_doc = self._manager.state_reader.write_toXML(data, state, batch_state)
        # Fit #2 through #n are append to first fit
        if doc is not None and hasattr(doc, "firstChild"):
            # Only append if properly formed new_doc
            if new_doc is not None and hasattr(new_doc, "firstChild"):
                child = new_doc.firstChild.firstChild
                doc.firstChild.appendChild(child)
        # First fit defines the main document
        else:
            doc = new_doc

- When closing pages whose data has been deleted, the panel now calls close_page_with_data(temp_data) instead of selecting the tab and deleting it manually, and the simultaneous-fit page is removed with DeletePage(pos).
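A small sketch of the guarded document merge, using xml.dom.minidom documents as stand-ins for the fit-state XML returned by the state reader::

    from xml.dom.minidom import parseString

    def append_fit(doc, new_doc):
        """Append the first fit node of new_doc to doc, guarding against malformed input."""
        if doc is not None and hasattr(doc, "firstChild"):
            if new_doc is not None and hasattr(new_doc, "firstChild"):
                child = new_doc.firstChild.firstChild
                # importNode copies the node into doc before appending it
                doc.firstChild.appendChild(doc.importNode(child, True))
            return doc
        return new_doc  # the first fit defines the main document

    doc = append_fit(None, parseString("<SASroot><fit id='1'/></SASroot>"))
    doc = append_fit(doc, parseString("<SASroot><fit id='2'/></SASroot>"))
    print(doc.toxml())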
src/sas/sasgui/perspectives/fitting/models.py
Changes (rb1c2011 → rb682c6a):

- CUSTOM_MODEL is now imported from fitpage and used as the category name when repopulating the plugin list.
- The plugin cache is refreshed when the plugin directory's modification time increases (previously any difference triggered a refresh), and last_time_dir_modified is updated after a rescan::

    temp = {}
    if self.is_changed():
        temp = _find_models()
        self.last_time_dir_modified = time.time()
        return temp
    ...
    if os.path.isdir(plugin_dir):
        temp = os.path.getmtime(plugin_dir)
        if self.last_time_dir_modified < temp:
            is_modified = True
            self.last_time_dir_modified = temp

- The plugin-list refresh now rebuilds self.plugins from scratch instead of only appending models that were not previously stored, so edited plugins replace their old definitions::

    self.plugins = []
    new_plugins = self.findModels()
    if new_plugins:
        for name, plug in new_plugins.items():
            self.stored_plugins[name] = plug
            self.plugins.append(plug)
            self.model_dictionary[name] = plug
        self.model_combobox.set_list(CUSTOM_MODEL, self.plugins)
        return self.model_combobox.get_list()
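A minimal sketch of the mtime-based cache check, assuming a hypothetical plugin directory path::

    import os
    import time

    class PluginCache(object):
        """Rescan a plugin directory only when its modification time moves forward."""
        def __init__(self, plugin_dir):
            self.plugin_dir = plugin_dir
            self.last_time_dir_modified = 0

        def is_changed(self):
            if os.path.isdir(self.plugin_dir):
                mtime = os.path.getmtime(self.plugin_dir)
                if self.last_time_dir_modified < mtime:   # strictly newer, not merely different
                    self.last_time_dir_modified = mtime
                    return True
            return False

    cache = PluginCache('plugin_models')   # hypothetical directory
    if cache.is_changed():
        print('rescanning plugins at', time.ctime(cache.last_time_dir_modified))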
src/sas/sasgui/perspectives/fitting/pagestate.py
Changes (r959eb01 → rda9b239):

- When parsing saved state lines of the form "name: value", blank lines and lines without a colon are now skipped instead of being processed::

    value = ""
    content = line.split(":")
    if line == '' or len(content) == 1:
        continue
    name = content[0]
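The same guard in a standalone sketch, parsing hypothetical "name: value" lines::

    def parse_state_lines(lines):
        """Collect name/value pairs, ignoring blank lines and lines without a colon."""
        result = {}
        for line in lines:
            content = line.split(":")
            if line == '' or len(content) == 1:
                continue          # nothing to parse on this line
            name = content[0].strip()
            result[name] = ":".join(content[1:]).strip()
        return result

    print(parse_state_lines(["qmin: 0.001", "", "no colon here", "qmax: 0.4"]))
    # -> {'qmin': '0.001', 'qmax': '0.4'}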