Changeset cbb9551 in sasview for src/sas/sascalc/dataloader
- Timestamp: Apr 10, 2017 10:19:20 AM
- Children: 4597637
- Parents: 1b9a367, d26f025
- Location: src/sas/sascalc/dataloader
- Files: 1 added, 10 edited

Note: this is a merge changeset; the changes shown below correspond to the merge itself rather than to the full set of changes relative to either parent.
src/sas/sascalc/dataloader/data_info.py
(r9a5097c → r7432acb)

The SESANS acceptance default changes from a bare None to a (value, unit) tuple, and a y-acceptance counterpart is added:

         ## SESANS zacceptance
    -    zacceptance = None
    +    zacceptance = (0, "")
    +    yacceptance = (0, "")

Every other hunk in this file replaces equality comparisons against None with identity tests, for example:

    -    if other.dy == None or (len(other.dy) != len(other.y)):
    +    if other.dy is None or (len(other.dy) != len(other.y)):

The same `== None` → `is None` (and `!= None` → `is not None`) substitution is applied to the checks on self.dy, self.dx, self.dxw, self.dxl, result.dy, self.err_data, other.err_data and the dqx_data/dqy_data pairs in the Data1D and Data2D arithmetic helpers.
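Since the bulk of this changeset is the `== None` → `is None` conversion, a short illustrative snippet (not taken from the repository) shows why the distinction matters for code that handles NumPy arrays such as dy, dx and err_data:

```python
import numpy as np

dy = np.zeros(3)

# `is None` is an identity test: always False for an array, True only for None.
assert dy is not None

# `== None` is broadcast element-wise on recent NumPy and yields a boolean
# array; using that array directly in an `if` raises "The truth value of an
# array ... is ambiguous".
elementwise = (dy == None)    # array([False, False, False])

# The pattern adopted throughout this changeset:
if dy is None or len(dy) != 3:
    dy = np.zeros(3)
```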
src/sas/sascalc/dataloader/loader.py
(rb699768 → r463e7ffc)

A module-level logger is introduced after the imports:

     from readers import cansas_reader

    +logger = logging.getLogger(__name__)
    +
     class Registry(ExtensionRegistry):

and every direct logging.warning(msg) / logging.error(msg) call in the plugin-folder lookup, module import, zip-file handling and Reader-lookup error paths becomes logger.warning(msg) / logger.error(msg).
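The same one-line pattern (a logger named after the module) is added to several of the readers below. A minimal sketch of how it is typically used; the `find_plugins` helper here is hypothetical and only stands in for the real reader-discovery code:

```python
import logging
import os

# One logger per module, named after the module itself (e.g.
# "sas.sascalc.dataloader.loader"), so records can be filtered and
# configured per file instead of going through the root logger.
logger = logging.getLogger(__name__)

def find_plugins(directory):
    """Hypothetical reader-discovery helper showing the logger usage."""
    readers_found = 0
    if not os.path.isdir(directory):
        logger.warning("DataLoader couldn't locate DataLoader plugin folder %r",
                       directory)
        return readers_found
    for item in os.listdir(directory):
        try:
            # ... import `item` and register any Reader class it defines ...
            readers_found += 1
        except Exception as exc:
            logger.error("Loader: Error importing %s\n %s", item, exc)
    return readers_found
```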
src/sas/sascalc/dataloader/manipulations.py
(rdd11014 → r7432acb)

Every hunk in this file is the same None-comparison conversion, applied throughout the averaging and binning loops, for example:

    -        if err_data == None or err_data[npts] == 0.0:
    +        if err_data is None or err_data[npts] == 0.0:

    -    if data2D.dqx_data != None and data2D.dqy_data != None:
    +    if data2D.dqx_data is not None and data2D.dqy_data is not None:

The same substitution is applied to the checks on dq_data, err_x / x_err and err_data[n], to the commented-out err_x check, and to the q_data sanity check (`if len(data2D.q_data) == None:` becomes `is None`).
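For context, the loops being touched all follow the same weighted-binning scheme: intensity is accumulated per bin with a fractional weight, and the variance is accumulated in quadrature, falling back to the absolute intensity itself (counting statistics) when no error estimate is supplied. A simplified, self-contained sketch with hypothetical names, not the actual SasView implementation:

```python
import numpy as np

def bin_average(q, intensity, err=None, nbins=10):
    """Illustrative weighted 1D binning with quadrature error propagation."""
    edges = np.linspace(q.min(), q.max(), nbins + 1)
    y = np.zeros(nbins)
    var = np.zeros(nbins)
    counts = np.zeros(nbins)

    for i in range(len(q)):
        # Locate the bin; clamp the right-most point into the last bin.
        i_q = min(np.searchsorted(edges, q[i], side="right") - 1, nbins - 1)
        frac = 1.0   # full weight here; the real code splits points across bins
        y[i_q] += frac * intensity[i]
        if err is None or err[i] == 0.0:
            # No error supplied: assume counting statistics, variance ~ |I|
            var[i_q] += frac * frac * abs(intensity[i])
        else:
            var[i_q] += frac * frac * err[i] * err[i]
        counts[i_q] += frac

    keep = counts > 0
    return y[keep] / counts[keep], np.sqrt(var[keep]) / counts[keep]
```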
src/sas/sascalc/dataloader/readers/ascii_reader.py
(r9a5097c → r235f514)

         if new_lentoks > 2:
             _dy = float(toks[2])
    -        has_error_dy = False if _dy == None else True
    +        has_error_dy = False if _dy is None else True

         # If a 4th row is present, consider it dx
         if new_lentoks > 3:
             _dx = float(toks[3])
    -        has_error_dx = False if _dx == None else True
    +        has_error_dx = False if _dx is None else True
src/sas/sascalc/dataloader/readers/associations.py
(re5c09cf → r959eb01)

A module-level logger is added after the imports:

     import logging
     import json
    +
    +logger = logging.getLogger(__name__)

     FILE_NAME = 'defaults.json'

and the two logging calls are routed through it:

    -        logging.error(msg)
    +        logger.error(msg)

    -    logging.info("register_readers is now obsolete: use read_associations()")
    +    logger.info("register_readers is now obsolete: use read_associations()")
src/sas/sascalc/dataloader/readers/cansas_constants.py
(rad4632c → r63d773c)

The yacceptance element is registered alongside zacceptance in the SESANS constants:

         "Sesans": {"storeas": "content"},
         "zacceptance": {"storeas": "float"},
    +    "yacceptance": {"storeas": "float"},
         "<any>" : ANY
src/sas/sascalc/dataloader/readers/cansas_reader.py
(r1b9a367 → rcbb9551)

A module-level logger is introduced after the imports:

     from xml.dom.minidom import parseString

    +logger = logging.getLogger(__name__)

the sample yacceptance is written out alongside zacceptance:

    +        self.write_node(node, "yacceptance", datainfo.sample.yacceptance[0],
    +                        {'unit': datainfo.sample.yacceptance[1]})
             self.write_node(node, "zacceptance", datainfo.sample.zacceptance[0],
                             {'unit': datainfo.sample.zacceptance[1]})

and the remaining hunks convert None comparisons to identity tests (self.current_dataset, self.encoding, datainfo.run, spectrum.transmission_deviation, datainfo.source.radiation, datainfo.collimation, datainfo.detector and local_unit) and route the unit-conversion warnings through logger.info(err_mess) instead of logging.info(err_mess).
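The new yacceptance field follows the same (value, unit) tuple convention as zacceptance (see the (0, "") defaults added in data_info.py above). As a rough illustration of how such a pair maps onto a CanSAS-style element, here is a small standalone sketch using plain lxml; write_acceptance and the "rad" unit are hypothetical, and the real writer uses the write_node helper shown in the diff:

```python
from lxml import etree

def write_acceptance(parent, name, acceptance):
    """Append <name unit="...">value</name> for a (value, unit) pair."""
    value, unit = acceptance
    node = etree.SubElement(parent, name)
    node.text = str(value)
    if unit:
        node.set("unit", unit)
    return node

sample = etree.Element("SASsample")
write_acceptance(sample, "yacceptance", (0.1, "rad"))  # unit is illustrative
write_acceptance(sample, "zacceptance", (0.1, "rad"))
print(etree.tostring(sample, pretty_print=True))
```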
src/sas/sascalc/dataloader/readers/danse_reader.py
(r9a5097c → r235f514)

A module-level logger is introduced after the imports:

     from sas.sascalc.dataloader.manipulations import reader2D_converter

    +logger = logging.getLogger(__name__)

the xmin/xmax/ymin/ymax bounds checks switch from == None to is None, e.g.

    -            if xmin == None or qx < xmin:
    +            if xmin is None or qx < xmin:

and the three logging.info(...) calls ("Skipping line", "Skipping entry (v1.0)" and "Danse_reader Reading %s") become logger.info(...).
src/sas/sascalc/dataloader/readers/tiff_reader.py
(r9a5097c → r959eb01)

The same module-level logger is added and used for the skip message:

     from sas.sascalc.dataloader.manipulations import reader2D_converter

    +logger = logging.getLogger(__name__)
    +
     class Reader:

    -            logging.error("tiff_reader: had to skip a non-float point")
    +            logger.error("tiff_reader: had to skip a non-float point")
src/sas/sascalc/dataloader/readers/xml_reader.py
(ra235f715 → r235f514)

A module-level logger is added after the lxml imports:

     from lxml import etree
     from lxml.builder import E

    +logger = logging.getLogger(__name__)

the three logging.info(xml_error) calls in the XML, string and schema parsing error handlers become logger.info(xml_error), and the attribute-default checks switch to identity tests:

    -        if attrib == None:
    +        if attrib is None:
             attrib = {}