"""
Discover and run the utest_*.py unit tests found under each module's
test directory, using xmlrunner to write XML reports to a logs/ folder.
"""
import os
import subprocess
import re
import sys
try:
    import xmlrunner
except ImportError:
    print "xmlrunner needs to be installed to run these tests"
    print "Try easy_install unittest-xml-reporting"
    sys.exit(1)

# Check whether we have matplotlib installed
HAS_MPL = True
try:
    import matplotlib
except ImportError:
    HAS_MPL = False

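# Directories that should not be scanned for unit tests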
SKIPPED_DIRS = ["sansrealspace", "calculatorview"]
if not HAS_MPL:
    SKIPPED_DIRS.append("sansguiframe")

SANSVIEW_DIR = os.pardir
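# Shell commands are chained with ';' in POSIX shells and '&' in the Windows cmd shell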
COMMAND_SEP = ';'
if os.name == 'nt':
    COMMAND_SEP = '&'

def run_tests():
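    """
    Run every utest_*.py module found under <module>/test for each
    module directory in SANSVIEW_DIR, and return the number of test
    modules that failed.
    """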
    passed = 0
    failed = 0
    n_tests = 0
    n_errors = 0
    n_failures = 0

    for d in os.listdir(SANSVIEW_DIR):

        # Check for modules to be skipped
        if d in SKIPPED_DIRS:
            continue

        # Go through modules looking for unit tests
        module_dir = os.path.join(os.getcwd(), SANSVIEW_DIR, d, "test")
        if os.path.isdir(module_dir):
            for f in os.listdir(module_dir):
                file_path = os.path.join(module_dir, f)
                if os.path.isfile(file_path) and f.startswith("utest_") and f.endswith(".py"):
                    module_name, _ = os.path.splitext(f)
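                    # Build a one-line shell command that cd's into the test
                    # directory and runs the test module in a fresh interpreter
                    # under xmlrunner, writing the XML report to logs/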
                    code = "cd %s%s%s -c \"import sys;import xmlrunner;import unittest;sys.path.insert(0, '%s');" % (module_dir, COMMAND_SEP, sys.executable, module_dir)
                    code += "from %s import *;" % module_name
                    code += "unittest.main(testRunner=xmlrunner.XMLTestRunner(output='logs'))\""
                    proc = subprocess.Popen(code, shell=True,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    std_out, std_err = proc.communicate()
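                    # Parse the unittest summary: "Ran N tests" means the
                    # module executed at all; "FAILED (errors=N)" and
                    # "FAILED (failures=N)" carry the error/failure counts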
                    has_failed = True
                    m = re.search("Ran ([0-9]+) test", std_out)
                    if m is not None:
                        has_failed = False
                        n_tests += int(m.group(1))

                    m = re.search(r"FAILED \(errors=([0-9]+)\)", std_out)
                    if m is not None:
                        has_failed = True
                        n_errors += int(m.group(1))

                    m = re.search(r"FAILED \(failures=([0-9]+)\)", std_out)
                    if m is not None:
                        has_failed = True
                        n_failures += int(m.group(1))

                    if has_failed:
                        failed += 1
                        print "Result for %s (%s): FAILED" % (module_name, module_dir)
                        print std_out
                    else:
                        passed += 1
                        print "Result for %s: SUCCESS" % module_name

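    # Summarize the outcome, both by test module and by individual test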
    print "\n----------------------------------------------"
    print "Results by test modules:"
    print "    PASSED: %d" % passed
    n_modules = failed + passed
    ratio = 100.0 * failed / n_modules if n_modules > 0 else 0.0
    print "    FAILED: %d (%2.2g%%)" % (failed, ratio)

    print "Results by tests:"
    print "    Tests run:    %d" % n_tests
    print "    Tests failed: %d" % n_failures
    print "    Test errors:  %d" % n_errors
    print "----------------------------------------------"

    return failed

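# Exit with a non-zero status when any test module failed, so callers
# (e.g. a build script) can detect the failure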
if __name__ == '__main__':
    if run_tests() > 0:
        sys.exit(1)