#!/usr/bin/env python
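#
# Run the unit tests found in the local packages: every utest_*.py module
# under a package's test/ sub-directory is executed in its own interpreter
# (through run_one.py) and the results are tallied. The script exits with a
# non-zero status if any test module fails.
#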
import os
import subprocess
import re
import sys

import logging
import logging.config
LOGGER_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.ini')
logging.config.fileConfig(LOGGER_CONFIG_FILE, disable_existing_loggers=False)
logger = logging.getLogger(__name__)

# Make sure xmlrunner is available before launching any tests
try:
    import xmlrunner
except ImportError:
    logger.error("xmlrunner needs to be installed to run these tests")
    logger.error("Try easy_install unittest-xml-reporting")
    sys.exit(1)

# Check whether we have matplotlib and wx installed
HAS_MPL_WX = True
try:
    import matplotlib
    import wx
except ImportError:
    HAS_MPL_WX = False

# Test directories to skip; the GUI tests require matplotlib and wx
SKIPPED_DIRS = ["sasrealspace", "calculatorview"]
if not HAS_MPL_WX:
    SKIPPED_DIRS.append("sasguiframe")

#COMMAND_SEP = ';'
#if os.name == 'nt':
#    COMMAND_SEP = '&'

def run_tests(dirs=None, all=False):
    test_root = os.path.abspath(os.path.dirname(__file__))
    run_one_py = os.path.join(test_root, 'run_one.py')
    passed = 0
    failed = 0
    n_tests = 0
    n_errors = 0
    n_failures = 0

    for d in (dirs if dirs else os.listdir(test_root)):

        # Check for modules to be skipped
        if d in SKIPPED_DIRS:
            continue

        # Go through modules looking for unit tests
        module_dir = os.path.join(test_root, d, "test")
        if os.path.isdir(module_dir):
            for f in os.listdir(module_dir):
                file_path = os.path.join(module_dir, f)
                if os.path.isfile(file_path) and f.startswith("utest_") and f.endswith(".py"):
                    module_name, _ = os.path.splitext(f)
                    # Run each test module in a separate interpreter via run_one.py
                    code = '"%s" %s %s' % (sys.executable, run_one_py, file_path)
                    proc = subprocess.Popen(code, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
                    std_out, std_err = proc.communicate()
                    #print std_out
                    #sys.exit()
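                    # Parse the unittest console output to count tests run, errors, and failures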
                    has_failed = True
                    m = re.search(r"Ran ([0-9]+) test", std_out)
                    if m is not None:
                        has_failed = False
                        n_tests += int(m.group(1))

                    m = re.search(r"FAILED \(errors=([0-9]+)\)", std_out)
                    if m is not None:
                        has_failed = True
                        n_errors += int(m.group(1))

                    m = re.search(r"FAILED \(failures=([0-9]+)\)", std_out)
                    if m is not None:
                        has_failed = True
                        n_failures += int(m.group(1))

                    if has_failed:
                        failed += 1
                        print "Result for %s (%s): FAILED" % (module_name, module_dir)
                        print std_out
                    else:
                        passed += 1
                        print "Result for %s: SUCCESS" % module_name

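    # Print a summary, both per test module and per individual test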
    print "\n----------------------------------------------"
    if n_tests == 0:
        print "No tests."
    else:
        print "Results by test modules:"
        print "    PASSED: %d" % passed
        ratio = 100.0 * failed / (failed + passed)
        print "    FAILED: %d (%.0f%%)" % (failed, ratio)

        print "Results by tests:"
        print "    Tests run:    %d" % n_tests
        print "    Tests failed: %d" % n_failures
        print "    Test errors:  %d" % n_errors
    print "----------------------------------------------"

    return failed

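# Command line: an optional leading '-all' flag (forwarded to run_tests but not
# otherwise used), followed by specific test directories to run; with no
# directory arguments, every package under this directory is scanned.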
if __name__ == '__main__':
    all = (len(sys.argv) > 1 and sys.argv[1] == '-all')
    dirs = sys.argv[1:] if not all else sys.argv[2:]
    if run_tests(dirs=dirs, all=all) > 0:
        sys.exit(1)