[959eb01] | 1 | #!/usr/bin/env python |
---|
[aaf5e49] | 2 | from __future__ import print_function |
---|
| 3 | |
---|
[959eb01] | 4 | import os |
---|
| 5 | import subprocess |
---|
| 6 | import re |
---|
| 7 | import sys |
---|
| 8 | |
---|
| 9 | import logging |
---|
| 10 | import logging.config |
---|
| 11 | LOGGER_CONFIG_FILE = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.ini') |
---|
| 12 | logging.config.fileConfig(LOGGER_CONFIG_FILE) |
---|
| 13 | logger = logging.getLogger(__name__) |
---|
| 14 | |
---|
| 15 | try: |
---|
| 16 | import xmlrunner |
---|
| 17 | except: |
---|
| 18 | logger.error("xmlrunner needs to be installed to run these tests") |
---|
| 19 | logger.error("Try easy_install unittest-xml-reporting") |
---|
| 20 | sys.exit(1) |
---|
| 21 | |
---|
| 22 | # Check whether we have matplotlib installed |
---|
| 23 | HAS_MPL_WX = True |
---|
| 24 | try: |
---|
| 25 | import matplotlib |
---|
| 26 | import wx |
---|
[9e308a3] | 27 | except ImportError: |
---|
[959eb01] | 28 | HAS_MPL_WX = False |
---|
| 29 | |
---|
| 30 | SKIPPED_DIRS = ["sasrealspace", "calculatorview"] |
---|
| 31 | if not HAS_MPL_WX: |
---|
| 32 | SKIPPED_DIRS.append("sasguiframe") |
---|
| 33 | |
---|
| 34 | #COMMAND_SEP = ';' |
---|
| 35 | #if os.name == 'nt': |
---|
| 36 | # COMMAND_SEP = '&' |
---|
| 37 | |
---|
def run_tests(dirs=None, run_all=False):
    """Run every ``utest_*.py`` found under ``<pkg>/test/`` directories.

    Each test file is executed in a child interpreter via ``run_one.py``
    (one subprocess per file, stderr merged into stdout) and the summary
    lines produced by unittest ("Ran N tests", "FAILED (...)") are scraped
    from the captured output.

    :param dirs: iterable of package-directory names to test; when falsy,
        every entry of the directory containing this script is scanned.
        Directories listed in ``SKIPPED_DIRS`` are always skipped.
    :param run_all: accepted for command-line compatibility but currently
        unused by this implementation.  # NOTE(review): confirm whether it
        # should widen discovery — TODO
    :return: number of test *modules* that failed (0 means full success).
    """
    test_root = os.path.abspath(os.path.dirname(__file__))
    run_one_py = os.path.join(test_root, 'run_one.py')
    passed = 0
    failed = 0
    n_tests = 0
    n_errors = 0
    n_failures = 0

    # Compile once, outside the per-file loop.  Raw strings fix the invalid
    # "\(" escape sequences the originals carried in plain string literals.
    ran_re = re.compile(r"Ran ([0-9]+) test")
    errors_re = re.compile(r"FAILED \(.*errors=([0-9]+)")
    failures_re = re.compile(r"FAILED \(.*failures=([0-9]+)")

    for d in (dirs if dirs else os.listdir(test_root)):

        # Check for modules to be skipped
        if d in SKIPPED_DIRS:
            continue

        # Go through modules looking for unit tests
        module_dir = os.path.join(test_root, d, "test")
        if not os.path.isdir(module_dir):
            continue
        for f in os.listdir(module_dir):
            file_path = os.path.join(module_dir, f)
            if not (os.path.isfile(file_path)
                    and f.startswith("utest_") and f.endswith(".py")):
                continue
            module_name, _ = os.path.splitext(f)
            # shell=True with a quoted interpreter path; stderr folded into
            # stdout so one stream carries the whole unittest report.
            code = '"%s" %s %s' % (sys.executable, run_one_py, file_path)
            proc = subprocess.Popen(code, shell=True,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            std_out, _ = proc.communicate()
            std_out = std_out.decode()

            m = ran_re.search(std_out)
            if m is not None:
                n_tests += int(m.group(1))
                has_tests = True
            else:
                has_tests = False

            has_failed = "FAILED (" in std_out
            m = errors_re.search(std_out)
            if m is not None:
                n_errors += int(m.group(1))
            m = failures_re.search(std_out)
            if m is not None:
                n_failures += int(m.group(1))

            # A module with no recognisable "Ran N tests" line counts as a
            # failure too — it most likely crashed before unittest started.
            if has_failed or not has_tests:
                failed += 1
                modpath = os.path.join(module_dir, module_name + ".py")
                print("Result for %s: FAILED %s"
                      % (module_name, os.path.relpath(modpath, os.getcwd())))
            else:
                passed += 1
                print("Result for %s: SUCCESS" % module_name)

    print("\n----------------------------------------------")
    if n_tests == 0:
        print("No tests.")
    else:
        print("Results by test modules:")
        print("    PASSED: %d" % passed)
        ratio = 100.0 * failed / (failed + passed)
        print("    FAILED: %d (%.0f%%)" % (failed, ratio))

        print("Results by tests:")
        print("    Tests run:    %d" % n_tests)
        print("    Tests failed: %d" % n_failures)
        print("    Test errors:  %d" % n_errors)
    print("----------------------------------------------")

    return failed
---|
| 108 | |
---|
if __name__ == '__main__':
    # A leading "-all" flag requests the full suite; any remaining
    # arguments name the specific test directories to run.
    cli_args = sys.argv[1:]
    wants_all = bool(cli_args) and cli_args[0] == '-all'
    target_dirs = cli_args[1:] if wants_all else cli_args
    n_failed = run_tests(dirs=target_dirs, run_all=wants_all)
    if n_failed > 0:
        sys.exit(1)
---|