tests: support for expected failures
- Add support for @unittest.expectedFailure decorator. Type: improvement Signed-off-by: Klement Sekera <klement.sekera@gmail.com> Change-Id: I761751cda505e962225dc680b97c1fffa96f5176 Signed-off-by: Dave Wallace <dwallacelf@gmail.com>
This commit is contained in:
parent
0157885517
commit
47f3527108
@ -46,6 +46,7 @@ from log import (
|
||||
)
|
||||
from vpp_object import VppObjectRegistry
|
||||
from util import ppp, is_core_present
|
||||
from test_result_code import TestResultCode
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -53,13 +54,6 @@ logger = logging.getLogger(__name__)
|
||||
null_logger = logging.getLogger("VppTestCase")
|
||||
null_logger.addHandler(logging.NullHandler())
|
||||
|
||||
PASS = 0
|
||||
FAIL = 1
|
||||
ERROR = 2
|
||||
SKIP = 3
|
||||
TEST_RUN = 4
|
||||
SKIP_CPU_SHORTAGE = 5
|
||||
|
||||
|
||||
if config.debug_framework:
|
||||
import debug_internal
|
||||
@ -1407,6 +1401,7 @@ class VppTestResult(unittest.TestResult):
|
||||
self.stream = stream
|
||||
self.descriptions = descriptions
|
||||
self.verbosity = verbosity
|
||||
self.result_code = TestResultCode.TEST_RUN
|
||||
self.result_string = None
|
||||
self.runner = runner
|
||||
self.printed = []
|
||||
@ -1418,15 +1413,25 @@ class VppTestResult(unittest.TestResult):
|
||||
:param test:
|
||||
|
||||
"""
|
||||
if self.current_test_case_info:
|
||||
self.current_test_case_info.logger.debug(
|
||||
"--- addSuccess() %s.%s(%s) called"
|
||||
% (test.__class__.__name__, test._testMethodName, test._testMethodDoc)
|
||||
)
|
||||
self.log_result("addSuccess", test)
|
||||
unittest.TestResult.addSuccess(self, test)
|
||||
self.result_string = colorize("OK", GREEN)
|
||||
self.result_code = TestResultCode.PASS
|
||||
self.send_result_through_pipe(test, self.result_code)
|
||||
|
||||
self.send_result_through_pipe(test, PASS)
|
||||
def addExpectedFailure(self, test, err):
    """Record a test that failed while marked @unittest.expectedFailure.

    An expected failure counts towards a successful run, hence the
    result string is colored green.

    :param test: the test case instance
    :param err: (exc_type, exc_value, traceback) tuple describing the failure
    """
    self.log_result("addExpectedFailure", test, err)
    # unittest.TestResult.addExpectedFailure takes (test, err)
    super().addExpectedFailure(test, err)
    self.result_string = colorize("FAIL", GREEN)
    self.result_code = TestResultCode.EXPECTED_FAIL
    self.send_result_through_pipe(test, self.result_code)
|
||||
|
||||
def addUnexpectedSuccess(self, test):
    """Record a test that passed despite being marked @unittest.expectedFailure.

    An unexpected pass is treated as a problem, hence the result
    string is colored red.

    :param test: the test case instance
    """
    self.log_result("addUnexpectedSuccess", test)
    super().addUnexpectedSuccess(test)
    self.result_string = colorize("OK", RED)
    self.result_code = TestResultCode.UNEXPECTED_PASS
    self.send_result_through_pipe(test, self.result_code)
|
||||
|
||||
def addSkip(self, test, reason):
|
||||
"""
|
||||
@ -1436,23 +1441,15 @@ class VppTestResult(unittest.TestResult):
|
||||
:param reason:
|
||||
|
||||
"""
|
||||
if self.current_test_case_info:
|
||||
self.current_test_case_info.logger.debug(
|
||||
"--- addSkip() %s.%s(%s) called, reason is %s"
|
||||
% (
|
||||
test.__class__.__name__,
|
||||
test._testMethodName,
|
||||
test._testMethodDoc,
|
||||
reason,
|
||||
)
|
||||
)
|
||||
self.log_result("addSkip", test, reason=reason)
|
||||
unittest.TestResult.addSkip(self, test, reason)
|
||||
self.result_string = colorize("SKIP", YELLOW)
|
||||
|
||||
if reason == "not enough cpus":
|
||||
self.send_result_through_pipe(test, SKIP_CPU_SHORTAGE)
|
||||
self.result_code = TestResultCode.SKIP_CPU_SHORTAGE
|
||||
else:
|
||||
self.send_result_through_pipe(test, SKIP)
|
||||
self.result_code = TestResultCode.SKIP
|
||||
self.send_result_through_pipe(test, self.result_code)
|
||||
|
||||
def symlink_failed(self):
|
||||
if self.current_test_case_info:
|
||||
@ -1484,7 +1481,7 @@ class VppTestResult(unittest.TestResult):
|
||||
if pipe:
|
||||
pipe.send((test.id(), result))
|
||||
|
||||
def log_error(self, test, err, fn_name):
|
||||
def log_result(self, fn, test, err=None, reason=None):
|
||||
if self.current_test_case_info:
|
||||
if isinstance(test, unittest.suite._ErrorHolder):
|
||||
test_name = test.description
|
||||
@ -1494,25 +1491,29 @@ class VppTestResult(unittest.TestResult):
|
||||
test._testMethodName,
|
||||
test._testMethodDoc,
|
||||
)
|
||||
extra_msg = ""
|
||||
if err:
|
||||
extra_msg += f", error is {err}"
|
||||
if reason:
|
||||
extra_msg += f", reason is {reason}"
|
||||
self.current_test_case_info.logger.debug(
|
||||
"--- %s() %s called, err is %s" % (fn_name, test_name, err)
|
||||
)
|
||||
self.current_test_case_info.logger.debug(
|
||||
"formatted exception is:\n%s" % "".join(format_exception(*err))
|
||||
f"--- {fn}() {test_name} called{extra_msg}"
|
||||
)
|
||||
if err:
|
||||
self.current_test_case_info.logger.debug(
|
||||
"formatted exception is:\n%s" % "".join(format_exception(*err))
|
||||
)
|
||||
|
||||
def add_error(self, test, err, unittest_fn, error_type):
|
||||
if error_type == FAIL:
|
||||
self.log_error(test, err, "addFailure")
|
||||
def add_error(self, test, err, unittest_fn, result_code):
|
||||
self.result_code = result_code
|
||||
if result_code == TestResultCode.FAIL:
|
||||
self.log_result("addFailure", test, err=err)
|
||||
error_type_str = colorize("FAIL", RED)
|
||||
elif error_type == ERROR:
|
||||
self.log_error(test, err, "addError")
|
||||
elif result_code == TestResultCode.ERROR:
|
||||
self.log_result("addError", test, err=err)
|
||||
error_type_str = colorize("ERROR", RED)
|
||||
else:
|
||||
raise Exception(
|
||||
"Error type %s cannot be used to record an "
|
||||
"error or a failure" % error_type
|
||||
)
|
||||
raise Exception(f"Unexpected result code {result_code}")
|
||||
|
||||
unittest_fn(self, test, err)
|
||||
if self.current_test_case_info:
|
||||
@ -1535,7 +1536,7 @@ class VppTestResult(unittest.TestResult):
|
||||
else:
|
||||
self.result_string = "%s [no temp dir]" % error_type_str
|
||||
|
||||
self.send_result_through_pipe(test, error_type)
|
||||
self.send_result_through_pipe(test, result_code)
|
||||
|
||||
def addFailure(self, test, err):
|
||||
"""
|
||||
@ -1545,7 +1546,7 @@ class VppTestResult(unittest.TestResult):
|
||||
:param err: error message
|
||||
|
||||
"""
|
||||
self.add_error(test, err, unittest.TestResult.addFailure, FAIL)
|
||||
self.add_error(test, err, unittest.TestResult.addFailure, TestResultCode.FAIL)
|
||||
|
||||
def addError(self, test, err):
|
||||
"""
|
||||
@ -1555,7 +1556,7 @@ class VppTestResult(unittest.TestResult):
|
||||
:param err: error message
|
||||
|
||||
"""
|
||||
self.add_error(test, err, unittest.TestResult.addError, ERROR)
|
||||
self.add_error(test, err, unittest.TestResult.addError, TestResultCode.ERROR)
|
||||
|
||||
def getDescription(self, test):
|
||||
"""
|
||||
@ -1634,23 +1635,40 @@ class VppTestResult(unittest.TestResult):
|
||||
"""
|
||||
unittest.TestResult.stopTest(self, test)
|
||||
|
||||
result_code_to_suffix = {
|
||||
TestResultCode.PASS: "",
|
||||
TestResultCode.FAIL: "",
|
||||
TestResultCode.ERROR: "",
|
||||
TestResultCode.SKIP: "",
|
||||
TestResultCode.TEST_RUN: "",
|
||||
TestResultCode.SKIP_CPU_SHORTAGE: "",
|
||||
TestResultCode.EXPECTED_FAIL: " [EXPECTED FAIL]",
|
||||
TestResultCode.UNEXPECTED_PASS: " [UNEXPECTED PASS]",
|
||||
}
|
||||
|
||||
if self.verbosity > 0:
|
||||
self.stream.writeln(single_line_delim)
|
||||
self.stream.writeln(
|
||||
"%-73s%s" % (self.getDescription(test), self.result_string)
|
||||
"%-72s%s%s"
|
||||
% (
|
||||
self.getDescription(test),
|
||||
self.result_string,
|
||||
result_code_to_suffix[self.result_code],
|
||||
)
|
||||
)
|
||||
self.stream.writeln(single_line_delim)
|
||||
else:
|
||||
self.stream.writeln(
|
||||
"%-68s %4.2f %s"
|
||||
"%-67s %4.2f %s%s"
|
||||
% (
|
||||
self.getDescription(test),
|
||||
time.time() - self.start_test,
|
||||
self.result_string,
|
||||
result_code_to_suffix[self.result_code],
|
||||
)
|
||||
)
|
||||
|
||||
self.send_result_through_pipe(test, TEST_RUN)
|
||||
self.send_result_through_pipe(test, TestResultCode.TEST_RUN)
|
||||
|
||||
def printErrors(self):
|
||||
"""
|
||||
|
@ -52,6 +52,7 @@ from scapy.layers.inet import IPerror, TCPerror, UDPerror, ICMPerror
|
||||
from scapy.layers.inet6 import ICMPv6DestUnreach, ICMPv6EchoRequest
|
||||
from scapy.layers.inet6 import ICMPv6EchoReply
|
||||
from vpp_running import use_running
|
||||
from test_result_code import TestResultCode
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -60,13 +61,6 @@ logger = logging.getLogger(__name__)
|
||||
null_logger = logging.getLogger("VppTestCase")
|
||||
null_logger.addHandler(logging.NullHandler())
|
||||
|
||||
PASS = 0
|
||||
FAIL = 1
|
||||
ERROR = 2
|
||||
SKIP = 3
|
||||
TEST_RUN = 4
|
||||
SKIP_CPU_SHORTAGE = 5
|
||||
|
||||
|
||||
if config.debug_framework:
|
||||
import debug_internal
|
||||
@ -1666,6 +1660,7 @@ class VppTestResult(unittest.TestResult):
|
||||
self.stream = stream
|
||||
self.descriptions = descriptions
|
||||
self.verbosity = verbosity
|
||||
self.result_code = TestResultCode.TEST_RUN
|
||||
self.result_string = None
|
||||
self.runner = runner
|
||||
self.printed = []
|
||||
@ -1677,15 +1672,25 @@ class VppTestResult(unittest.TestResult):
|
||||
:param test:
|
||||
|
||||
"""
|
||||
if self.current_test_case_info:
|
||||
self.current_test_case_info.logger.debug(
|
||||
"--- addSuccess() %s.%s(%s) called"
|
||||
% (test.__class__.__name__, test._testMethodName, test._testMethodDoc)
|
||||
)
|
||||
self.log_result("addSuccess", test)
|
||||
unittest.TestResult.addSuccess(self, test)
|
||||
self.result_string = colorize("OK", GREEN)
|
||||
self.result_code = TestResultCode.PASS
|
||||
self.send_result_through_pipe(test, self.result_code)
|
||||
|
||||
self.send_result_through_pipe(test, PASS)
|
||||
def addExpectedFailure(self, test, err):
    """Record a test that failed while marked @unittest.expectedFailure.

    An expected failure counts towards a successful run, hence the
    result string is colored green.

    :param test: the test case instance
    :param err: (exc_type, exc_value, traceback) tuple describing the failure
    """
    self.log_result("addExpectedFailure", test, err)
    # unittest.TestResult.addExpectedFailure takes (test, err)
    super().addExpectedFailure(test, err)
    self.result_string = colorize("FAIL", GREEN)
    self.result_code = TestResultCode.EXPECTED_FAIL
    self.send_result_through_pipe(test, self.result_code)
|
||||
|
||||
def addUnexpectedSuccess(self, test):
    """Record a test that passed despite being marked @unittest.expectedFailure.

    An unexpected pass is treated as a problem, hence the result
    string is colored red.

    :param test: the test case instance
    """
    self.log_result("addUnexpectedSuccess", test)
    super().addUnexpectedSuccess(test)
    self.result_string = colorize("OK", RED)
    self.result_code = TestResultCode.UNEXPECTED_PASS
    self.send_result_through_pipe(test, self.result_code)
|
||||
|
||||
def addSkip(self, test, reason):
|
||||
"""
|
||||
@ -1695,23 +1700,15 @@ class VppTestResult(unittest.TestResult):
|
||||
:param reason:
|
||||
|
||||
"""
|
||||
if self.current_test_case_info:
|
||||
self.current_test_case_info.logger.debug(
|
||||
"--- addSkip() %s.%s(%s) called, reason is %s"
|
||||
% (
|
||||
test.__class__.__name__,
|
||||
test._testMethodName,
|
||||
test._testMethodDoc,
|
||||
reason,
|
||||
)
|
||||
)
|
||||
self.log_result("addSkip", test, reason=reason)
|
||||
unittest.TestResult.addSkip(self, test, reason)
|
||||
self.result_string = colorize("SKIP", YELLOW)
|
||||
|
||||
if reason == "not enough cpus":
|
||||
self.send_result_through_pipe(test, SKIP_CPU_SHORTAGE)
|
||||
self.result_code = TestResultCode.SKIP_CPU_SHORTAGE
|
||||
else:
|
||||
self.send_result_through_pipe(test, SKIP)
|
||||
self.result_code = TestResultCode.SKIP
|
||||
self.send_result_through_pipe(test, self.result_code)
|
||||
|
||||
def symlink_failed(self):
|
||||
if self.current_test_case_info:
|
||||
@ -1743,7 +1740,7 @@ class VppTestResult(unittest.TestResult):
|
||||
if pipe:
|
||||
pipe.send((test.id(), result))
|
||||
|
||||
def log_error(self, test, err, fn_name):
|
||||
def log_result(self, fn, test, err=None, reason=None):
|
||||
if self.current_test_case_info:
|
||||
if isinstance(test, unittest.suite._ErrorHolder):
|
||||
test_name = test.description
|
||||
@ -1753,25 +1750,29 @@ class VppTestResult(unittest.TestResult):
|
||||
test._testMethodName,
|
||||
test._testMethodDoc,
|
||||
)
|
||||
extra_msg = ""
|
||||
if err:
|
||||
extra_msg += f", error is {err}"
|
||||
if reason:
|
||||
extra_msg += f", reason is {reason}"
|
||||
self.current_test_case_info.logger.debug(
|
||||
"--- %s() %s called, err is %s" % (fn_name, test_name, err)
|
||||
)
|
||||
self.current_test_case_info.logger.debug(
|
||||
"formatted exception is:\n%s" % "".join(format_exception(*err))
|
||||
f"--- {fn}() {test_name} called{extra_msg}"
|
||||
)
|
||||
if err:
|
||||
self.current_test_case_info.logger.debug(
|
||||
"formatted exception is:\n%s" % "".join(format_exception(*err))
|
||||
)
|
||||
|
||||
def add_error(self, test, err, unittest_fn, error_type):
|
||||
if error_type == FAIL:
|
||||
self.log_error(test, err, "addFailure")
|
||||
def add_error(self, test, err, unittest_fn, result_code):
|
||||
self.result_code = result_code
|
||||
if result_code == TestResultCode.FAIL:
|
||||
self.log_result("addFailure", test, err=err)
|
||||
error_type_str = colorize("FAIL", RED)
|
||||
elif error_type == ERROR:
|
||||
self.log_error(test, err, "addError")
|
||||
elif result_code == TestResultCode.ERROR:
|
||||
self.log_result("addError", test, err=err)
|
||||
error_type_str = colorize("ERROR", RED)
|
||||
else:
|
||||
raise Exception(
|
||||
"Error type %s cannot be used to record an "
|
||||
"error or a failure" % error_type
|
||||
)
|
||||
raise Exception(f"Unexpected result code {result_code}")
|
||||
|
||||
unittest_fn(self, test, err)
|
||||
if self.current_test_case_info:
|
||||
@ -1794,7 +1795,7 @@ class VppTestResult(unittest.TestResult):
|
||||
else:
|
||||
self.result_string = "%s [no temp dir]" % error_type_str
|
||||
|
||||
self.send_result_through_pipe(test, error_type)
|
||||
self.send_result_through_pipe(test, result_code)
|
||||
|
||||
def addFailure(self, test, err):
|
||||
"""
|
||||
@ -1804,7 +1805,7 @@ class VppTestResult(unittest.TestResult):
|
||||
:param err: error message
|
||||
|
||||
"""
|
||||
self.add_error(test, err, unittest.TestResult.addFailure, FAIL)
|
||||
self.add_error(test, err, unittest.TestResult.addFailure, TestResultCode.FAIL)
|
||||
|
||||
def addError(self, test, err):
|
||||
"""
|
||||
@ -1814,7 +1815,7 @@ class VppTestResult(unittest.TestResult):
|
||||
:param err: error message
|
||||
|
||||
"""
|
||||
self.add_error(test, err, unittest.TestResult.addError, ERROR)
|
||||
self.add_error(test, err, unittest.TestResult.addError, TestResultCode.ERROR)
|
||||
|
||||
def getDescription(self, test):
|
||||
"""
|
||||
@ -1907,23 +1908,40 @@ class VppTestResult(unittest.TestResult):
|
||||
"""
|
||||
unittest.TestResult.stopTest(self, test)
|
||||
|
||||
result_code_to_suffix = {
|
||||
TestResultCode.PASS: "",
|
||||
TestResultCode.FAIL: "",
|
||||
TestResultCode.ERROR: "",
|
||||
TestResultCode.SKIP: "",
|
||||
TestResultCode.TEST_RUN: "",
|
||||
TestResultCode.SKIP_CPU_SHORTAGE: "",
|
||||
TestResultCode.EXPECTED_FAIL: " [EXPECTED FAIL]",
|
||||
TestResultCode.UNEXPECTED_PASS: " [UNEXPECTED PASS]",
|
||||
}
|
||||
|
||||
if self.verbosity > 0:
|
||||
self.stream.writeln(single_line_delim)
|
||||
self.stream.writeln(
|
||||
"%-73s%s" % (self.getDescription(test), self.result_string)
|
||||
"%-72s%s%s"
|
||||
% (
|
||||
self.getDescription(test),
|
||||
self.result_string,
|
||||
result_code_to_suffix[self.result_code],
|
||||
)
|
||||
)
|
||||
self.stream.writeln(single_line_delim)
|
||||
else:
|
||||
self.stream.writeln(
|
||||
"%-68s %4.2f %s"
|
||||
"%-67s %4.2f %s%s"
|
||||
% (
|
||||
self.getDescription(test),
|
||||
time.time() - self.start_test,
|
||||
self.result_string,
|
||||
result_code_to_suffix[self.result_code],
|
||||
)
|
||||
)
|
||||
|
||||
self.send_result_through_pipe(test, TEST_RUN)
|
||||
self.send_result_through_pipe(test, TestResultCode.TEST_RUN)
|
||||
|
||||
def printErrors(self):
|
||||
"""
|
||||
|
@ -19,13 +19,8 @@ from framework import (
|
||||
VppTestCase,
|
||||
get_testcase_doc_name,
|
||||
get_test_description,
|
||||
PASS,
|
||||
FAIL,
|
||||
ERROR,
|
||||
SKIP,
|
||||
TEST_RUN,
|
||||
SKIP_CPU_SHORTAGE,
|
||||
)
|
||||
from test_result_code import TestResultCode
|
||||
from debug import spawn_gdb
|
||||
from log import (
|
||||
get_parallel_logger,
|
||||
@ -70,12 +65,8 @@ StreamQueueManager.register("StreamQueue", StreamQueue)
|
||||
class TestResult(dict):
|
||||
def __init__(self, testcase_suite, testcases_by_id=None):
|
||||
super(TestResult, self).__init__()
|
||||
self[PASS] = []
|
||||
self[FAIL] = []
|
||||
self[ERROR] = []
|
||||
self[SKIP] = []
|
||||
self[SKIP_CPU_SHORTAGE] = []
|
||||
self[TEST_RUN] = []
|
||||
for trc in list(TestResultCode):
|
||||
self[trc] = []
|
||||
self.crashed = False
|
||||
self.testcase_suite = testcase_suite
|
||||
self.testcases = [testcase for testcase in testcase_suite]
|
||||
@ -83,13 +74,19 @@ class TestResult(dict):
|
||||
|
||||
def was_successful(self):
    """Return True if the suite ran with no unexpected outcomes.

    Success requires zero failures, errors and unexpected passes, and
    that every scheduled test is accounted for as a pass, a skip
    (including CPU-shortage skips) or an expected failure.
    """
    return (
        0
        == len(self[TestResultCode.FAIL])
        == len(self[TestResultCode.ERROR])
        == len(self[TestResultCode.UNEXPECTED_PASS])
        and len(self[TestResultCode.PASS])
        + len(self[TestResultCode.SKIP])
        + len(self[TestResultCode.SKIP_CPU_SHORTAGE])
        + len(self[TestResultCode.EXPECTED_FAIL])
        == self.testcase_suite.countTestCases()
    )
||||
|
||||
def no_tests_run(self):
    """Return True if no test in the suite was actually started."""
    return 0 == len(self[TestResultCode.TEST_RUN])
|
||||
|
||||
def process_result(self, test_id, result):
|
||||
self[result].append(test_id)
|
||||
@ -98,7 +95,13 @@ class TestResult(dict):
|
||||
rerun_ids = set([])
|
||||
for testcase in self.testcase_suite:
|
||||
tc_id = testcase.id()
|
||||
if tc_id not in self[PASS] + self[SKIP] + self[SKIP_CPU_SHORTAGE]:
|
||||
if (
|
||||
tc_id
|
||||
not in self[TestResultCode.PASS]
|
||||
+ self[TestResultCode.SKIP]
|
||||
+ self[TestResultCode.SKIP_CPU_SHORTAGE]
|
||||
+ self[TestResultCode.EXPECTED_FAIL]
|
||||
):
|
||||
rerun_ids.add(tc_id)
|
||||
if rerun_ids:
|
||||
return suite_from_failed(self.testcase_suite, rerun_ids)
|
||||
@ -518,7 +521,7 @@ def run_forked(testcase_suites):
|
||||
pass
|
||||
wrapped_testcase_suite.result.crashed = True
|
||||
wrapped_testcase_suite.result.process_result(
|
||||
wrapped_testcase_suite.last_test_id, ERROR
|
||||
wrapped_testcase_suite.last_test_id, TestResultCode.ERROR
|
||||
)
|
||||
stop_run = (
|
||||
process_finished_testsuite(
|
||||
@ -735,20 +738,15 @@ class AllResults(dict):
|
||||
super(AllResults, self).__init__()
|
||||
self.all_testcases = 0
|
||||
self.results_per_suite = []
|
||||
self[PASS] = 0
|
||||
self[FAIL] = 0
|
||||
self[ERROR] = 0
|
||||
self[SKIP] = 0
|
||||
self[SKIP_CPU_SHORTAGE] = 0
|
||||
self[TEST_RUN] = 0
|
||||
for trc in list(TestResultCode):
|
||||
self[trc] = 0
|
||||
self.rerun = []
|
||||
self.testsuites_no_tests_run = []
|
||||
|
||||
def add_results(self, result):
    """Merge one suite's result counts into the aggregate totals.

    :param result: per-suite TestResult (a dict keyed by TestResultCode)
    """
    self.results_per_suite.append(result)
    # every TestResultCode bucket is tallied, so new codes need no edits here
    for trc in list(TestResultCode):
        self[trc] += len(result[trc])
|
||||
|
||||
def add_result(self, result):
|
||||
retval = 0
|
||||
@ -785,19 +783,29 @@ class AllResults(dict):
|
||||
indent_results(
|
||||
[
|
||||
f"Scheduled tests: {self.all_testcases}",
|
||||
f"Executed tests: {self[TEST_RUN]}",
|
||||
f"Passed tests: {colorize(self[PASS], GREEN)}",
|
||||
f"Skipped tests: {colorize(self[SKIP], YELLOW)}"
|
||||
if self[SKIP]
|
||||
f"Executed tests: {self[TestResultCode.TEST_RUN]}",
|
||||
f"Passed tests: {colorize(self[TestResultCode.PASS], GREEN)}",
|
||||
f"Expected failures: {colorize(self[TestResultCode.EXPECTED_FAIL], GREEN)}"
|
||||
if self[TestResultCode.EXPECTED_FAIL]
|
||||
else None,
|
||||
f"Skipped tests: {colorize(self[TestResultCode.SKIP], YELLOW)}"
|
||||
if self[TestResultCode.SKIP]
|
||||
else None,
|
||||
f"Not Executed tests: {colorize(self.not_executed, RED)}"
|
||||
if self.not_executed
|
||||
else None,
|
||||
f"Failures: {colorize(self[FAIL], RED)}" if self[FAIL] else None,
|
||||
f"Errors: {colorize(self[ERROR], RED)}" if self[ERROR] else None,
|
||||
f"Failures: {colorize(self[TestResultCode.FAIL], RED)}"
|
||||
if self[TestResultCode.FAIL]
|
||||
else None,
|
||||
f"Unexpected passes: {colorize(self[TestResultCode.UNEXPECTED_PASS], RED)}"
|
||||
if self[TestResultCode.UNEXPECTED_PASS]
|
||||
else None,
|
||||
f"Errors: {colorize(self[TestResultCode.ERROR], RED)}"
|
||||
if self[TestResultCode.ERROR]
|
||||
else None,
|
||||
"Tests skipped due to lack of CPUS: "
|
||||
f"{colorize(self[SKIP_CPU_SHORTAGE], YELLOW)}"
|
||||
if self[SKIP_CPU_SHORTAGE]
|
||||
f"{colorize(self[TestResultCode.SKIP_CPU_SHORTAGE], YELLOW)}"
|
||||
if self[TestResultCode.SKIP_CPU_SHORTAGE]
|
||||
else None,
|
||||
]
|
||||
)
|
||||
@ -805,43 +813,28 @@ class AllResults(dict):
|
||||
if self.all_failed > 0:
|
||||
print("FAILURES AND ERRORS IN TESTS:")
|
||||
for result in self.results_per_suite:
|
||||
failed_testcase_ids = result[FAIL]
|
||||
errored_testcase_ids = result[ERROR]
|
||||
old_testcase_name = None
|
||||
if failed_testcase_ids:
|
||||
for failed_test_id in failed_testcase_ids:
|
||||
for tr_code, headline in (
|
||||
(TestResultCode.FAIL, "FAILURE"),
|
||||
(TestResultCode.ERROR, "ERROR"),
|
||||
(TestResultCode.UNEXPECTED_PASS, "UNEXPECTED PASS"),
|
||||
):
|
||||
if not result[tr_code]:
|
||||
continue
|
||||
|
||||
for failed_test_id in result[tr_code]:
|
||||
new_testcase_name, test_name = result.get_testcase_names(
|
||||
failed_test_id
|
||||
)
|
||||
if new_testcase_name != old_testcase_name:
|
||||
print(
|
||||
" Testcase name: {}".format(
|
||||
colorize(new_testcase_name, RED)
|
||||
)
|
||||
f" Testcase name: {colorize(new_testcase_name, RED)}"
|
||||
)
|
||||
old_testcase_name = new_testcase_name
|
||||
print(
|
||||
" FAILURE: {} [{}]".format(
|
||||
colorize(test_name, RED), failed_test_id
|
||||
)
|
||||
)
|
||||
if errored_testcase_ids:
|
||||
for errored_test_id in errored_testcase_ids:
|
||||
new_testcase_name, test_name = result.get_testcase_names(
|
||||
errored_test_id
|
||||
)
|
||||
if new_testcase_name != old_testcase_name:
|
||||
print(
|
||||
" Testcase name: {}".format(
|
||||
colorize(new_testcase_name, RED)
|
||||
)
|
||||
)
|
||||
old_testcase_name = new_testcase_name
|
||||
print(
|
||||
" ERROR: {} [{}]".format(
|
||||
colorize(test_name, RED), errored_test_id
|
||||
)
|
||||
f" {headline}: {colorize(test_name, RED)} [{failed_test_id}]"
|
||||
)
|
||||
|
||||
if self.testsuites_no_tests_run:
|
||||
print("TESTCASES WHERE NO TESTS WERE SUCCESSFULLY EXECUTED:")
|
||||
tc_classes = set()
|
||||
@ -851,7 +844,7 @@ class AllResults(dict):
|
||||
for tc_class in tc_classes:
|
||||
print(" {}".format(colorize(tc_class, RED)))
|
||||
|
||||
if self[SKIP_CPU_SHORTAGE]:
|
||||
if self[TestResultCode.SKIP_CPU_SHORTAGE]:
|
||||
print()
|
||||
print(
|
||||
colorize(
|
||||
@ -865,11 +858,15 @@ class AllResults(dict):
|
||||
|
||||
@property
def not_executed(self):
    """Number of scheduled tests that were never started."""
    return self.all_testcases - self[TestResultCode.TEST_RUN]
|
||||
|
||||
@property
def all_failed(self):
    """Total count of problem outcomes: failures, errors and unexpected passes."""
    return (
        self[TestResultCode.FAIL]
        + self[TestResultCode.ERROR]
        + self[TestResultCode.UNEXPECTED_PASS]
    )
|
||||
|
||||
|
||||
def parse_results(results):
|
||||
|
15
test/test_result_code.py
Normal file
15
test/test_result_code.py
Normal file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
from enum import IntEnum, auto, unique
|
||||
|
||||
|
||||
@unique
class TestResultCode(IntEnum):
    """Per-test result codes reported by the test framework."""

    PASS = auto()
    FAIL = auto()
    ERROR = auto()
    SKIP = auto()
    TEST_RUN = auto()  # test was started; used for executed-count bookkeeping
    SKIP_CPU_SHORTAGE = auto()  # skipped because not enough CPUs were available
    EXPECTED_FAIL = auto()  # failed, but marked @unittest.expectedFailure
    UNEXPECTED_PASS = auto()  # passed despite @unittest.expectedFailure
|
Loading…
x
Reference in New Issue
Block a user