Fix logging to file and summary for crashed tests
Log messages were not being written to log.txt, and crashed tests were
not reported properly when running tests in one process.

Change-Id: Ica2d703d88351cc8a94bad86764ae4376dc79590
Signed-off-by: juraj.linkes <juraj.linkes@pantheon.tech>

committed by Dave Barach

parent ffb3813dd3
commit 0219b8dfbf
@@ -957,6 +957,7 @@ class VppTestResult(unittest.TestResult):
         self.verbosity = verbosity
         self.result_string = None
         self.printer = TestCasePrinter()
+        self.passed = 0

     def addSuccess(self, test):
         """
@@ -970,6 +971,7 @@ class VppTestResult(unittest.TestResult):
                          % (test.__class__.__name__,
                             test._testMethodName,
                             test._testMethodDoc))
+        self.passed += 1
         unittest.TestResult.addSuccess(self, test)
         self.result_string = colorize("OK", GREEN)

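The two hunks above add a simple pass counter to the result class. A minimal
standalone sketch of the same pattern (not the VPP framework code itself;
CountingResult and Demo are illustrative names):

    import unittest

    class CountingResult(unittest.TestResult):
        def __init__(self, *args, **kwargs):
            super(CountingResult, self).__init__(*args, **kwargs)
            self.passed = 0

        def addSuccess(self, test):
            # count the pass, then delegate to the stock bookkeeping
            self.passed += 1
            unittest.TestResult.addSuccess(self, test)

    class Demo(unittest.TestCase):
        def test_ok(self):
            self.assertTrue(True)

    if __name__ == '__main__':
        result = CountingResult()
        unittest.TestLoader().loadTestsFromTestCase(Demo).run(result)
        print('passed: %d' % result.passed)  # -> passed: 1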
@@ -59,6 +59,7 @@ def getLogger(name):
 def get_parallel_logger(stream):
     logger = logging.getLogger('parallel_logger_{}'.format(stream))
     logger.propagate = False
+    logger.setLevel(logging.DEBUG)
     handler = logging.StreamHandler(stream)
     handler.setFormatter(color_formatter)
     handler.setLevel(log_level)
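If logger.setLevel(logging.DEBUG) is indeed the added line (my reading of the
hunk), the fix follows from how the logging module resolves levels: a fresh
logger is created at NOTSET and inherits the root logger's WARNING threshold,
so DEBUG and INFO records are discarded before any handler, including a
log.txt file handler, ever sees them. A standalone sketch:

    import logging
    import sys

    broken = logging.getLogger('demo.broken')
    broken.propagate = False
    broken.addHandler(logging.StreamHandler(sys.stdout))
    broken.debug('invisible')      # dropped: effective level is WARNING

    fixed = logging.getLogger('demo.fixed')
    fixed.propagate = False
    fixed.setLevel(logging.DEBUG)  # the equivalent of the added line
    fixed.addHandler(logging.StreamHandler(sys.stdout))
    fixed.debug('visible')         # reaches the handler and is printed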
@@ -56,6 +56,7 @@ def test_runner_wrapper(suite, keep_alive_pipe, result_pipe, stdouterr_queue,
     sys.stdout = stdouterr_queue
     sys.stderr = stdouterr_queue
     VppTestCase.logger = logger
+    unittest.installHandler()
     result = VppTestRunner(keep_alive_pipe=keep_alive_pipe,
                            descriptions=descriptions,
                            verbosity=verbose,
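The added unittest.installHandler(), paired with the parent's switch from
terminate() to SIGINT further down, lets the child shut down through
unittest's graceful-interrupt machinery instead of being killed mid-test.
A standalone sketch of that machinery (Quick is an illustrative test case):

    import unittest

    class Quick(unittest.TestCase):
        def test_one(self):
            pass

    if __name__ == '__main__':
        unittest.installHandler()        # install unittest's SIGINT handler
        result = unittest.TestResult()
        unittest.registerResult(result)  # first SIGINT calls result.stop()
        suite = unittest.TestLoader().loadTestsFromTestCase(Quick)
        # a supervising parent could send os.kill(child_pid, signal.SIGINT);
        # the run then ends after the current test with a usable result
        suite.run(result)
        print('testsRun=%d' % result.testsRun)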
@@ -87,6 +88,8 @@ class TestCaseWrapper(object):
         self.last_heard = time.time()
         self.core_detected_at = None
         self.failed_tests = []
+        self.fail = False
+        self.fail_addressed = False

     def close_pipes(self):
         self.keep_alive_child_end.close()
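The two new flags read like a once-only latch: fail records that the child is
considered hung or dead, and fail_addressed keeps the expensive cleanup
(archiving the temp dir, spawning gdb, killing the child) from re-running on
every pass of the polling loop. A minimal sketch under that assumption:

    class SuiteWrapper(object):
        def __init__(self):
            self.fail = False            # child considered hung or dead
            self.fail_addressed = False  # cleanup already performed?

    def supervise_once(wrapper, events):
        if wrapper.fail and not wrapper.fail_addressed:
            events.append('cleanup')     # runs exactly once per failed suite
            wrapper.fail_addressed = True

    events = []
    w = SuiteWrapper()
    w.fail = True
    supervise_once(w, events)
    supervise_once(w, events)            # second poll is a no-op
    assert events == ['cleanup']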
@@ -167,44 +170,48 @@ def run_forked(testcases):
                     wrapped_testcase_suite.keep_alive_parent_end.recv()
                 wrapped_testcase_suite.last_heard = time.time()

-            fail = False
-            if wrapped_testcase_suite.last_heard + test_timeout < time.time() \
-                    and not os.path.isfile(
-                        "%s/_core_handled" %
-                        wrapped_testcase_suite.last_test_temp_dir):
-                fail = True
-                wrapped_testcase_suite.logger.critical(
-                    "Timeout while waiting for child test "
-                    "runner process (last test running was "
-                    "`%s' in `%s')!" %
-                    (wrapped_testcase_suite.last_test,
-                     wrapped_testcase_suite.last_test_temp_dir))
-            elif not wrapped_testcase_suite.child.is_alive():
-                fail = True
-                wrapped_testcase_suite.logger.critical(
-                    "Child python process unexpectedly died "
-                    "(last test running was `%s' in `%s')!" %
-                    (wrapped_testcase_suite.last_test,
-                     wrapped_testcase_suite.last_test_temp_dir))
-            elif wrapped_testcase_suite.last_test_temp_dir and \
-                    wrapped_testcase_suite.last_test_vpp_binary:
-                core_path = "%s/core" % \
-                    wrapped_testcase_suite.last_test_temp_dir
-                if os.path.isfile(core_path):
-                    if wrapped_testcase_suite.core_detected_at is None:
-                        wrapped_testcase_suite.core_detected_at = time.time()
-                    elif wrapped_testcase_suite.core_detected_at + \
-                            core_timeout < time.time():
-                        if not os.path.isfile(
-                                "%s/_core_handled" %
-                                wrapped_testcase_suite.
-                                last_test_temp_dir):
-                            wrapped_testcase_suite.logger.critical(
-                                "Child python process unresponsive and core-"
-                                "file exists in test temporary directory!")
-                            fail = True
+            if not wrapped_testcase_suite.fail:
+                if wrapped_testcase_suite.last_heard + \
+                        test_timeout < time.time() and \
+                        not os.path.isfile(
+                            "%s/_core_handled" %
+                            wrapped_testcase_suite.last_test_temp_dir):
+                    wrapped_testcase_suite.fail = True
+                    wrapped_testcase_suite.logger.critical(
+                        "Timeout while waiting for child test "
+                        "runner process (last test running was "
+                        "`%s' in `%s')!" %
+                        (wrapped_testcase_suite.last_test,
+                         wrapped_testcase_suite.last_test_temp_dir))
+                elif not wrapped_testcase_suite.child.is_alive():
+                    wrapped_testcase_suite.fail = True
+                    wrapped_testcase_suite.logger.critical(
+                        "Child python process unexpectedly died "
+                        "(last test running was `%s' in `%s')!" %
+                        (wrapped_testcase_suite.last_test,
+                         wrapped_testcase_suite.last_test_temp_dir))
+                elif wrapped_testcase_suite.last_test_temp_dir and \
+                        wrapped_testcase_suite.last_test_vpp_binary:
+                    core_path = "%s/core" % \
+                        wrapped_testcase_suite.last_test_temp_dir
+                    if os.path.isfile(core_path):
+                        if wrapped_testcase_suite.core_detected_at is None:
+                            wrapped_testcase_suite.core_detected_at = \
+                                time.time()
+                        elif wrapped_testcase_suite.core_detected_at + \
+                                core_timeout < time.time():
+                            if not os.path.isfile(
+                                    "%s/_core_handled" %
+                                    wrapped_testcase_suite.
+                                    last_test_temp_dir):
+                                wrapped_testcase_suite.logger.critical(
+                                    "Child python process unresponsive and "
+                                    "core-file exists in test temporary "
+                                    "directory!")
+                                wrapped_testcase_suite.fail = True

-            if fail:
+            if wrapped_testcase_suite.fail and not \
+                    wrapped_testcase_suite.fail_addressed:
                 failed_dir = os.getenv('VPP_TEST_FAILED_DIR')
                 lttd = os.path.basename(
                     wrapped_testcase_suite.last_test_temp_dir)
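Condensed, the three liveness checks the hunk rewrites are: keep-alive
timeout, dead child process, and a core file that has outlived its grace
period. A standalone sketch with the process and pipe plumbing omitted
(w stands in for the wrapper object; the real code also checks
last_test_vpp_binary):

    import os
    import time

    def check_suite(w, test_timeout, core_timeout):
        if w.fail:
            return
        handled = os.path.isfile('%s/_core_handled' % w.last_test_temp_dir)
        if w.last_heard + test_timeout < time.time() and not handled:
            w.fail = True     # child stopped sending keep-alives in time
        elif not w.child.is_alive():
            w.fail = True     # child process died
        elif os.path.isfile('%s/core' % w.last_test_temp_dir):
            if w.core_detected_at is None:
                w.core_detected_at = time.time()   # start the grace period
            elif w.core_detected_at + core_timeout < time.time() \
                    and not handled:
                w.fail = True  # core file present and never handled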
@@ -248,7 +255,7 @@ def run_forked(testcases):
                     spawn_gdb(
                         wrapped_testcase_suite.last_test_vpp_binary,
                         core_path, wrapped_testcase_suite.logger)
-                wrapped_testcase_suite.child.terminate()
+                os.kill(wrapped_testcase_suite.child.pid, signal.SIGINT)
                 try:
                     # terminating the child process tends to leave orphan
                     # VPP process around
@@ -256,8 +263,7 @@ def run_forked(testcases):
                 except OSError:
                     # already dead
                     pass
-                results.append((wrapped_testcase_suite.testcase_suite, None))
-                finished_testcase_suites.add(wrapped_testcase_suite)
+                wrapped_testcase_suite.fail_addressed = True

         for finished_testcase in finished_testcase_suites:
             finished_testcase.child.join()
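This is the parent-side counterpart of installHandler() in the child: SIGINT
triggers unittest's handler so the run can stop cleanly and report partial
results, where terminate() (SIGTERM) would kill the child outright. A sketch,
assuming child is a multiprocessing.Process:

    import os
    import signal

    def stop_child_gracefully(child):
        os.kill(child.pid, signal.SIGINT)  # handled by unittest in the child
        child.join()                       # reap the child once it exits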
@@ -395,9 +401,9 @@ class NonPassedResults(dict):
         self.expectedFailures_id = 'expectedFailures'
         self.unexpectedSuccesses_id = 'unexpectedSuccesses'
         self.rerun = []
+        self.passed = 0
         self[self.failures_id] = 0
         self[self.errors_id] = 0
         self[self.crashes_id] = 0
         self[self.skipped_id] = 0
         self[self.expectedFailures_id] = 0
         self[self.unexpectedSuccesses_id] = 0
@@ -413,7 +419,6 @@ class NonPassedResults(dict):
             self.results_per_suite[tc_class] = \
                 {self.failures_id: [],
                  self.errors_id: [],
-                 self.crashes_id: [],
                  self.skipped_id: [],
                  self.expectedFailures_id: [],
                  self.unexpectedSuccesses_id: []}
@@ -421,39 +426,28 @@ class NonPassedResults(dict):
                 return True
         return False

-    def add_results(self, testcases, testcase_result,
-                    duplicates=None):
+    def add_results(self, testcases, testcase_result_id):
         for failed_testcase, _ in testcases:
-            if self._add_result(failed_testcase, testcase_result):
-                if duplicates:
-                    if failed_testcase not in duplicates:
-                        self[testcase_result] += 1
-                else:
-                    self[testcase_result] += 1
+            if self._add_result(failed_testcase, testcase_result_id):
+                self[testcase_result_id] += 1

     def add_result(self, testcase_suite, result):
         retval = 0
-        self.all_testcases += testcase_suite.countTestCases()
+        self.all_testcases += result.testsRun
+        self.passed += result.passed
         if result:
             # suite finished properly
             if not result.wasSuccessful():
                 retval = 1

             self.add_results(result.failures, self.failures_id)
-            self.add_results(result.errors, self.errors_id,
-                             result.failures + result.errors)
+            self.add_results(result.errors, self.errors_id)
             self.add_results(result.skipped, self.skipped_id)
             self.add_results(result.expectedFailures,
                              self.expectedFailures_id)
             self.add_results(result.unexpectedSuccesses,
                              self.unexpectedSuccesses_id)
-
-        else:
-            # suite crashed
-            retval = -1
-            self.add_results([(x, None) for x in testcase_suite],
-                             self.crashes_id)

         if retval != 0:
             if concurrent_tests == 1:
                 if result:
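One reading of the add_results() simplification: _add_result() already
answers whether the test was newly recorded for its suite, so the old
duplicates= filter was redundant. A standalone sketch of that contract
(illustrative names, not the framework code):

    class Results(dict):
        def __init__(self):
            super(Results, self).__init__()
            self.results_per_suite = {}

        def _add_result(self, test, result_id):
            suite = self.results_per_suite.setdefault(
                test.__class__.__name__, {})
            recorded = suite.setdefault(result_id, [])
            if test not in recorded:   # newly recorded -> caller counts it
                recorded.append(test)
                return True
            return False

        def add_results(self, testcases, result_id):
            for failed_testcase, _ in testcases:
                if self._add_result(failed_testcase, result_id):
                    self[result_id] = self.get(result_id, 0) + 1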
@@ -476,17 +470,13 @@ class NonPassedResults(dict):
         print('TEST RESULTS:')
         print('     Executed tests: {}'.format(self.all_testcases))
         print('       Passed tests: {}'.format(
-            colorize(str(self.all_testcases -
-                         self.all_nonpassed), GREEN)))
+            colorize(str(self.passed), GREEN)))
         if self[self.failures_id] > 0:
-            print('       Failed tests: {}'.format(
+            print('           Failures: {}'.format(
                 colorize(str(self[self.failures_id]), RED)))
         if self[self.errors_id] > 0:
-            print('      Errored tests: {}'.format(
+            print('             Errors: {}'.format(
                 colorize(str(self[self.errors_id]), RED)))
-        if self[self.crashes_id] > 0:
-            print('      Crashed tests: {}'.format(
-                colorize(str(self[self.crashes_id]), RED)))
         if self[self.skipped_id] > 0:
             print('      Skipped tests: {}'.format(
                 colorize(str(self[self.skipped_id]), YELLOW)))
@@ -505,10 +495,7 @@
                     self.failures_id]
                 errored_testcases = suite_results[
                     self.errors_id]
-                crashed_testcases = suite_results[
-                    self.crashes_id]
-                if len(failed_testcases) or len(errored_testcases) \
-                        or len(crashed_testcases):
+                if len(failed_testcases) or len(errored_testcases):
                     print('   Testcase name: {}'.format(
                         colorize(testcase_class, RED)))
                     for failed_test in failed_testcases:
@@ -519,25 +506,13 @@
                     print('        ERRORED: {}'.format(
                         colorize(get_test_description(
                             descriptions, failed_test), RED)))
-                for failed_test in crashed_testcases:
-                    print('        CRASHED: {}'.format(
-                        colorize(get_test_description(
-                            descriptions, failed_test), RED)))

         print(double_line_delim)
         print('')

-    @property
-    def all_nonpassed(self):
-        return self[self.failures_id] + self[self.errors_id] + \
-            self[self.crashes_id] + self[self.skipped_id] + \
-            self[self.expectedFailures_id] + \
-            self[self.unexpectedSuccesses_id]
-
     @property
     def all_failed(self):
-        return self[self.failures_id] + self[self.errors_id] + \
-            self[self.crashes_id]
+        return self[self.failures_id] + self[self.errors_id]


 def parse_results(results):