Treat tests that reach the timeout as failed
diff --git a/gtest_parallel.py b/gtest_parallel.py
index d682dbe..ac16db6 100755
--- a/gtest_parallel.py
+++ b/gtest_parallel.py
@@ -281,13 +281,13 @@
with self.lock:
self.started[task.task_id] = task
- def __register_exit(self, task):
+ def register_exit(self, task):
self.logger.log_exit(task)
self.times.record_test_time(task.test_binary, task.test_name,
task.last_execution_time)
if self.test_results:
self.test_results.log(task.test_name, task.runtime_ms / 1000.0,
- "PASS" if task.exit_code == 0 else "FAIL")
+ task.exit_code)
with self.lock:
self.started.pop(task.task_id)
@@ -300,7 +300,7 @@
for try_number in range(self.times_to_retry + 1):
self.__register_start(task)
task.run()
- self.__register_exit(task)
+ self.register_exit(task)
if task.exit_code == 0:
break
@@ -369,10 +369,15 @@
with open(task.log_file) as f:
for line in f.readlines():
self.out.permanent_line(line.rstrip())
- self.out.permanent_line(
- "[%d/%d] %s returned/aborted with exit code %d (%d ms)" %
- (self.finished_tasks, self.total_tasks, task.test_name,
- task.exit_code, task.runtime_ms))
+ if task.exit_code is None:
+ self.out.permanent_line("[%d/%d] %s aborted after %d ms" %
+ (self.finished_tasks, self.total_tasks,
+ task.test_name, task.runtime_ms))
+ else:
+ self.out.permanent_line(
+ "[%d/%d] %s returned with exit code %d (%d ms)" %
+ (self.finished_tasks, self.total_tasks, task.test_name,
+ task.exit_code, task.runtime_ms))
if self.output_dir is None:
# Try to remove the file 100 times (sleeping for 0.1 second in between).
@@ -446,11 +451,18 @@
"num_failures_by_type": {
"PASS": 0,
"FAIL": 0,
+ "TIMEOUT": 0,
},
"tests": {},
}
- def log(self, test, runtime_seconds, actual_result):
+ def log(self, test, runtime_seconds, exit_code):
+ if exit_code is None:
+ actual_result = "TIMEOUT"
+ elif exit_code == 0:
+ actual_result = "PASS"
+ else:
+ actual_result = "FAIL"
with self.test_results_lock:
self.test_results['num_failures_by_type'][actual_result] += 1
results = self.test_results['tests']
@@ -652,7 +664,7 @@
return sorted(tasks, reverse=True)
-def execute_tasks(tasks, pool_size, task_manager, timeout,
+def execute_tasks(tasks, pool_size, task_manager, timeout_seconds,
serialize_test_cases):
class WorkerFn(object):
def __init__(self, tasks, running_groups):
@@ -693,8 +705,10 @@
t.start()
return t
+ timeout = None
try:
- if timeout:
+ if timeout_seconds:
+ timeout = threading.Timer(timeout_seconds, sigint_handler.interrupt)
timeout.start()
running_groups = set() if serialize_test_cases else None
worker_fn = WorkerFn(tasks, running_groups)
@@ -704,6 +718,9 @@
finally:
if timeout:
timeout.cancel()
+ for task in list(task_manager.started.values()):
+ task.runtime_ms = timeout_seconds * 1000
+ task_manager.register_exit(task)
def default_options_parser():
@@ -838,10 +855,6 @@
if e.errno != errno.EEXIST or not os.path.isdir(options.output_dir):
raise e
- timeout = None
- if options.timeout is not None:
- timeout = threading.Timer(options.timeout, sigint_handler.interrupt)
-
test_results = None
if options.dump_json_test_results is not None:
test_results = CollectTestResults(options.dump_json_test_results)
@@ -856,7 +869,7 @@
tasks = find_tests(binaries, additional_args, options, times)
logger.log_tasks(len(tasks))
- execute_tasks(tasks, options.workers, task_manager, timeout,
+ execute_tasks(tasks, options.workers, task_manager, options.timeout,
options.serialize_test_cases)
print_try_number = options.retry_failed > 0 or options.repeat > 1
diff --git a/gtest_parallel_mocks.py b/gtest_parallel_mocks.py
index d439148..e4822bb 100644
--- a/gtest_parallel_mocks.py
+++ b/gtest_parallel_mocks.py
@@ -73,7 +73,7 @@
def assertRecorded(self, test_id, expected, retries):
test_results = [
- (test_id[1], runtime_ms, 'PASS' if exit_code == 0 else 'FAIL')
+ (test_id[1], runtime_ms / 1000.0, exit_code)
for runtime_ms, exit_code in zip(expected['runtime_ms'][:retries],
expected['exit_code'][:retries])
]
@@ -88,8 +88,16 @@
self.had_running_parallel_groups = False
self.total_tasks_run = 0
+ self.started = {}
+
+ def __register_start(self, task):
+ self.started[task.task_id] = task
+
+ def register_exit(self, task):
+ self.started.pop(task.task_id)
def run_task(self, task):
+ self.__register_start(task)
test_group = task.test_name.split('.')[0]
with self.check_lock:
diff --git a/gtest_parallel_tests.py b/gtest_parallel_tests.py
index 7d8f0a0..df26943 100755
--- a/gtest_parallel_tests.py
+++ b/gtest_parallel_tests.py
@@ -321,6 +321,21 @@
worker.join()
+class TestTimeoutTestCases(unittest.TestCase):
+ def test_task_timeout(self):
+ timeout = 1
+ task = gtest_parallel.Task('test_binary', 'test_name', ['test_command'], 1,
+ None, 'output_dir')
+ tasks = [task]
+
+ task_manager = TaskManagerMock()
+ gtest_parallel.execute_tasks(tasks, 1, task_manager, timeout, True)
+
+ self.assertEqual(1, task_manager.total_tasks_run)
+ self.assertEqual(None, task.exit_code)
+ self.assertEqual(1000, task.runtime_ms)
+
+
class TestTask(unittest.TestCase):
def test_log_file_names(self):
def root():
@@ -416,6 +431,9 @@
def write(*args):
pass
+ def flush(*args):
+ pass
+
with guard_temp_dir() as temp_dir, \
guard_patch_module('sys.stdout', StdoutMock()):
logger = gtest_parallel.FilterFormat(None if drop_output else temp_dir)