[lit] Add a single process mode.

This is helpful for debugging test failures since it removes
the multiprocessing pool from the picture.  This will obviously
slow down the test suite by a few orders of magnitude, so it
should only be used for debugging specific failures.

llvm-svn: 313460
This commit is contained in:
Zachary Turner 2017-09-16 15:31:34 +00:00
parent 762abff698
commit 42b6dcbcef
3 changed files with 63 additions and 50 deletions

View File

@@ -21,7 +21,7 @@ class LitConfig(object):
def __init__(self, progname, path, quiet,
useValgrind, valgrindLeakCheck, valgrindArgs,
noExecute, debug, isWindows,
noExecute, debug, isWindows, singleProcess,
params, config_prefix = None,
maxIndividualTestTime = 0,
maxFailures = None,
@@ -37,6 +37,7 @@ class LitConfig(object):
self.valgrindUserArgs = list(valgrindArgs)
self.noExecute = noExecute
self.debug = debug
self.singleProcess = singleProcess
self.isWindows = bool(isWindows)
self.params = dict(params)
self.bashPath = None

View File

@@ -289,9 +289,10 @@ def main_with_tmp(builtinParameters):
debug_group.add_argument("--show-tests", dest="showTests",
help="Show all discovered tests",
action="store_true", default=False)
debug_group.add_argument("--use-processes", dest="executionStrategy",
help="Run tests in parallel with processes (not threads)",
action="store_const", const="PROCESSES")
debug_group.add_argument("--single-process", dest="singleProcess",
help="Don't run tests in parallel. Intended for debugging "
"single test failures",
action="store_true", default=False)
opts = parser.parse_args()
args = opts.test_paths
@@ -341,6 +342,7 @@ def main_with_tmp(builtinParameters):
valgrindLeakCheck = opts.valgrindLeakCheck,
valgrindArgs = opts.valgrindArgs,
noExecute = opts.noExecute,
singleProcess = opts.singleProcess,
debug = opts.debug,
isWindows = isWindows,
params = userParams,

View File

@@ -55,6 +55,52 @@ class Run(object):
return _execute_test_impl(test, self.lit_config,
self.parallelism_semaphores)
def execute_tests_in_pool(self, jobs, max_time):
    """Run all of self.tests in a multiprocessing pool of `jobs` workers.

    Each test is dispatched with pool.apply_async; results are delivered
    to self.consume_test_result via the apply_async callback, which runs
    in this (parent) process.

    jobs     -- number of worker processes to start.
    max_time -- overall wall-clock budget in seconds, or a falsy value
                for no deadline.

    Raises whatever exception a worker raised (re-raised by
    AsyncResult.get), or KeyboardInterrupt if the user interrupts the
    run; in either case the pool is terminated before propagating.
    """
    # We need to issue many wait calls, so compute the final deadline and
    # subtract time.time() from that as we go along.
    deadline = None
    if max_time:
        deadline = time.time() + max_time

    # Start a process pool. Copy over the data shared between all test runs.
    # FIXME: Find a way to capture the worker process stderr. If the user
    # interrupts the workers before we make it into our task callback, they
    # will each raise a KeyboardInterrupt exception and print to stderr at
    # the same time.
    # NOTE(review): worker_initializer / worker_run_one_test are
    # module-level helpers defined elsewhere in this file; presumably the
    # initializer stashes lit_config and the semaphores in each worker's
    # globals — confirm against their definitions.
    pool = multiprocessing.Pool(jobs, worker_initializer,
                                (self.lit_config,
                                 self.parallelism_semaphores))
    try:
        async_results = [pool.apply_async(worker_run_one_test,
                                          args=(test_index, test),
                                          callback=self.consume_test_result)
                         for test_index, test in enumerate(self.tests)]
        # No more work will be submitted; lets workers exit once the
        # queue drains, which pool.join() below relies on.
        pool.close()

        # Wait for all results to come in. The callback that runs in the
        # parent process will update the display.
        for a in async_results:
            if deadline:
                a.wait(deadline - time.time())
            else:
                # Python condition variables cannot be interrupted unless
                # they have a timeout. This can make lit unresponsive to
                # KeyboardInterrupt, so do a busy wait with a timeout.
                while not a.ready():
                    a.wait(1)
            if not a.successful():
                a.get()  # Exceptions raised here come from the worker.
            # hit_max_failures is set by the result callback (in the
            # parent process) once the configured failure limit is hit.
            if self.hit_max_failures:
                break
    except:
        # Stop the workers and wait for any straggling results to come in
        # if we exited without waiting on every async result.
        # (Bare except is deliberate here: it must also catch
        # KeyboardInterrupt before re-raising.)
        pool.terminate()
        raise
    finally:
        pool.join()
def execute_tests(self, display, jobs, max_time=None):
"""
execute_tests(display, jobs, [max_time])
@@ -94,52 +140,16 @@ class Run(object):
# our task completion callback.
self.display = display
# We need to issue many wait calls, so compute the final deadline and
# subtract time.time() from that as we go along.
deadline = None
if max_time:
deadline = time.time() + max_time
# Start a process pool. Copy over the data shared between all test runs.
# FIXME: Find a way to capture the worker process stderr. If the user
# interrupts the workers before we make it into our task callback, they
# will each raise a KeyboardInterrupt exception and print to stderr at
# the same time.
pool = multiprocessing.Pool(jobs, worker_initializer,
(self.lit_config,
self.parallelism_semaphores))
try:
self.failure_count = 0
self.hit_max_failures = False
async_results = [pool.apply_async(worker_run_one_test,
args=(test_index, test),
callback=self.consume_test_result)
for test_index, test in enumerate(self.tests)]
pool.close()
# Wait for all results to come in. The callback that runs in the
# parent process will update the display.
for a in async_results:
if deadline:
a.wait(deadline - time.time())
else:
# Python condition variables cannot be interrupted unless
# they have a timeout. This can make lit unresponsive to
# KeyboardInterrupt, so do a busy wait with a timeout.
while not a.ready():
a.wait(1)
if not a.successful():
a.get() # Exceptions raised here come from the worker.
if self.hit_max_failures:
break
except:
# Stop the workers and wait for any straggling results to come in
# if we exited without waiting on every async result.
pool.terminate()
raise
finally:
pool.join()
self.failure_count = 0
self.hit_max_failures = False
if self.lit_config.singleProcess:
global child_lit_config
child_lit_config = self.lit_config
for test_index, test in enumerate(self.tests):
result = worker_run_one_test(test_index, test)
self.consume_test_result(result)
else:
self.execute_tests_in_pool(jobs, max_time)
# Mark any tests that weren't run as UNRESOLVED.
for test in self.tests: