gh-127933: Add option to run regression tests in parallel #128003
New file (@@ -0,0 +1,79 @@):

"""Run a test case multiple times in parallel threads."""

import copy
import functools
import threading
import unittest

from unittest import TestCase


class ParallelTestCase(TestCase):
    def __init__(self, test_case: TestCase, num_threads: int):
        self.test_case = test_case
        self.num_threads = num_threads
        self._testMethodName = test_case._testMethodName
        self._testMethodDoc = test_case._testMethodDoc

    def __str__(self):
        return f"{str(self.test_case)} [threads={self.num_threads}]"

    def run_worker(self, test_case: TestCase, result: unittest.TestResult,
                   barrier: threading.Barrier):
        barrier.wait()
        test_case.run(result)

    def run(self, result=None):
        if result is None:
            result = self.defaultTestResult()
            startTestRun = getattr(result, 'startTestRun', None)
            stopTestRun = getattr(result, 'stopTestRun', None)
            if startTestRun is not None:
                startTestRun()
        else:
            stopTestRun = None

        # Called at the beginning of each test. See TestCase.run.
        result.startTest(self)

        cases = [copy.copy(self.test_case) for _ in range(self.num_threads)]
        results = [unittest.TestResult() for _ in range(self.num_threads)]

        barrier = threading.Barrier(self.num_threads)
        threads = []
        for i, (case, r) in enumerate(zip(cases, results)):
            thread = threading.Thread(target=self.run_worker,
                                      args=(case, r, barrier),
                                      name=f"{str(self.test_case)}-{i}",
                                      daemon=True)
            threads.append(thread)

        for thread in threads:
            thread.start()

        for thread in threads:
            thread.join()

        # Aggregate test results
        if all(r.wasSuccessful() for r in results):
            result.addSuccess(self)

        # Note: We can't call result.addError, result.addFailure, etc. because
        # we no longer have the original exception, just the string format.
        for r in results:
            if len(r.errors) > 0 or len(r.failures) > 0:
                result._mirrorOutput = True
            result.errors.extend(r.errors)
            result.failures.extend(r.failures)
            result.skipped.extend(r.skipped)
            result.expectedFailures.extend(r.expectedFailures)
            result.unexpectedSuccesses.extend(r.unexpectedSuccesses)
            result.collectedDurations.extend(r.collectedDurations)

        if any(r.shouldStop for r in results):
            result.stop()

        # Test has finished running
        result.stopTest(self)
        if stopTestRun is not None:
            stopTestRun()

Reviewer comments on this file:

- On the note about not calling result.addError / result.addFailure: "FWIW, another reason not to call those is that they raise right away if failfast is set."
- On the result-aggregation loop (comment truncated in the page): "This looks like something that might be worth adding to"
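For readers skimming the diff, here is a small usage sketch of the class above. It is not part of the PR: the import path parallel_case and the example test class are assumptions made for illustration, and regrtest itself wires this up differently.

# Hedged usage sketch, not part of the diff. Assumes the file above is
# importable as `parallel_case`; the test class below is invented.
import unittest
from parallel_case import ParallelTestCase  # hypothetical import path

class ExampleTest(unittest.TestCase):
    def test_append(self):
        # Each worker thread runs its own shallow copy of this TestCase,
        # but any module- or class-level state would be shared.
        items = [i for i in range(100)]
        self.assertEqual(len(items), 100)

if __name__ == "__main__":
    outer = unittest.TestResult()
    case = ParallelTestCase(ExampleTest("test_append"), num_threads=4)
    case.run(outer)  # runs 4 copies of test_append concurrently
    print("successful:", outer.wasSuccessful())
    print("errors:", len(outer.errors), "failures:", len(outer.failures))

When every per-thread copy passes, the aggregated result reports success; any per-thread errors and failures are merged into outer.errors and outer.failures.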
New file (@@ -0,0 +1,4 @@):

Add an option ``--parallel-threads=N`` to the regression test runner that
runs individual tests in multiple threads in parallel in order to find
concurrency bugs. Note that most of the test suite is not yet reviewed for
thread-safety or annotated with ``@thread_unsafe`` when necessary.
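The option described by this news entry belongs to the regression test runner (python -m test). As a rough, hedged illustration of how it might be invoked once this PR is merged, with an arbitrary test name and thread count:

# Hedged sketch: drive the regression test runner with the new option.
# Requires a CPython build that includes this PR; the test name and
# thread count are arbitrary examples.
import subprocess
import sys

subprocess.run(
    [sys.executable, "-m", "test", "--parallel-threads=4", "test_threading"],
    check=True,
)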
Reviewer comment (on the thread-creation code in the diff above): It might be nice to give these threads an informative name, something containing the testcase and the Nth thread it is.
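For context, the diff above already gives each worker an informative name via the name= argument to threading.Thread. A small, PR-independent sketch of why that helps: the name is what threading reports when a worker raises, so failures can be traced back to a specific test and thread index. The example name below merely mimics the PR's f"{test_case}-{i}" pattern.

# Independent illustration (not from the PR): a thread's name shows up in
# threading.current_thread().name and in the default "Exception in thread
# <name>" report, which is why informative names help when debugging.
import threading

def worker():
    print("running in:", threading.current_thread().name)

t = threading.Thread(target=worker, name="test_example (ExampleTest)-0", daemon=True)
t.start()
t.join()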