
Commit 90167f7

Raise ValueError when requesting more cores than available (#489)
* Raise ValueError when requesting more cores than available

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent b314e72 commit 90167f7
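
The practical effect: requesting more cores for a single task than the executor's max_cores limit now raises a ValueError at submission time. A minimal sketch of the new behaviour, mirroring the test added below (the executorlib import path and the calc task are assumptions for illustration):

    from executorlib import Executor  # assumed import path

    def calc(i):  # hypothetical stand-in for the task used in the test suite
        return i

    with Executor(max_cores=1, backend="local", block_allocation=True) as exe:
        # Only 1 core is available, but 2 are requested, so submit() raises ValueError
        fs = exe.submit(calc, 1, resource_dict={"cores": 2})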

5 files changed: 22 additions, 6 deletions


executorlib/base/executor.py

Lines changed: 12 additions & 2 deletions
@@ -18,14 +18,15 @@ class ExecutorBase(FutureExecutor):
     Base class for the executor.
 
     Args:
-        FutureExecutor: Base class for the executor.
+        max_cores (int): defines the number cores which can be used in parallel
     """
 
-    def __init__(self):
+    def __init__(self, max_cores: Optional[int] = None):
         """
         Initialize the ExecutorBase class.
         """
         cloudpickle_register(ind=3)
+        self._max_cores = max_cores
         self._future_queue: queue.Queue = queue.Queue()
         self._process: Optional[RaisingThread] = None
 
@@ -86,6 +87,15 @@ def submit(self, fn: callable, *args, resource_dict: dict = {}, **kwargs) -> Future:
         Returns:
             Future: A Future representing the given call.
         """
+        cores = resource_dict.get("cores", None)
+        if (
+            cores is not None
+            and self._max_cores is not None
+            and cores > self._max_cores
+        ):
+            raise ValueError(
+                "The specified number of cores is larger than the available number of cores."
+            )
         check_resource_dict(function=fn)
         f = Future()
         self._future_queue.put(
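
Isolated from the class, the new guard boils down to a single comparison that only fires when both the per-task request and the executor-wide limit are known. A standalone sketch of the same logic (the validate_cores name is chosen here for illustration and is not part of executorlib):

    from typing import Optional

    def validate_cores(requested: Optional[int], max_cores: Optional[int]) -> None:
        # Mirror of the check added to ExecutorBase.submit(): validate only when
        # both the requested core count and the executor-wide limit are set.
        if requested is not None and max_cores is not None and requested > max_cores:
            raise ValueError(
                "The specified number of cores is larger than the available number of cores."
            )

Because either side can be None, subclasses that call super().__init__(max_cores=None), such as the cache executor below, effectively opt out of the check.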

executorlib/cache/executor.py

Lines changed: 1 addition & 1 deletion
@@ -48,7 +48,7 @@ def __init__(
             backend (str, optional): name of the backend used to spawn tasks.
             disable_dependencies (boolean): Disable resolving future objects during the submission.
         """
-        super().__init__()
+        super().__init__(max_cores=None)
         default_resource_dict = {
             "cores": 1,
             "cwd": None,

executorlib/interactive/executor.py

Lines changed: 1 addition & 1 deletion
@@ -60,7 +60,7 @@ def __init__(
         plot_dependency_graph: bool = False,
         **kwargs: Any,
     ) -> None:
-        super().__init__()
+        super().__init__(max_cores=kwargs.get("max_cores", None))
         executor = create_executor(*args, **kwargs)
         self._set_process(
             RaisingThread(

executorlib/interactive/shared.py

Lines changed: 2 additions & 2 deletions
@@ -131,7 +131,7 @@ def __init__(
         executor_kwargs: dict = {},
         spawner: BaseSpawner = MpiExecSpawner,
     ):
-        super().__init__()
+        super().__init__(max_cores=executor_kwargs.get("max_cores", None))
         executor_kwargs["future_queue"] = self._future_queue
         executor_kwargs["spawner"] = spawner
         self._set_process(
@@ -183,7 +183,7 @@ def __init__(
         executor_kwargs: dict = {},
         spawner: BaseSpawner = MpiExecSpawner,
     ):
-        super().__init__()
+        super().__init__(max_cores=executor_kwargs.get("max_cores", None))
         executor_kwargs["future_queue"] = self._future_queue
         executor_kwargs["spawner"] = spawner
         executor_kwargs["max_cores"] = max_cores

tests/test_executor_backend_mpi.py

Lines changed: 6 additions & 0 deletions
@@ -52,6 +52,12 @@ def test_meta_executor_single(self):
         self.assertTrue(fs_1.done())
         self.assertTrue(fs_2.done())
 
+    def test_oversubscribe(self):
+        with self.assertRaises(ValueError):
+            with Executor(max_cores=1, backend="local", block_allocation=True) as exe:
+                cloudpickle_register(ind=1)
+                fs_1 = exe.submit(calc, 1, resource_dict={"cores": 2})
+
     @unittest.skipIf(
         skip_mpi4py_test, "mpi4py is not installed, so the mpi4py tests are skipped."
     )
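
To exercise only the new test locally, something along these lines should work, assuming the tests directory is importable from the repository root (adjust the module path if not):

    python -m unittest -k test_oversubscribe tests.test_executor_backend_mpi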
