
Commit 5c6ce0b

FIX_123 (#133)
* FIX_123
* Better debug msg
* at least 1 config in regression
* Return self in _fit()
1 parent a10ab29 commit 5c6ce0b

9 files changed: +11 −41 lines

autoPyTorch/api/base_task.py

Lines changed: 1 addition & 1 deletion

@@ -227,7 +227,7 @@ def set_pipeline_config(
         Args:
             **pipeline_config_kwargs: Valid config options include "num_run",
                 "device", "budget_type", "epochs", "runtime", "torch_num_threads",
-                "early_stopping", "use_tensorboard_logger", "use_pynisher",
+                "early_stopping", "use_tensorboard_logger",
                 "metrics_during_training"

         Returns:
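
For reference, a call to set_pipeline_config under the updated docstring simply omits "use_pynisher". The sketch below is illustrative only: the TabularClassificationTask entry point and the concrete option values are assumptions, not part of this commit.

# Illustrative sketch; the task class used here is an assumption, not shown in this diff.
from autoPyTorch.api.tabular_classification import TabularClassificationTask

api = TabularClassificationTask()
api.set_pipeline_config(
    device="cpu",
    budget_type="epochs",
    epochs=50,
    torch_num_threads=1,
    early_stopping=20,
    use_tensorboard_logger=True,
    metrics_during_training=True,
)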

autoPyTorch/configs/default_pipeline_options.json

Lines changed: 0 additions & 1 deletion

@@ -7,6 +7,5 @@
     "torch_num_threads": 1,
     "early_stopping": 20,
     "use_tensorboard_logger": "False",
-    "use_pynisher": "False",
     "metrics_during_training": "True"
 }

autoPyTorch/pipeline/base_pipeline.py

Lines changed: 0 additions & 1 deletion

@@ -546,6 +546,5 @@ def get_default_pipeline_options() -> Dict[str, Any]:
             'torch_num_threads': 1,
             'early_stopping': 10,
             'use_tensorboard_logger': True,
-            'use_pynisher': False,
             'metrics_during_training': True
         }
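
Since get_default_pipeline_options() returns a plain Dict[str, Any], callers can still layer their own settings over the trimmed defaults with an ordinary dict merge. A minimal sketch using only the keys visible in this hunk; the override values are made up for illustration.

from typing import Any, Dict

# Tail of the defaults as they stand after this commit.
defaults: Dict[str, Any] = {
    'torch_num_threads': 1,
    'early_stopping': 10,
    'use_tensorboard_logger': True,
    'metrics_during_training': True,
}

# Later entries win in a dict merge, so per-run overrides take precedence.
overrides = {'early_stopping': 5, 'torch_num_threads': 4}
pipeline_options = {**defaults, **overrides}
assert pipeline_options['early_stopping'] == 5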

autoPyTorch/pipeline/components/training/trainer/base_trainer.py

Lines changed: 0 additions & 3 deletions

@@ -29,9 +29,6 @@ def __init__(self,

         It also allows to define a 'epoch_or_time' budget type, which means,
         the first of them both which is exhausted, is honored
-
-        In case use_pynisher is set to false, this function allows to
-        still terminate the task with a time domain consideration
         """
         self.start_time = time.time()
         self.budget_type = budget_type
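
The 'epoch_or_time' behaviour kept in this docstring (whichever budget runs out first is honored) can be pictured with a small standalone check. This is a sketch of the idea only, not the trainer's actual implementation:

import time


def budget_exhausted(budget_type: str, start_time: float, runtime: float,
                     epoch: int, max_epochs: int) -> bool:
    """Return True once the configured budget is spent.

    For 'epoch_or_time', the first of the two budgets to run out wins.
    """
    time_spent = (time.time() - start_time) >= runtime
    epochs_spent = epoch >= max_epochs
    if budget_type == 'runtime':
        return time_spent
    if budget_type == 'epochs':
        return epochs_spent
    if budget_type == 'epoch_or_time':
        return time_spent or epochs_spent
    raise ValueError(f"Unknown budget_type: {budget_type}")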

autoPyTorch/pipeline/components/training/trainer/base_trainer_choice.py

Lines changed: 3 additions & 30 deletions

@@ -12,8 +12,6 @@

 import numpy as np

-import pynisher
-
 import torch
 from torch.optim import Optimizer
 from torch.optim.lr_scheduler import _LRScheduler

@@ -196,37 +194,16 @@ def fit(self, X: Dict[str, Any], y: Any = None, **kwargs: Any) -> autoPyTorchComponent:
             ] if 'logger_port' in X else logging.handlers.DEFAULT_TCP_LOGGING_PORT,
         )

-        fit_function = self._fit
-        if X['use_pynisher']:
-            wall_time_in_s = X['runtime'] if 'runtime' in X else None
-            memory_limit = X['cpu_memory_limit'] if 'cpu_memory_limit' in X else None
-            fit_function = pynisher.enforce_limits(
-                wall_time_in_s=wall_time_in_s,
-                mem_in_mb=memory_limit,
-                logger=self.logger
-            )(self._fit)
-
         # Call the actual fit function.
-        state_dict = fit_function(
+        self._fit(
             X=X,
             y=y,
             **kwargs
         )

-        if X['use_pynisher']:
-            # Normally the X[network] is a pointer to the object, so at the
-            # end, when we train using X, the pipeline network is updated for free
-            # If we do multiprocessing (because of pynisher) we have to update
-            # X[network] manually. we do so in a way that every pipeline component
-            # can see this new network -- via an update, not overwrite of the pointer
-            state_dict = state_dict.result
-            X['network'].load_state_dict(state_dict)
-
-        # TODO: when have the optimizer code, the pynisher object might have failed
-        # We should process this function as Failure if so trough fit_function.exit_status
         return cast(autoPyTorchComponent, self.choice)

-    def _fit(self, X: Dict[str, Any], y: Any = None, **kwargs: Any) -> torch.nn.Module:
+    def _fit(self, X: Dict[str, Any], y: Any = None, **kwargs: Any) -> 'TrainerChoice':
         """
         Fits a component by using an input dictionary with pre-requisites

@@ -359,7 +336,7 @@ def _fit(self, X: Dict[str, Any], y: Any = None, **kwargs: Any) -> torch.nn.Module:
         # Tag as fitted
         self.fitted_ = True

-        return X['network'].state_dict()
+        return self

     def early_stop_handler(self, X: Dict[str, Any]) -> bool:
         """

@@ -444,10 +421,6 @@ def check_requirements(self, X: Dict[str, Any], y: Any = None) -> None:
             raise ValueError('Need a backend to provide the working directory, '
                              "yet 'backend' was not found in the fit dictionary")

-        # For resource allocation, we need to know if pynisher is enabled
-        if 'use_pynisher' not in X:
-            raise ValueError('To fit a Trainer, expected fit dictionary to have use_pynisher')
-
         # Whether we should evaluate metrics during training or no
         if 'metrics_during_training' not in X:
             raise ValueError('Missing metrics_during_training in the fit dictionary')
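
The net effect of these hunks: fit() no longer wraps _fit() in pynisher.enforce_limits, so training runs in-process, X['network'] is updated in place, and _fit() now returns the trainer choice itself rather than a state_dict (in the real class, fit() still returns the selected component via cast(autoPyTorchComponent, self.choice)). A condensed, simplified sketch of the new contract, not the full class:

from typing import Any, Dict

import torch


class TrainerChoiceSketch:
    """Condensed illustration of the post-commit fit()/_fit() contract."""

    def fit(self, X: Dict[str, Any], y: Any = None) -> "TrainerChoiceSketch":
        # No pynisher wrapper: _fit runs in-process, so the network object in X
        # is mutated directly and nothing is copied back from a child process.
        self._fit(X=X, y=y)
        return self

    def _fit(self, X: Dict[str, Any], y: Any = None) -> "TrainerChoiceSketch":
        network: torch.nn.Module = X['network']
        network.train()  # switch to training mode; the actual training loop is elided
        self.fitted_ = True
        # Returning self matches the new "_fit() -> 'TrainerChoice'" signature.
        return self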

test/conftest.py

Lines changed: 0 additions & 1 deletion

@@ -290,7 +290,6 @@ def get_fit_dictionary(X, y, validator, backend):
         'early_stopping': 10,
         'working_dir': '/tmp',
         'use_tensorboard_logger': True,
-        'use_pynisher': False,
         'metrics_during_training': True,
         'split_id': 0,
         'backend': backend,

test/test_api/test_api.py

Lines changed: 1 addition & 1 deletion

@@ -268,7 +268,7 @@ def test_tabular_regression(openml_name, resampling_strategy, backend):
     # Check that smac was able to find proper models
     succesful_runs = [run_value.status for run_value in estimator.run_history.data.values(
     ) if 'SUCCESS' in str(run_value.status)]
-    assert len(succesful_runs) > 1, [(k, v) for k, v in estimator.run_history.data.items()]
+    assert len(succesful_runs) >= 1, [(k, v) for k, v in estimator.run_history.data.items()]

     # Search for an existing run key in disc. A individual model might have
     # a timeout and hence was not written to disc

test/test_pipeline/test_tabular_classification.py

Lines changed: 5 additions & 2 deletions

@@ -63,7 +63,10 @@ def test_pipeline_fit(self, fit_dictionary_tabular):
         cs = pipeline.get_hyperparameter_search_space()
         config = cs.sample_configuration()
         pipeline.set_hyperparameters(config)
-        pipeline.fit(fit_dictionary_tabular)
+        try:
+            pipeline.fit(fit_dictionary_tabular)
+        except Exception as e:
+            pytest.fail(f"Failed due to {e} for config={config}")

         # To make sure we fitted the model, there should be a
         # run summary object with accuracy

@@ -201,7 +204,7 @@ def test_remove_key_check_requirements(self, fit_dictionary_tabular):
         """Makes sure that when a key is removed from X, correct error is outputted"""
         pipeline = TabularClassificationPipeline(
             dataset_properties=fit_dictionary_tabular['dataset_properties'])
-        for key in ['num_run', 'device', 'split_id', 'use_pynisher', 'torch_num_threads', 'dataset_properties']:
+        for key in ['num_run', 'device', 'split_id', 'torch_num_threads', 'dataset_properties']:
             fit_dictionary_tabular_copy = fit_dictionary_tabular.copy()
             fit_dictionary_tabular_copy.pop(key)
             with pytest.raises(ValueError, match=r"To fit .+?, expected fit dictionary to have"):
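
The try/except around pipeline.fit() is a general pytest pattern for attaching the sampled configuration to the failure message ("Better debug msg" in the commit summary). A standalone version of the same pattern, with a dummy function and configs in place of the project's fixtures:

import pytest


def flaky_fit(config: dict) -> None:
    """Stand-in for pipeline.fit(); raises for some configurations."""
    if config.get("learning_rate", 0.01) <= 0:
        raise ValueError("learning rate must be positive")


@pytest.mark.parametrize("config", [{"learning_rate": 0.01}, {"learning_rate": 0.1}])
def test_fit_reports_config_on_failure(config):
    try:
        flaky_fit(config)
    except Exception as e:
        # Putting the config in the message makes CI failures reproducible
        # without re-running the random sampling.
        pytest.fail(f"Failed due to {e} for config={config}")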

test/test_pipeline/test_tabular_regression.py

Lines changed: 1 addition & 1 deletion

@@ -188,7 +188,7 @@ def test_remove_key_check_requirements(self, fit_dictionary_tabular):
         """Makes sure that when a key is removed from X, correct error is outputted"""
         pipeline = TabularRegressionPipeline(
             dataset_properties=fit_dictionary_tabular['dataset_properties'])
-        for key in ['num_run', 'device', 'split_id', 'use_pynisher', 'torch_num_threads', 'dataset_properties']:
+        for key in ['num_run', 'device', 'split_id', 'torch_num_threads', 'dataset_properties']:
             fit_dictionary_tabular_copy = fit_dictionary_tabular.copy()
             fit_dictionary_tabular_copy.pop(key)
             with pytest.raises(ValueError, match=r"To fit .+?, expected fit dictionary to have"):
