Skip to content

Commit f8640fb

Browse files
ArlindKadra authored and ravinkohli committed
Updating search space (#156)
* Updating search space
* fix typo
* Bug fix
* Fixing buggy implementation of predict when using gpu; bug fixes; fixing code style checks; bug fix for use_pynisher in the base pipeline; bug fix
1 parent 0ea47d3 commit f8640fb

18 files changed

+175
-128
lines changed

autoPyTorch/pipeline/base_pipeline.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -500,7 +500,7 @@ def get_fit_requirements(self) -> List[FitRequirement]:
500500
Returns:
501501
List[NamedTuple]: List of FitRequirements
502502
"""
503-
fit_requirements = list() # List[FitRequirement]
503+
fit_requirements: List[FitRequirement] = list()
504504
for name, step in self.steps:
505505
step_requirements = step.get_fit_requirements()
506506
if step_requirements:
@@ -569,6 +569,7 @@ def get_pipeline_representation(self) -> Dict[str, str]:
569569

570570
@staticmethod
571571
def get_default_pipeline_options() -> Dict[str, Any]:
572+
572573
return {
573574
'num_run': 0,
574575
'device': 'cpu',
@@ -578,5 +579,6 @@ def get_default_pipeline_options() -> Dict[str, Any]:
578579
'torch_num_threads': 1,
579580
'early_stopping': 10,
580581
'use_tensorboard_logger': True,
582+
'use_pynisher': False,
581583
'metrics_during_training': True
582584
}

autoPyTorch/pipeline/components/setup/lr_scheduler/CosineAnnealingWarmRestarts.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from typing import Any, Dict, Optional, Tuple, Union
1+
from typing import Any, Dict, Optional, Union
22

33
from ConfigSpace.configuration_space import ConfigurationSpace
44
from ConfigSpace.hyperparameters import UniformIntegerHyperparameter

autoPyTorch/pipeline/components/setup/lr_scheduler/ReduceLROnPlateau.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
1-
from typing import Any, Dict, Optional, Tuple, Union
1+
from typing import Any, Dict, Optional, Union
2+
23
from ConfigSpace.configuration_space import ConfigurationSpace
34
from ConfigSpace.hyperparameters import (
45
CategoricalHyperparameter,

autoPyTorch/pipeline/components/setup/network/base_network.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -128,6 +128,7 @@ def predict(self, loader: torch.utils.data.DataLoader) -> torch.Tensor:
128128
return Y_snapshot_preds_tensor.mean(dim=0).cpu().numpy()
129129

130130
def _predict(self, network: torch.nn.Module, loader: torch.utils.data.DataLoader) -> torch.Tensor:
131+
network.to(self.device)
131132
network.float()
132133
network.eval()
133134
# Batch prediction
@@ -136,10 +137,10 @@ def _predict(self, network: torch.nn.Module, loader: torch.utils.data.DataLoader
136137
for i, (X_batch, Y_batch) in enumerate(loader):
137138
# Predict on batch
138139
X_batch = X_batch.float().to(self.device)
139-
Y_batch_pred = network(X_batch).detach().cpu()
140+
Y_batch_pred = network(X_batch)
140141
if self.final_activation is not None:
141142
Y_batch_pred = self.final_activation(Y_batch_pred)
142-
Y_batch_preds.append(Y_batch_pred)
143+
Y_batch_preds.append(Y_batch_pred.detach().cpu())
143144

144145
return torch.cat(Y_batch_preds, 0)
145146

autoPyTorch/pipeline/components/setup/network_backbone/MLPBackbone.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,13 +89,13 @@ def get_hyperparameter_search_space(
8989
num_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="num_units",
9090
value_range=(10, 1024),
9191
default_value=200,
92+
log=True
9293
),
9394
dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="dropout",
9495
value_range=(0, 0.8),
9596
default_value=0.5,
9697
),
9798
) -> ConfigurationSpace:
98-
9999
cs = ConfigurationSpace()
100100

101101
# The number of hidden layers the network will have.
@@ -116,6 +116,7 @@ def get_hyperparameter_search_space(
116116
default_value=num_units.default_value,
117117
log=num_units.log)
118118
n_units_hp = get_hyperparameter(n_units_search_space, UniformIntegerHyperparameter)
119+
119120
cs.add_hyperparameter(n_units_hp)
120121

121122
if i > int(min_mlp_layers):

autoPyTorch/pipeline/components/setup/network_backbone/ResNetBackbone.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,12 +113,14 @@ def get_hyperparameter_search_space(
113113
default_value=True,
114114
),
115115
multi_branch_choice: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="mb_choice",
116-
value_range=('None', 'shake-shake', 'shake-drop'),
116+
value_range=('None', 'shake-shake',
117+
'shake-drop'),
117118
default_value='shake-drop',
118119
),
119120
num_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="num_units",
120121
value_range=(10, 1024),
121122
default_value=200,
123+
log=True
122124
),
123125
activation: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="activation",
124126
value_range=tuple(_activations.keys()),

autoPyTorch/pipeline/components/setup/network_backbone/ShapedMLPBackbone.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -96,11 +96,11 @@ def get_hyperparameter_search_space(
9696
max_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="max_units",
9797
value_range=(10, 1024),
9898
default_value=200,
99-
),
99+
log=True),
100100
output_dim: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="output_dim",
101101
value_range=(10, 1024),
102102
default_value=200,
103-
),
103+
log=True),
104104
mlp_shape: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="mlp_shape",
105105
value_range=('funnel', 'long_funnel',
106106
'diamond', 'hexagon',
@@ -114,7 +114,6 @@ def get_hyperparameter_search_space(
114114
),
115115

116116
) -> ConfigurationSpace:
117-
118117
cs = ConfigurationSpace()
119118

120119
# The number of groups that will compose the resnet. That is,

autoPyTorch/pipeline/components/setup/network_backbone/ShapedResNetBackbone.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,7 @@ def get_hyperparameter_search_space( # type: ignore[override]
9898
output_dim: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="output_dim",
9999
value_range=(10, 1024),
100100
default_value=200,
101+
log=True
101102
),
102103
num_groups: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="num_groups",
103104
value_range=(1, 15),
@@ -116,12 +117,15 @@ def get_hyperparameter_search_space( # type: ignore[override]
116117
default_value=True,
117118
),
118119
multi_branch_choice: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="mb_choice",
119-
value_range=('None', 'shake-shake', 'shake-drop'),
120+
value_range=('None', 'shake-shake',
121+
'shake-drop'),
120122
default_value='shake-drop',
121123
),
122124
max_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="max_units",
123125
value_range=(10, 1024),
124-
default_value=200),
126+
default_value=200,
127+
log=True
128+
),
125129
activation: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="activation",
126130
value_range=tuple(_activations.keys()),
127131
default_value=list(_activations.keys())[0]),
@@ -154,6 +158,7 @@ def get_hyperparameter_search_space( # type: ignore[override]
154158

155159
use_dropout = get_hyperparameter(use_dropout, CategoricalHyperparameter)
156160
max_dropout = get_hyperparameter(max_dropout, UniformFloatHyperparameter)
161+
cs.add_hyperparameters([use_dropout, max_dropout])
157162
cs.add_condition(CS.EqualsCondition(max_dropout, use_dropout, True))
158163

159164
use_sc = get_hyperparameter(use_skip_connection, CategoricalHyperparameter)

autoPyTorch/pipeline/components/setup/optimizer/AdamOptimizer.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -93,12 +93,13 @@ def get_hyperparameter_search_space(
9393
value_range=(0.9, 0.9999),
9494
default_value=0.9),
9595
use_weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="use_weight_decay",
96-
value_range=(True, False),
97-
default_value=True,
98-
),
96+
value_range=(True, False),
97+
default_value=True,
98+
),
9999
weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="weight_decay",
100-
value_range=(0.0, 0.1),
101-
default_value=0.0),
100+
value_range=(1E-7, 0.1),
101+
default_value=1E-4,
102+
log=True),
102103
) -> ConfigurationSpace:
103104
cs = ConfigurationSpace()
104105

autoPyTorch/pipeline/components/setup/optimizer/AdamWOptimizer.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -97,8 +97,9 @@ def get_hyperparameter_search_space(
9797
default_value=True,
9898
),
9999
weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="weight_decay",
100-
value_range=(0.0, 0.1),
101-
default_value=0.0),
100+
value_range=(1E-7, 0.1),
101+
default_value=1E-4,
102+
log=True),
102103
) -> ConfigurationSpace:
103104
cs = ConfigurationSpace()
104105

autoPyTorch/pipeline/components/setup/optimizer/RMSpropOptimizer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -97,8 +97,9 @@ def get_hyperparameter_search_space(
9797
default_value=True,
9898
),
9999
weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="weight_decay",
100-
value_range=(0.0, 0.1),
101-
default_value=0.0),
100+
value_range=(1E-7, 0.1),
101+
default_value=1E-4,
102+
log=True),
102103
momentum: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="momentum",
103104
value_range=(0.0, 0.99),
104105
default_value=0.0),
@@ -109,7 +110,6 @@ def get_hyperparameter_search_space(
109110
add_hyperparameter(cs, lr, UniformFloatHyperparameter)
110111
add_hyperparameter(cs, alpha, UniformFloatHyperparameter)
111112
add_hyperparameter(cs, momentum, UniformFloatHyperparameter)
112-
113113
weight_decay = get_hyperparameter(weight_decay, UniformFloatHyperparameter)
114114
use_weight_decay = get_hyperparameter(use_weight_decay, CategoricalHyperparameter)
115115
cs.add_hyperparameters([use_weight_decay, weight_decay])

autoPyTorch/pipeline/components/setup/optimizer/SGDOptimizer.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,8 +88,9 @@ def get_hyperparameter_search_space(
8888
default_value=True,
8989
),
9090
weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="weight_decay",
91-
value_range=(0.0, 0.1),
92-
default_value=0.0),
91+
value_range=(1E-7, 0.1),
92+
default_value=1E-4,
93+
log=True),
9394
momentum: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="momentum",
9495
value_range=(0.0, 0.99),
9596
default_value=0.0),

autoPyTorch/pipeline/components/training/data_loader/base_data_loader.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -264,10 +264,12 @@ def get_hyperparameter_search_space(
264264
dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
265265
batch_size: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="batch_size",
266266
value_range=(32, 320),
267-
default_value=64)
267+
default_value=64,
268+
log=True)
268269
) -> ConfigurationSpace:
269270
cs = ConfigurationSpace()
270271
add_hyperparameter(cs, batch_size, UniformIntegerHyperparameter)
272+
271273
return cs
272274

273275
def __str__(self) -> str:

0 commit comments

Comments (0)