Commit 9b338e4

Author: Lucas Zimmer
Parent: d9b693a

Add gradient norm logging, fix tiny_cs for regression

4 files changed: +24 -9 lines

autoPyTorch/components/metrics/additional_logs.py

Lines changed: 21 additions & 2 deletions
@@ -1,4 +1,3 @@
-
 class test_result():
     """Log the performance on the test set"""
     def __init__(self, autonet, X_test, Y_test):
@@ -10,4 +9,24 @@ def __call__(self, model, epochs):
         if self.Y_test is None or self.X_test is None:
             return float("nan")
 
-        return self.autonet.score(self.X_test, self.Y_test)
+        return self.autonet.score(self.X_test, self.Y_test)
+
+
+class gradient_norm():
+    """Log the average L2 norm of the parameter gradients"""
+    def __init__(self):
+        pass
+
+    def __call__(self, network, epoch):
+        total_gradient = 0
+        n_params = 0
+
+        for p in filter(lambda p: p.grad is not None, network.parameters()):
+            total_gradient += p.grad.data.norm(2).item()
+            n_params += 1
+
+        # Prevent division by zero when no parameter has a gradient yet.
+        if n_params == 0:
+            n_params = 1
+
+        return total_gradient / n_params
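The new gradient_norm logger averages the L2 norms of all parameter gradients, so it only reports a nonzero value once a backward pass has populated the .grad fields. A minimal standalone sketch of its behaviour (the toy network and batch are illustrative, not part of the commit):

import torch
import torch.nn as nn

from autoPyTorch.components.metrics.additional_logs import gradient_norm

# Toy two-layer network and a random regression batch.
network = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 1))
x, y = torch.randn(16, 4), torch.randn(16, 1)

logger = gradient_norm()

# Before backward() no parameter has a gradient, so the logger returns 0.
print(logger(network, epoch=0))  # 0.0

# After a backward pass it reports the mean per-parameter gradient norm.
nn.functional.l1_loss(network(x), y).backward()
print(logger(network, epoch=0))  # some positive scalar, e.g. 0.21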

autoPyTorch/core/presets/feature_regression/tiny_cs.txt

Lines changed: 2 additions & 2 deletions
@@ -5,7 +5,7 @@ preprocessors=[truncated_svd]
 batch_loss_computation_techniques=[standard]
 imputation_strategies=[median]
 initialization_methods=[default]
-loss_modules=[cross_entropy_weighted]
+loss_modules=[l1_loss]
 normalization_strategies=[standardize]
 optimizer=[sgd]
-hyperparameter_search_space_updates=autoPyTorch/core/presets/tiny_cs_updates.txt
+hyperparameter_search_space_updates=autoPyTorch/core/presets/tiny_cs_updates.txt
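The loss swap above is what makes this preset usable for regression: cross_entropy_weighted expects class-index targets, while L1 loss compares continuous predictions to continuous targets. A small PyTorch illustration of the difference (toy tensors, not taken from the repository):

import torch

# Continuous predictions and targets, as in a regression task.
predictions = torch.tensor([[2.5], [0.8], [1.9]])
targets = torch.tensor([[3.0], [1.0], [2.0]])

# L1 loss (mean absolute error) is well-defined for continuous targets.
print(torch.nn.L1Loss()(predictions, targets))  # tensor(0.2667)

# Cross-entropy instead pairs class logits with integer class labels,
# so it cannot score continuous regression targets.
logits = torch.tensor([[1.2, -0.3], [0.1, 0.9]])
labels = torch.tensor([0, 1])
print(torch.nn.CrossEntropyLoss()(logits, labels))  # tensor(0.2863)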

autoPyTorch/pipeline/nodes/image/optimization_algorithm_no_timelimit.py

Lines changed: 0 additions & 4 deletions
@@ -1,4 +1,3 @@
-
 import numpy as np
 import os
 import time
@@ -9,7 +8,6 @@
 import itertools
 import random
 
-
 import autoPyTorch.utils.thread_read_write as thread_read_write
 import datetime
 
@@ -75,7 +73,6 @@ def __init__(self, optimization_pipeline_nodes):
     def fit(self, pipeline_config, X_train, Y_train, X_valid, Y_valid, refit=None):
         res = None
 
-
         config_space = self.pipeline.get_hyperparameter_search_space(**pipeline_config)
         config_space, constants = remove_constant_hyperparameter(config_space)
         config_space.seed(pipeline_config['random_seed'])
@@ -126,7 +123,6 @@ def fit(self, pipeline_config, X_train, Y_train, X_valid, Y_valid, refit=None):
             if task_id in [1, -1]:
                 self.run_optimization_algorithm(pipeline_config, config_space, constants, run_id, ns_host, ns_port, NS, task_id)
 
-
             res = self.parse_results(pipeline_config["result_logger_dir"])
 
         except Exception as e:

autoPyTorch/pipeline/nodes/optimization_algorithm.py

Lines changed: 1 addition & 1 deletion
@@ -157,7 +157,7 @@ def predict(self, pipeline_config, X):
     def get_pipeline_config_options(self):
         options = [
             ConfigOption("run_id", default="0", type=str, info="Unique id for each run."),
-            ConfigOption("task_id", default=-1, type=int, info="ID for each worker, if you run AutoNet on a cluster. Set to -1, if you run it locally. "),
+            ConfigOption("task_id", default=-1, type=int, info="ID for each worker if you run AutoNet on a cluster. Set to -1 if you run it locally."),
             ConfigOption("algorithm", default="bohb", type=str, choices=list(self.algorithms.keys()), info="Algorithm to use for config sampling."),
             ConfigOption("budget_type", default="time", type=str, choices=list(self.budget_types.keys())),
             ConfigOption("min_budget", default=lambda c: self.budget_types[c["budget_type"]].default_min_budget, type=float, depends=True, info="Min budget for fitting configurations."),
