-
Notifications
You must be signed in to change notification settings - Fork 5
/
metann_runner.py
160 lines (127 loc) · 5.33 KB
/
metann_runner.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
import argparse
import time
import logging
import sys
import os
import pickle
import numpy as np
from acquisition_functions import acq_fn
from data import Data
from meta_neural_net import MetaNeuralnet
"""
Meta neural net runner, used in run_experiments_parallel:
- loads data by opening the k*i pickle files from previous iterations
- trains a meta neural network and predicts the accuracy of all candidates
- outputs k pickle files describing the architectures to be trained next
"""
def run_meta_neuralnet(search_space, dicts,
                       k=10,
                       verbose=1,
                       num_ensemble=5,
                       epochs=10000,
                       lr=0.00001,
                       loss='scaled',
                       explore_type='its',
                       explore_factor=0.5):
    """Train an ensemble of meta neural nets and pick the next k architectures.

    Encodes the architectures in `dicts`, trains `num_ensemble` independent
    meta neural networks on them, predicts the accuracy of mutation-generated
    candidates, and returns the k best according to the acquisition function.

    Args:
        search_space: project Data object; provides encode_data / get_candidates.
        dicts: list of arch dictionary objects from previous iterations.
        k: number of architectures to return.
        verbose: when truthy, print the mean ensemble train error.
        num_ensemble: number of independently trained meta neural nets.
        epochs, lr, loss: hyperparameters forwarded to MetaNeuralnet.fit.
        explore_type: acquisition-function type passed to acq_fn.
        explore_factor: accepted for interface compatibility (unused here).

    Returns:
        List of k dicts, each of the form {'spec': architecture}.
    """
    data = search_space.encode_data(dicts)
    xtrain = np.array([d[1] for d in data])
    ytrain = np.array([d[2] for d in data])

    candidates = search_space.get_candidates(data,
                                             acq_opt_type='mutation_random',
                                             encode_paths=True,
                                             allow_isomorphisms=True,
                                             deterministic_loss=None)
    xcandidates = np.array([c[1] for c in candidates])
    candidates_specs = [c[0] for c in candidates]

    # Train an ensemble; each member contributes one prediction vector over
    # all candidates. A fresh MetaNeuralnet is created per member so the
    # ensemble is genuinely independent. (The original also instantiated one
    # before the loop that was never used — removed.)
    predictions = []
    train_error = 0
    for _ in range(num_ensemble):
        meta_neuralnet = MetaNeuralnet()
        train_error += meta_neuralnet.fit(xtrain, ytrain,
                                          loss=loss,
                                          epochs=epochs,
                                          lr=lr)
        predictions.append(np.squeeze(meta_neuralnet.predict(xcandidates)))
    train_error /= num_ensemble
    if verbose:
        print('Meta neural net train error: {}'.format(train_error))

    # The acquisition function turns the ensemble predictions into a ranking
    # of candidate indices, best first.
    sorted_indices = acq_fn(predictions, explore_type)
    return [{'spec': candidates_specs[i]} for i in sorted_indices[:k]]
def run(args):
    """Produce the next batch of k architectures for one BayesOpt iteration.

    Iteration 0 (args.query == 0) samples k architectures at random. Later
    iterations load every previously trained result from
    '<experiment_name>/<trained_filename>_<i>.pkl' (i in [0, query)), train
    the meta neural net, and select the k best predicted candidates. Each
    chosen architecture is written to its own
    '<experiment_name>/<untrained_filename>_<index>.pkl' file.

    Args:
        args: parsed argparse namespace (experiment_name, query, k,
            trained_filename, untrained_filename, threshold).
    """
    save_dir = '{}/'.format(args.experiment_name)
    # makedirs with exist_ok avoids the check-then-create race the original
    # os.path.exists + os.mkdir pair had when runs start concurrently.
    os.makedirs(save_dir, exist_ok=True)

    query = args.query
    k = args.k
    trained_prefix = args.trained_filename
    untrained_prefix = args.untrained_filename
    # NOTE(review): args.threshold is parsed but never used in this function;
    # it is kept at the CLI for compatibility and not bound locally here.
    search_space = Data('darts')

    if query == 0:
        # First iteration: no training data exists yet, so sample at random.
        print('about to generate {} random'.format(k))
        data = search_space.generate_random_dataset(num=k, train=False)
        next_arches = [{'spec': d['spec']} for d in data]
    else:
        # Gather results of all previously trained architectures.
        data = []
        for i in range(query):
            filepath = '{}{}_{}.pkl'.format(save_dir, trained_prefix, i)
            with open(filepath, 'rb') as f:
                data.append(pickle.load(f))
        print('Iteration {}'.format(query))
        print('Data from last round')
        print(data)
        # Train the meta neural net and get the next k architectures.
        next_arches = run_meta_neuralnet(search_space, data, k=k)

    print('next batch')
    print(next_arches)

    # Write each new architecture to its own pickle file; the trainer picks
    # these up by index in run_experiments_parallel.
    for i in range(k):
        index = query + i
        filepath = '{}{}_{}.pkl'.format(save_dir, untrained_prefix, index)
        next_arches[i]['index'] = index
        next_arches[i]['filepath'] = filepath
        with open(filepath, 'wb') as f:
            pickle.dump(next_arches[i], f)
def main(args):
    """Set up logging to stdout and ./log.txt, record the args, and run."""
    log_dir = './'
    fmt = '%(asctime)s %(message)s'
    # Root logger: stream handler to stdout via basicConfig, plus a file
    # handler so the same records land in log.txt.
    logging.basicConfig(stream=sys.stdout, level=logging.INFO,
                        format=fmt, datefmt='%m/%d %I:%M:%S %p')
    file_handler = logging.FileHandler(os.path.join(log_dir, 'log.txt'))
    file_handler.setFormatter(logging.Formatter(fmt))
    logging.getLogger().addHandler(file_handler)

    logging.info(args)
    run(args)
if __name__ == "__main__":
    # CLI entry point: declare the arguments for a single runner iteration
    # as (flag, type, default, help) tuples and register them in one pass.
    _ARG_SPECS = [
        ('--experiment_name', str, 'darts_test', 'Folder for input/output files'),
        ('--params', str, 'test', 'Which set of params to use'),
        ('--query', int, 0, 'Which query is Neural BayesOpt on'),
        ('--trained_filename', str, 'trained_spec', 'name of input files'),
        ('--untrained_filename', str, 'untrained_spec', 'name of output files'),
        ('--k', int, 10, 'number of arches to train per iteration'),
        ('--threshold', int, 20, 'throw out arches with val loss above threshold'),
    ]
    parser = argparse.ArgumentParser(description='Args for meta neural net')
    for flag, arg_type, default_value, help_text in _ARG_SPECS:
        parser.add_argument(flag, type=arg_type, default=default_value,
                            help=help_text)
    main(parser.parse_args())