Skip to content

Commit

Permalink
Move learning rate and momentum from the learning rule to the learning method
Browse files Browse the repository at this point in the history
  • Loading branch information
hycis committed Nov 7, 2014
1 parent 0a432eb commit 89d0956
Show file tree
Hide file tree
Showing 8 changed files with 313 additions and 143 deletions.
68 changes: 54 additions & 14 deletions hps/AE.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import pynet.datasets.spec as spec
import pynet.datasets.mnist as mnist
import pynet.datasets.mapping as mapping
import pynet.learning_method as learning_methods
from pynet.learning_rule import LearningRule
from pynet.log import Log
from pynet.train_object import TrainObject
Expand Down Expand Up @@ -128,11 +129,25 @@ def build_dataset(self):
return dataset


def build_learning_method(self):
    """Instantiate the learning method named in ``self.state.learning_method.type``.

    Looks the class up on the ``learning_methods`` module by name. For SGD
    the learning rate and momentum from the state config are passed through;
    every other method type is constructed with its defaults.
    """
    method_cls = getattr(learning_methods, self.state.learning_method.type)

    if self.state.learning_method.type == 'SGD':
        # SGD is the only method that takes explicit hyperparameters here.
        return method_cls(
            learning_rate = self.state.learning_method.learning_rate,
            momentum = self.state.learning_method.momentum)

    return method_cls()


def build_learning_rule(self):
learning_rule = LearningRule(max_col_norm = self.state.learning_rule.max_col_norm,
learning_rate = self.state.learning_rule.learning_rate,
momentum = self.state.learning_rule.momentum,
momentum_type = self.state.learning_rule.momentum_type,
# learning_rate = self.state.learning_rule.learning_rate,
# momentum = self.state.learning_rule.momentum,
# momentum_type = self.state.learning_rule.momentum_type,
L1_lambda = self.state.learning_rule.L1_lambda,
L2_lambda = self.state.learning_rule.L2_lambda,
training_cost = Cost(type = self.state.learning_rule.cost),
Expand Down Expand Up @@ -211,26 +226,31 @@ def build_two_hid_model(self, input_dim):
return model


def build_database(self, dataset, learning_rule, learning_method, model):
    """Assemble the record dict describing this experiment for the results database.

    Parameters
    ----------
    dataset : object exposing ``batch_size``, ``noise``, ``nblocks()`` and
        ``preprocessor`` — TODO confirm full expected interface against callers.
    learning_rule : object exposing ``max_col_norm``, ``cost.type`` and
        ``stopping_criteria['cost'].type``.
    learning_method : learning-method instance; its class name is recorded, and
        for SGD its ``learning_rate`` and ``momentum`` are recorded as well.
    model : object exposing ``rand_seed`` and ``layers`` (each layer exposing
        ``dropout_below``, ``noise`` and ``dim``).

    Returns
    -------
    dict
        ``{'name': <database name from state config>, 'records': <metadata>}``.
    """
    save_to_database = {'name' : self.state.log.save_to_database_name,
                        'records' : {'Dataset'          : dataset.__class__.__name__,
                                     'max_col_norm'     : learning_rule.max_col_norm,
                                     'Weight_Init_Seed' : model.rand_seed,
                                     'Dropout_Below'    : str([layer.dropout_below for layer in model.layers]),
                                     'Learning_Method'  : learning_method.__class__.__name__,
                                     'Batch_Size'       : dataset.batch_size,
                                     'Dataset_Noise'    : dataset.noise.__class__.__name__,
                                     'Layer_Noise'      : str([layer.noise.__class__.__name__ for layer in model.layers]),
                                     'nblocks'          : dataset.nblocks(),
                                     'Layer_Types'      : str([layer.__class__.__name__ for layer in model.layers]),
                                     'Layer_Dim'        : str([layer.dim for layer in model.layers]),
                                     'Preprocessor'     : dataset.preprocessor.__class__.__name__,
                                     'Training_Cost'    : learning_rule.cost.type,
                                     'Stopping_Cost'    : learning_rule.stopping_criteria['cost'].type}
                        }

    # Only SGD carries learning-rate/momentum hyperparameters worth recording.
    # NOTE(review): the key 'Learning_rate' has a lowercase 'r', unlike the
    # other Title_Case column names — kept as-is because database columns may
    # already depend on the exact key; confirm before renaming.
    if learning_method.__class__.__name__ == "SGD":
        save_to_database["records"]["Learning_rate"] = learning_method.learning_rate
        save_to_database["records"]["Momentum"] = learning_method.momentum

    return save_to_database

class AE_Testing(AE):
Expand All @@ -243,15 +263,18 @@ def run(self):

dataset = self.build_dataset()
learning_rule = self.build_learning_rule()
learn_method = self.build_learning_method()

model = self.build_one_hid_model(dataset.feature_size())

if self.state.log.save_to_database_name:
database = self.build_database(dataset, learning_rule, model)
database = self.build_database(dataset, learning_rule, learn_method, model)
log = self.build_log(database)

train_obj = TrainObject(log = log,
dataset = dataset,
learning_rule = learning_rule,
learning_method = learn_method,
model = model)
train_obj.run()

Expand Down Expand Up @@ -293,12 +316,14 @@ def run(self):
model.add_layer(output)

learning_rule = self.build_learning_rule()
database = self.build_database(dataset, learning_rule, model)
learn_method = self.build_learning_method()
database = self.build_database(dataset, learning_rule, learn_method, model)
log = self.build_log(database)

train_obj = TrainObject(log = log,
dataset = dataset,
learning_rule = learning_rule,
learning_method = learn_method,
model = model)

train_obj.run()
Expand All @@ -314,6 +339,7 @@ def run(self):

dataset = self.build_dataset()
learning_rule = self.build_learning_rule()
learn_method = self.build_learning_method()

# import pdb
# pdb.set_trace()
Expand All @@ -325,21 +351,27 @@ def run(self):
else:
raise ValueError()

database = self.build_database(dataset, learning_rule, model)
database = self.build_database(dataset, learning_rule, learn_method, model)
log = self.build_log(database)

dataset.log = log

train_obj = TrainObject(log = log,
dataset = dataset,
learning_rule = learning_rule,
learning_method = learn_method,
model = model)

train_obj.run()

log.info("Fine Tuning")
train_obj.model.layers[0].dropout_below = None
# train_obj.model.layers[0].dropout_below = None
# train_obj.model.layers[0].blackout_below = None

for layer in train_obj.model.layers:
layer.dropout_below = None
layer.noise = None

train_obj.setup()
train_obj.run()

Expand All @@ -359,6 +391,7 @@ def run(self):

dataset = self.build_dataset()
learning_rule = self.build_learning_rule()
learn_method = self.build_learning_method()

model = self.build_model()

Expand All @@ -368,13 +401,14 @@ def run(self):
print "Fine Tuning Only"

if self.state.log.save_to_database_name:
database = self.build_database(dataset, learning_rule, model)
database = self.build_database(dataset, learning_rule, learn_method, model)
database['records']['model'] = self.state.hidden1.model
log = self.build_log(database)

train_obj = TrainObject(log = log,
dataset = dataset,
learning_rule = learning_rule,
learning_method = learn_method,
model = model)

train_obj.run()
Expand Down Expand Up @@ -414,19 +448,21 @@ def run(self):

dataset = self.build_dataset()
learning_rule = self.build_learning_rule()
learn_method = self.build_learning_method()

model = self.build_model(dataset.feature_size())
model.layers[0].dropout_below = self.state.hidden1.dropout_below

if self.state.log.save_to_database_name:
database = self.build_database(dataset, learning_rule, model)
database = self.build_database(dataset, learning_rule, learn_method, model)
database['records']['h1_model'] = self.state.hidden1.model
database['records']['h2_model'] = self.state.hidden2.model
log = self.build_log(database)

train_obj = TrainObject(log = log,
dataset = dataset,
learning_rule = learning_rule,
learning_method = learn_method,
model = model)

log.info("Fine Tuning")
Expand Down Expand Up @@ -467,6 +503,7 @@ def run(self):

dataset = self.build_dataset()
learning_rule = self.build_learning_rule()
learn_method = self.build_learning_method()

model = self.build_model(dataset.feature_size())

Expand All @@ -481,7 +518,7 @@ def run(self):
# model.layers[2].blackout_below = self.state.hidden3.blackout_below

if self.state.log.save_to_database_name:
database = self.build_database(dataset, learning_rule, model)
database = self.build_database(dataset, learning_rule, learn_method, model)
database['records']['h1_model'] = self.state.hidden1.model
database['records']['h2_model'] = self.state.hidden2.model
database['records']['h3_model'] = self.state.hidden3.model
Expand All @@ -490,6 +527,7 @@ def run(self):
train_obj = TrainObject(log = log,
dataset = dataset,
learning_rule = learning_rule,
learning_method = learn_method,
model = model)

train_obj.run()
Expand All @@ -512,19 +550,21 @@ def run(self):

dataset = self.build_dataset()
learning_rule = self.build_learning_rule()
learn_method = self.build_learning_method()

if self.state.num_layers == 1:
model = self.build_one_hid_model_no_transpose(dataset.feature_size())
else:
raise ValueError()

if self.state.log.save_to_database_name:
database = self.build_database(dataset, learning_rule, model)
database = self.build_database(dataset, learning_rule, learn_method, model)
log = self.build_log(database)

train_obj = TrainObject(log = log,
dataset = dataset,
learning_rule = learning_rule,
learning_method = learn_method,
model = model)

train_obj.run()
Expand Down
Loading

0 comments on commit 89d0956

Please sign in to comment.