Skip to content

Commit a41fc1d

Browse files
authored
Merge pull request #4 from ddbourgin/master
update merge
2 parents 497aad3 + b28bb28 commit a41fc1d

File tree

3 files changed

+14
-12
lines changed

3 files changed

+14
-12
lines changed

numpy_ml/neural_nets/initializers/initializers.py

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -122,13 +122,14 @@ def init_from_dict(self):
122122
raise ValueError("Must have `hyperparameters` key: {}".format(S))
123123

124124
if sc and sc["id"] == "ConstantScheduler":
125-
scheduler = ConstantScheduler().set_params(sc)
125+
scheduler = ConstantScheduler()
126126
elif sc and sc["id"] == "ExponentialScheduler":
127-
scheduler = ExponentialScheduler().set_params(sc)
127+
scheduler = ExponentialScheduler()
128128
elif sc and sc["id"] == "NoamScheduler":
129-
scheduler = NoamScheduler().set_params(sc)
129+
scheduler = NoamScheduler()
130130
elif sc:
131131
raise NotImplementedError("{}".format(sc["id"]))
132+
scheduler.set_params(sc)
132133
return scheduler
133134

134135

@@ -182,15 +183,16 @@ def init_from_dict(self):
182183
raise ValueError("Must have `hyperparemeters` key: {}".format(O))
183184

184185
if op and op["id"] == "SGD":
185-
optimizer = SGD().set_params(op, cc)
186+
optimizer = SGD()
186187
elif op and op["id"] == "RMSProp":
187-
optimizer = RMSProp().set_params(op, cc)
188+
optimizer = RMSProp()
188189
elif op and op["id"] == "AdaGrad":
189-
optimizer = AdaGrad().set_params(op, cc)
190+
optimizer = AdaGrad()
190191
elif op and op["id"] == "Adam":
191-
optimizer = Adam().set_params(op, cc)
192+
optimizer = Adam()
192193
elif op:
193194
raise NotImplementedError("{}".format(op["id"]))
195+
optimizer.set_params(op, cc)
194196
return optimizer
195197

196198

numpy_ml/neural_nets/layers/layers.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -119,12 +119,12 @@ def set_params(self, summary_dict):
119119
if k in self.hyperparameters:
120120
if k == "act_fn":
121121
layer.act_fn = ActivationInitializer(v)()
122-
if k == "optimizer":
122+
elif k == "optimizer":
123123
layer.optimizer = OptimizerInitializer(sd[k])()
124-
if k not in ["wrappers", "optimizer"]:
125-
setattr(layer, k, v)
126-
if k == "wrappers":
124+
elif k == "wrappers":
127125
layer = init_wrappers(layer, sd[k])
126+
elif k not in ["wrappers", "optimizer"]:
127+
setattr(layer, k, v)
128128
return layer
129129

130130
def summary(self):

numpy_ml/plots/gmm_plots.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ def plot():
8484
# take best fit over 10 runs
8585
best_elbo = -np.inf
8686
for k in range(10):
87-
_G = GMM(C=n_classes, seed=i * 3)
87+
_G = GMM(C=n_classes, seed=k * 3)
8888
ret = _G.fit(X, max_iter=100, verbose=False)
8989
while ret != 0:
9090
print("Components collapsed; Refitting")

0 commit comments

Comments (0)