Added AdaMax optimizer and fixed scaling
Mathias Methlie Nilsen authored and KriFos1 committed Jan 3, 2024
1 parent bdf61f9 commit ad348e7
Showing 3 changed files with 32 additions and 1 deletion.
popt/loop/ensemble.py (1 addition, 1 deletion)

@@ -462,7 +462,7 @@ def _invert_scale_state(self):
"""
Transform the internal state from [0, 1] to [lb, ub]
"""
if self.upper_bound and self.lower_bound:
if self.transform and (self.upper_bound and self.lower_bound):
for i, key in enumerate(self.state):
if self.transform:
self.state[key] = self.lower_bound[i] + self.state[key]*(self.upper_bound[i] - self.lower_bound[i])
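The scaling fix makes the inverse transform apply only when self.transform is set and both bounds exist. For reference, each variable is mapped back from [0, 1] to its physical range by the affine map x = lb + s*(ub - lb). A minimal standalone sketch of that mapping (helper and argument names are illustrative, not popt code):

import numpy as np

# Minimal sketch of the inverse scaling performed by _invert_scale_state.
# The helper name and argument names are illustrative, not popt code.
def invert_scale(state01, lb, ub):
    state01 = np.asarray(state01, dtype=float)
    lb, ub = np.asarray(lb, dtype=float), np.asarray(ub, dtype=float)
    # map each internal variable from [0, 1] back to its physical range [lb, ub]
    return lb + state01 * (ub - lb)

print(invert_scale([0.0, 0.5, 1.0], lb=[10.0, 0.0, -1.0], ub=[20.0, 4.0, 1.0]))
# -> [10.  2.  1.]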
popt/update_schemes/enopt.py (3 additions)

@@ -130,6 +130,9 @@ def __set__variable(var_name=None, defalut=None):
             self.optimizer = opt.GradientAscent(self.alpha, self.beta)
         elif optimizer == 'Adam':
             self.optimizer = opt.Adam(self.alpha, self.beta)
+        elif optimizer == 'AdaMax':
+            self.normalize = False
+            self.optimizer = opt.AdaMax(self.alpha, self.beta)
 
         # The EnOpt class self-ignites, and it is possible to send the EnOpt class as a callable method to scipy.minimize
         self.run_loop()  # run_loop resides in the Optimization class (super)
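The AdaMax branch also switches off gradient normalization. A plausible reason (my reading, not stated in the commit) is that the AdaMax step divides the first-moment estimate by a running maximum of the gradient norm, so the step is already insensitive to the overall gradient scale. A small standalone check of that property (illustrative code, not the popt implementation):

import numpy as np

# One AdaMax step from zero state, following Kingma & Ba (2014); illustrative only.
# At t=1 the resulting step is unchanged if the gradient is rescaled.
def adamax_step(g, m, u, t, alpha=0.01, beta1=0.9, beta2=0.999):
    m = beta1*m + (1 - beta1)*g                      # first-moment estimate
    u = max(beta2*u, np.linalg.norm(g, ord=np.inf))  # running max of gradient norm
    return (alpha / (1 - beta1**t)) * m / u          # bias-corrected step

g = np.array([2.0, -4.0])
s1 = adamax_step(g, m=np.zeros(2), u=0.0, t=1)
s2 = adamax_step(100.0*g, m=np.zeros(2), u=0.0, t=1)
print(np.allclose(s1, s2))  # True: rescaling the gradient leaves the step unchanged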
popt/update_schemes/optimizers.py (28 additions)

@@ -287,3 +287,31 @@ def restore_parameters(self):

     def get_step_size(self):
         return self._step_size
+
+
+class AdaMax(Adam):
+    '''
+    AdaMax optimizer
+    References
+    -------------------------------------------------------------------------------------
+    [1] Kingma, D. P., & Ba, J. (2014).
+        Adam: A Method for Stochastic Optimization.
+        arXiv preprint arXiv:1412.6980.
+    '''
+    def __init__(self, step_size, beta1=0.9, beta2=0.999):
+        super().__init__(step_size, beta1, beta2)
+
+    def apply_update(self, control, gradient, **kwargs):
+        iter = kwargs['iter']
+        alpha = self._step_size
+        beta1 = self.beta1
+        beta2 = self.beta2
+
+        self.temp_vel1 = beta1*self.vel1 + (1-beta1)*gradient
+        self.temp_vel2 = max(beta2*self.vel2, np.linalg.norm(gradient))
+
+        step = alpha/(1-beta1**iter) * self.temp_vel1/self.temp_vel2
+        new_control = control - step  # steepest descent
+        return new_control, step
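For context, the update above is the AdaMax recursion of [1]: a bias-corrected first moment divided by a running maximum of the gradient norm (the paper uses the element-wise absolute gradient; the commit uses a single norm). A self-contained sketch of the same recursion on a toy quadratic, under those assumptions and with illustrative names:

import numpy as np

# Standalone AdaMax loop mirroring apply_update above; names are illustrative.
def adamax_minimize(grad_fn, x0, alpha=0.1, beta1=0.9, beta2=0.999, n_iter=200):
    x = np.asarray(x0, dtype=float)
    m = np.zeros_like(x)   # exponential moving average of gradients (vel1)
    u = 0.0                # running max of the gradient norm (vel2)
    for t in range(1, n_iter + 1):
        g = grad_fn(x)
        m = beta1*m + (1 - beta1)*g
        u = max(beta2*u, np.linalg.norm(g))
        x = x - (alpha / (1 - beta1**t)) * m / u   # steepest-descent step
    return x

# Toy check: minimize f(x) = sum(x**2), whose gradient is 2*x
print(adamax_minimize(lambda x: 2.0*x, x0=[3.0, -1.5]))  # approaches [0, 0]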
