Added AdaMax optimizer #54

Merged · 3 commits · Jan 3, 2024
2 changes: 1 addition & 1 deletion popt/loop/ensemble.py
@@ -462,7 +462,7 @@ def _invert_scale_state(self):
         """
         Transform the internal state from [0, 1] to [lb, ub]
         """
-        if self.upper_bound and self.lower_bound:
+        if self.transform and (self.upper_bound and self.lower_bound):
             for i, key in enumerate(self.state):
                 if self.transform:
                     self.state[key] = self.lower_bound[i] + self.state[key]*(self.upper_bound[i] - self.lower_bound[i])
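For context, the guard added above only changes when the back-transform runs; the mapping itself is unchanged. A minimal standalone sketch of that [0, 1] -> [lb, ub] mapping (the function name and arguments here are illustrative, not part of the ensemble API):

import numpy as np

def invert_scale(x_scaled, lower_bound, upper_bound):
    # Map values scaled to [0, 1] back to the physical interval [lb, ub],
    # mirroring the expression used in _invert_scale_state above.
    x_scaled = np.asarray(x_scaled, dtype=float)
    return lower_bound + x_scaled * (upper_bound - lower_bound)

# Example: a control scaled to [0, 1] with bounds [10, 50]
print(invert_scale([0.0, 0.5, 1.0], 10.0, 50.0))   # -> [10. 30. 50.]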
3 changes: 3 additions & 0 deletions popt/update_schemes/enopt.py
@@ -130,6 +130,9 @@ def __set__variable(var_name=None, defalut=None):
             self.optimizer = opt.GradientAscent(self.alpha, self.beta)
         elif optimizer == 'Adam':
             self.optimizer = opt.Adam(self.alpha, self.beta)
+        elif optimizer == 'AdaMax':
+            self.normalize = False
+            self.optimizer = opt.AdaMax(self.alpha, self.beta)

         # The EnOpt class self-ignites, and it is possible to send the EnOpt class as a callable method to scipy.minimize
         self.run_loop()  # run_loop resides in the Optimization class (super)
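As a rough usage sketch of the branch added above (not the full EnOpt setup, whose constructor options are not shown in this diff), the AdaMax optimizer can also be built directly from popt.update_schemes.optimizers, assuming popt is importable; the alpha and beta values below are placeholders:

import popt.update_schemes.optimizers as opt

alpha, beta = 0.1, 0.9               # placeholder step size and beta1, passed the same way enopt.py passes self.alpha, self.beta
optimizer = opt.AdaMax(alpha, beta)  # beta2 keeps its default of 0.999

# EnOpt then drives the optimizer through apply_update with the current control
# vector, a gradient estimate, and the iteration counter used for bias correction:
# new_control, step = optimizer.apply_update(control, gradient, iter=1)

Setting self.normalize = False in the new 'AdaMax' branch disables EnOpt's own gradient normalization, presumably because the AdaMax step is already divided by the norm-based temp_vel2 term defined in optimizers.py.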
28 changes: 28 additions & 0 deletions popt/update_schemes/optimizers.py
@@ -287,3 +287,31 @@ def restore_parameters(self):

     def get_step_size(self):
         return self._step_size
+
+
+class AdaMax(Adam):
+    '''
+    AdaMax optimizer
+
+    References
+    -------------------------------------------------------------------------------------
+    [1] Kingma, D. P., & Ba, J. (2014).
+        Adam: A Method for Stochastic Optimization.
+        arXiv preprint arXiv:1412.6980.
+    '''
+    def __init__(self, step_size, beta1=0.9, beta2=0.999):
+        super().__init__(step_size, beta1, beta2)
+
+    def apply_update(self, control, gradient, **kwargs):
+        iter = kwargs['iter']
+        alpha = self._step_size
+        beta1 = self.beta1
+        beta2 = self.beta2
+
+        self.temp_vel1 = beta1*self.vel1 + (1-beta1)*gradient
+        self.temp_vel2 = max(beta2*self.vel2, np.linalg.norm(gradient))
+
+        step = alpha/(1-beta1**iter) * self.temp_vel1/self.temp_vel2
+        new_control = control - step  # steepest descent
+        return new_control, step

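For reference, the update implemented in the AdaMax class follows [1], where Adam's second-moment estimate is replaced by an exponentially weighted infinity norm. The class above tracks a single scalar (np.linalg.norm(gradient)), whereas Algorithm 2 of the paper takes the maximum element-wise; a self-contained sketch of that element-wise textbook update, independent of the popt classes, is:

import numpy as np

def adamax_step(x, grad, m, u, t, alpha=0.002, beta1=0.9, beta2=0.999):
    # One AdaMax step for minimization, as in Kingma & Ba (2014), Algorithm 2.
    m = beta1 * m + (1 - beta1) * grad          # biased first-moment estimate
    u = np.maximum(beta2 * u, np.abs(grad))     # exponentially weighted infinity norm
    x = x - (alpha / (1 - beta1**t)) * m / u    # bias-corrected parameter update
    return x, m, u

# Example: a few steps on f(x) = x[0]**2 + x[1]**2, whose gradient is 2*x
x = np.array([1.0, -2.0])
m = np.zeros_like(x)
u = np.zeros_like(x)
for t in range(1, 6):
    x, m, u = adamax_step(x, 2 * x, m, u, t)

Whether to use the scalar gradient norm (as in this PR) or the element-wise maximum is a design choice; the scalar variant applies the same effective step scaling to every control variable.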