Fix some issues found by CI
Signed-off-by: Emanuele Ballarin <emanuele@ballarin.cc>
emaballarin committed Aug 20, 2024
1 parent 9551cea commit e30f782
Showing 2 changed files with 7 additions and 7 deletions.
12 changes: 6 additions & 6 deletions ebtorch/optim/sophia.py
@@ -29,15 +29,15 @@ def __init__(
         capturable: bool = False,
     ):
         # Validate parameters
-        if not lr >= 0.0:
+        if lr < 0:
             raise ValueError(f"Learning rate must be positive. Got: {lr}.")
-        if not 0.0 <= betas[0] < 1.0:
+        if not 0 <= betas[0] < 1:
             raise ValueError(f"Beta[0] must be in [0, 1). Got: {betas[0]}.")
-        if not 0.0 <= betas[1] < 1.0:
+        if not 0 <= betas[1] < 1:
             raise ValueError(f"Beta[1] must be in [0, 1). Got: {betas[1]}.")
-        if not rho >= 0.0:
+        if rho < 0:
             raise ValueError(f"Rho must be non-negative. Got: {rho}.")
-        if not weight_decay >= 0.0:
+        if weight_decay < 0:
             raise ValueError(f"Weight decay must be non-negative. Got: {weight_decay}.")
         defaults = dict(
             lr=lr,
@@ -65,7 +65,7 @@ def __setstate__(self, state):
     @torch.no_grad()
     def update_hessian(self):
         for group in self.param_groups:
-            beta1, beta2 = group["betas"]
+            _, beta2 = group["betas"]
            for p in group["params"]:
                if p.grad is None:
                    continue
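Neither hunk changes behaviour: the rewritten comparisons accept the same hyperparameter ranges as before, and replacing the unused beta1 with _ in update_hessian only silences the CI lint warning. A minimal usage sketch follows; the class name SophiaG, the constructor defaults, and the example hyperparameter values are assumptions for illustration, not part of this commit.

# Hypothetical sketch: class name, defaults, and values are assumptions;
# only the validation behaviour mirrors the diff above.
import torch
from ebtorch.optim.sophia import SophiaG  # assumed class name

model = torch.nn.Linear(4, 2)

# Valid hyperparameters construct the optimizer as before.
opt = SophiaG(model.parameters(), lr=1e-3, betas=(0.965, 0.99), rho=0.04)

# A negative learning rate fails the new `lr < 0` check and raises ValueError,
# just as the previous `not lr >= 0.0` check did.
try:
    SophiaG(model.parameters(), lr=-1e-3)
except ValueError as exc:
    print(exc)  # Learning rate must be positive. Got: -0.001.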
2 changes: 1 addition & 1 deletion setup.py
@@ -26,7 +26,7 @@ def read(fname):
 
 setup(
     name=PACKAGENAME,
-    version="0.26.7",
+    version="0.26.8",
     author="Emanuele Ballarin",
     author_email="emanuele@ballarin.cc",
     url="https://github.com/emaballarin/ebtorch",
