From e30f78295fc36be273f2434293f43d3a4671dcc8 Mon Sep 17 00:00:00 2001
From: Emanuele Ballarin <emanuele@ballarin.cc>
Date: Wed, 21 Aug 2024 01:08:47 +0200
Subject: [PATCH] Fix some issues found by CI

Signed-off-by: Emanuele Ballarin <emanuele@ballarin.cc>
---
 ebtorch/optim/sophia.py | 12 ++++++------
 setup.py                |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/ebtorch/optim/sophia.py b/ebtorch/optim/sophia.py
index d6b7185..d114795 100644
--- a/ebtorch/optim/sophia.py
+++ b/ebtorch/optim/sophia.py
@@ -29,15 +29,15 @@ def __init__(
         capturable: bool = False,
     ):
         # Validate parameters
-        if not lr >= 0.0:
+        if lr < 0:
             raise ValueError(f"Learning rate must be positive. Got: {lr}.")
-        if not 0.0 <= betas[0] < 1.0:
+        if not 0 <= betas[0] < 1:
             raise ValueError(f"Beta[0] must be in [0, 1). Got: {betas[0]}.")
-        if not 0.0 <= betas[1] < 1.0:
+        if not 0 <= betas[1] < 1:
             raise ValueError(f"Beta[1] must be in [0, 1). Got: {betas[1]}.")
-        if not rho >= 0.0:
+        if rho < 0:
             raise ValueError(f"Rho must be non-negative. Got: {rho}.")
-        if not weight_decay >= 0.0:
+        if weight_decay < 0:
             raise ValueError(f"Weight decay must be non-negative. Got: {weight_decay}.")
         defaults = dict(
             lr=lr,
@@ -65,7 +65,7 @@ def __setstate__(self, state):
     @torch.no_grad()
     def update_hessian(self):
         for group in self.param_groups:
-            beta1, beta2 = group["betas"]
+            _, beta2 = group["betas"]
             for p in group["params"]:
                 if p.grad is None:
                     continue
diff --git a/setup.py b/setup.py
index cadc9c2..e45a409 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ def read(fname):
 
 setup(
     name=PACKAGENAME,
-    version="0.26.7",
+    version="0.26.8",
     author="Emanuele Ballarin",
     author_email="emanuele@ballarin.cc",
     url="https://github.com/emaballarin/ebtorch",
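
For context, below is a minimal standalone sketch of the two patterns the patch adopts: direct range checks (`lr < 0`) instead of the negated comparisons (`not lr >= 0.0`) flagged by CI, and `_` as the target for a deliberately unused unpacked value. The function and class names here are hypothetical stand-ins for illustration only, not the ebtorch implementation:

```python
from typing import Tuple


def validate_hyperparams(
    lr: float,
    betas: Tuple[float, float],
    rho: float,
    weight_decay: float,
) -> None:
    # Direct comparisons read as "reject invalid values" rather than
    # "negate the valid-range predicate", which is what CI objected to.
    if lr < 0:
        raise ValueError(f"Learning rate must be positive. Got: {lr}.")
    if not 0 <= betas[0] < 1:
        raise ValueError(f"Beta[0] must be in [0, 1). Got: {betas[0]}.")
    if not 0 <= betas[1] < 1:
        raise ValueError(f"Beta[1] must be in [0, 1). Got: {betas[1]}.")
    if rho < 0:
        raise ValueError(f"Rho must be non-negative. Got: {rho}.")
    if weight_decay < 0:
        raise ValueError(f"Weight decay must be non-negative. Got: {weight_decay}.")


def hessian_ema_coeff(betas: Tuple[float, float]) -> float:
    # Only beta2 drives the Hessian EMA in update_hessian(); binding the
    # unused first element to `_` silences the unused-variable warning.
    _, beta2 = betas
    return beta2
```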