✨ Add utility functions for LR scheduling

Commit 4d2ed35 (1 parent: 35c0249)
Showing 2 changed files with 34 additions and 0 deletions.
@@ -0,0 +1,4 @@
from .scheduling import sched_sss
from .scheduling import sched_ssse

del scheduling
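This first file appears to be a package initializer (presumably an __init__.py; the file name is not shown in this view). It re-exports the two scheduling helpers and then deletes the submodule binding, so after `del scheduling` only `sched_sss` and `sched_ssse` remain visible in the package namespace and callers import them directly from the package rather than from the `scheduling` submodule.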
@@ -0,0 +1,30 @@
# Copyright (c) 2020- AI-CPS@UniTS
# Copyright (c) 2020- Emanuele Ballarin <emanuele@ballarin.cc>
# SPDX-License-Identifier: Apache-2.0

import numpy as np


def sched_sss(start_lr: float, stop_lr: float, nr_decays):
    r"""Obtain the canonical scheduling parametrization from the start/stop/step one.
    Arguments:
        start_lr (float): initial learning rate
        stop_lr (float): final learning rate
        nr_decays: number of learning rate decay steps
    """
    return (start_lr), (np.power((stop_lr / start_lr), (1.0 / nr_decays)).item())


def sched_ssse(start_lr: float, stop_lr: float, nr_decays, epochs: int):
    r"""Obtain the canonical scheduling parametrization from the start/stop/step one, with explicit epoch specification.
    Arguments:
        start_lr (float): initial learning rate
        stop_lr (float): final learning rate
        nr_decays: number of learning rate decay steps
        epochs (int): number of total learning epochs
    """
    return (sched_sss(start_lr, stop_lr, nr_decays)), int(epochs // nr_decays)
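The returned pair (start_lr, gamma), with gamma = (stop_lr / start_lr) ** (1 / nr_decays), is the canonical parametrization of a multiplicative step decay, and sched_ssse additionally converts the total epoch budget into a per-step interval. The commit itself does not show a consumer of these helpers; a minimal usage sketch, assuming they feed PyTorch's torch.optim.lr_scheduler.StepLR (an assumption, not part of this repository), could look like:

# Minimal sketch (assumption: the helpers parametrize a PyTorch StepLR schedule;
# sched_ssse is the function defined in the file above).
import torch
from torch.optim.lr_scheduler import StepLR

# Decay from 0.1 to 0.001 over 4 decay steps within 100 epochs.
(start_lr, gamma), step_size = sched_ssse(0.1, 0.001, 4, epochs=100)
# start_lr == 0.1, gamma == (0.001 / 0.1) ** (1 / 4) ≈ 0.3162, step_size == 25

model = torch.nn.Linear(10, 1)  # placeholder model, for illustration only
optimizer = torch.optim.SGD(model.parameters(), lr=start_lr)
scheduler = StepLR(optimizer, step_size=step_size, gamma=gamma)

for epoch in range(100):
    # ... training loop ...
    scheduler.step()  # multiplies the LR by gamma every `step_size` epochs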