-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathDeepCNN_Functions.py
32 lines (27 loc) · 1.18 KB
/
DeepCNN_Functions.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
''' Helper Functions '''
#from tensorflow.keras.callbacks import LearningRateScheduler # A Keras callback. We’ll pass our learning rate schedule to this class which will be called as a callback at the completion of each epoch to calculate our learning rate
import numpy as np
# class LearningRate():
# def __init__(self, epochs, initAlpha=0.01, factor=0.5, dropEvery=10):
# # store the base initial learning rate, drop factor, and
# # epochs to drop every
# self.initAlpha = initAlpha
# self.factor = factor
# self.dropEvery = dropEvery
# def __call__(self, epoch):
# # compute the learning rate for the current epoch
# exp = np.floor((1 + epoch) / self.dropEvery)
# alpha = self.initAlpha * (self.factor ** exp)
# # return the learning rate
# return float(alpha)
def scheduler(epoch, *, initAlpha=0.01, factor=0.5, dropEvery=10):
    """Step-decay learning rate schedule for Keras' LearningRateScheduler.

    The rate starts at ``initAlpha`` and is multiplied by ``factor`` once
    every ``dropEvery`` epochs:  alpha = initAlpha * factor ** floor((1 + epoch) / dropEvery).

    Parameters
    ----------
    epoch : int
        Zero-based index of the current training epoch (supplied by Keras).
    initAlpha : float, keyword-only
        Base learning rate before any decay (default 0.01).
    factor : float, keyword-only
        Multiplicative drop applied at each step (default 0.5).
    dropEvery : int, keyword-only
        Number of epochs between consecutive drops (default 10).

    Returns
    -------
    float
        The learning rate to use for this epoch.

    Notes
    -----
    The extra parameters are keyword-only so that a callback invoking
    ``scheduler(epoch, lr)`` positionally behaves the same as before
    (it raises TypeError rather than silently binding ``lr`` to ``initAlpha``).
    """
    # floor((1 + epoch) / dropEvery) counts how many drops have occurred so far
    exp = np.floor((1 + epoch) / dropEvery)
    alpha = initAlpha * (factor ** exp)
    # log the chosen rate for training-time visibility (kept from the original)
    print('lr =', alpha)
    return float(alpha)
# TODO: stop the decay (and the whole training) after epoch 20, as in the paper