
Commit ebb04e2

change end_lr to end_learning_rate
1 parent 70f97a8 commit ebb04e2

2 files changed: +18 −18 lines

python/paddle/fluid/tests/unittests/test_lr_scheduler.py

Lines changed: 9 additions & 9 deletions
@@ -325,7 +325,7 @@ def one_cycle_lr(epoch_num,
                  max_learning_rate,
                  total_steps,
                  divide_factor=25,
-                 end_lr=0.0001,
+                 end_learning_rate=0.0001,
                  phase_pct=0.3,
                  anneal_strategy='cos',
                  three_phase=False,
@@ -347,7 +347,7 @@ def one_cycle_lr(epoch_num,
             },
             {
                 'start_lr': initial_lr,
-                'end_lr': end_lr,
+                'end_lr': end_learning_rate,
             },
         ]
     else:
@@ -359,7 +359,7 @@ def one_cycle_lr(epoch_num,
             },
             {
                 'start_lr': max_learning_rate,
-                'end_lr': end_lr,
+                'end_lr': end_learning_rate,
             },
         ]
 
@@ -539,10 +539,10 @@ def test_scheduler(self):
                 max_learning_rate=-1.5, total_steps=20)
         with self.assertRaises(TypeError):
             paddle.optimizer.lr.OneCycleLR(
-                max_learning_rate=0.1, total_steps=20, end_lr='test')
+                max_learning_rate=0.1, total_steps=20, end_learning_rate='test')
         with self.assertRaises(ValueError):
             paddle.optimizer.lr.OneCycleLR(
-                max_learning_rate=0.1, total_steps=20, end_lr=-1)
+                max_learning_rate=0.1, total_steps=20, end_learning_rate=-1)
         with self.assertRaises(TypeError):
             paddle.optimizer.lr.OneCycleLR(
                 max_learning_rate=0.1, total_steps='test')
@@ -622,31 +622,31 @@ def test_scheduler(self):
             "max_learning_rate": 0.1,
             "total_steps": 20,
             "divide_factor": 5,
-            "end_lr": 0.0001,
+            "end_learning_rate": 0.0001,
             "anneal_strategy": 'cos',
             "phase_pct": 0.3,
             "three_phase": False,
         }), (one_cycle_lr, paddle.optimizer.lr.OneCycleLR, {
             "max_learning_rate": 0.5,
             "total_steps": 20,
             "divide_factor": 10,
-            "end_lr": 0.001,
+            "end_learning_rate": 0.001,
             "anneal_strategy": 'linear',
             "phase_pct": 0.4,
             "three_phase": False,
         }), (one_cycle_lr, paddle.optimizer.lr.OneCycleLR, {
             "max_learning_rate": 1.0,
             "total_steps": 20,
             "divide_factor": 9,
-            "end_lr": 0.0001,
+            "end_learning_rate": 0.0001,
             "anneal_strategy": 'cos',
             "phase_pct": 0.3,
             "three_phase": True,
         }), (one_cycle_lr, paddle.optimizer.lr.OneCycleLR, {
             "max_learning_rate": 0.3,
             "total_steps": 20,
             "divide_factor": 25,
-            "end_lr": 0.0005,
+            "end_learning_rate": 0.0005,
             "anneal_strategy": 'linear',
             "phase_pct": 0.2,
             "three_phase": True,

python/paddle/optimizer/lr.py

Lines changed: 9 additions & 9 deletions
@@ -1613,7 +1613,7 @@ class OneCycleLR(LRScheduler):
             Functionally, it defines the initial learning rate by ``divide_factor`` .
         total_steps (int): Number of total training steps.
         divide_factor (float): Initial learning rate will be determined by initial_learning_rate = max_learning_rate / divide_factor. Default: 25.
-        end_lr (float, optional): The minimum learning rate during training, it should be much less than initial learning rate.
+        end_learning_rate (float, optional): The minimum learning rate during training, it should be much less than initial learning rate.
         phase_pct (float): The percentage of total steps which used to increasing learning rate. Default: 0.3.
         anneal_strategy (str, optional): Strategy of adjusting learning rate.'cos' for cosine annealing,
             'linear' for linear annealing. Default: 'cos'.
@@ -1682,7 +1682,7 @@ def __init__(self,
                  max_learning_rate,
                  total_steps,
                  divide_factor=25.,
-                 end_lr=0.0001,
+                 end_learning_rate=0.0001,
                  phase_pct=0.3,
                  anneal_strategy='cos',
                  three_phase=False,
@@ -1696,13 +1696,13 @@ def __init__(self,
         if max_learning_rate < 0:
             raise ValueError("'max_learning_rate' must be a positive integer.")
 
-        # Check type and value of end_lr
-        if not isinstance(end_lr, (float, int)):
+        # Check type and value of end_learning_rate
+        if not isinstance(end_learning_rate, (float, int)):
             raise TypeError(
-                "'end_lr' must be 'float' or 'int', but received {}".format(
-                    type(total_steps)))
-        if end_lr < 0:
-            raise ValueError("'end_lr' must be a positive integer.")
+                "'end_learning_rate' must be 'float' or 'int', but received {}".
+                format(type(total_steps)))
+        if end_learning_rate < 0:
+            raise ValueError("'end_learning_rate' must be a positive integer.")
 
         # Check type and value of total_steps
         if not isinstance(total_steps, int):
@@ -1728,7 +1728,7 @@ def __init__(self,
                 format(type(divide_factor)))
 
         initial_lr = max_learning_rate / float(divide_factor)
-        min_lr = float(end_lr)
+        min_lr = float(end_learning_rate)
 
         if three_phase:
             if phase_pct >= 0.5:
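
For downstream code, only the keyword changes. A minimal usage sketch under the new name, assuming a Paddle build that includes this commit (the model and hyperparameter values are illustrative, not taken from the diff):

import paddle

linear = paddle.nn.Linear(10, 10)
# Was end_lr=0.001 before this commit; end_learning_rate=0.001 after it.
scheduler = paddle.optimizer.lr.OneCycleLR(
    max_learning_rate=0.5, total_steps=20, end_learning_rate=0.001)
sgd = paddle.optimizer.SGD(
    learning_rate=scheduler, parameters=linear.parameters())

for step in range(20):
    x = paddle.uniform([4, 10])
    loss = paddle.mean(linear(x))
    loss.backward()
    sgd.step()
    sgd.clear_grad()
    scheduler.step()  # advance the one-cycle schedule once per step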
