Skip to content

Commit

Permalink
Fix off-by-one error in EarlyStopping callback (keras-team#8100)
Browse files Browse the repository at this point in the history
  • Loading branch information
nicolewhite authored and fchollet committed Oct 10, 2017
1 parent 3c19add commit 1b53767
Show file tree
Hide file tree
Showing 2 changed files with 26 additions and 1 deletion.
2 changes: 1 addition & 1 deletion keras/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -497,10 +497,10 @@ def on_epoch_end(self, epoch, logs=None):
self.best = current
self.wait = 0
else:
self.wait += 1
if self.wait >= self.patience:
self.stopped_epoch = epoch
self.model.stop_training = True
self.wait += 1

def on_train_end(self, logs=None):
if self.stopped_epoch > 0 and self.verbose > 0:
Expand Down
25 changes: 25 additions & 0 deletions tests/keras/test_callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -256,6 +256,31 @@ def test_EarlyStopping_reuse():
assert len(hist.epoch) >= patience


@keras_test
def test_EarlyStopping_patience():
    """Verify EarlyStopping halts training exactly `patience` epochs after
    the last improvement of the monitored quantity."""

    class FakeModel(object):
        # Minimal stand-in for a Keras model: the callback only touches
        # the `stop_training` flag.
        def __init__(self):
            self.stop_training = False

    early_stop = callbacks.EarlyStopping(monitor='val_loss', patience=2)
    early_stop.model = FakeModel()

    losses = [0.0860, 0.1096, 0.1040, 0.1019]

    # The loss never improves on its first value, so with patience=2 the
    # callback should request a stop at the end of the third epoch.
    early_stop.on_train_begin()

    epochs_trained = 0
    for epoch, loss in enumerate(losses):
        epochs_trained += 1
        early_stop.on_epoch_end(epoch, logs={'val_loss': loss})
        if early_stop.model.stop_training:
            break

    assert epochs_trained == 3


@keras_test
def test_LearningRateScheduler():
np.random.seed(1337)
Expand Down

0 comments on commit 1b53767

Please sign in to comment.