
chore: Remove learning rate decay
Set no learning rate decay by default. Users who want learning rate decay can still set it themselves, but the original SSD implementation does not use a smooth learning rate decay; it uses a step-function learning rate schedule instead.
pierluigiferrari committed Apr 13, 2018
1 parent 6503012 commit 8a7c3ef
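
For reference, the step-function schedule the commit message refers to can be reproduced in Keras with the LearningRateScheduler callback. The sketch below is illustrative only: the epoch boundaries and rates are placeholder values, not the exact settings from the original SSD training runs.

from keras.callbacks import LearningRateScheduler

def lr_schedule(epoch):
    # Illustrative step function: hold the learning rate constant and
    # drop it by a factor of 10 at fixed epoch boundaries. The
    # boundaries below are placeholders; choose them for your own run.
    if epoch < 80:
        return 0.001
    elif epoch < 100:
        return 0.0001
    else:
        return 0.00001

# Pass the callback when training, e.g.:
# model.fit_generator(train_generator, steps_per_epoch=steps, epochs=120,
#                     callbacks=[LearningRateScheduler(lr_schedule)])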
Showing 6 changed files with 11 additions and 11 deletions.
4 changes: 2 additions & 2 deletions ssd300_evaluation_COCO.ipynb
@@ -127,9 +127,9 @@
 "\n",
 "# 3: Compile the model so that Keras won't complain the next time you load it.\n",
 "\n",
-"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=5e-04)\n",
+"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n",
 "\n",
-"ssd_loss = SSDLoss(neg_pos_ratio=3, n_neg_min=0, alpha=1.0)\n",
+"ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
 "\n",
 "model.compile(optimizer=adam, loss=ssd_loss.compute_loss)"
 ]
4 changes: 2 additions & 2 deletions ssd300_evaluation_Pascal_VOC.ipynb
@@ -114,9 +114,9 @@
 "\n",
 "# 3: Compile the model so that Keras won't complain the next time you load it.\n",
 "\n",
-"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=5e-04)\n",
+"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n",
 "\n",
-"ssd_loss = SSDLoss(neg_pos_ratio=3, n_neg_min=0, alpha=1.0)\n",
+"ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
 "\n",
 "model.compile(optimizer=adam, loss=ssd_loss.compute_loss)"
 ]
4 changes: 2 additions & 2 deletions ssd300_inference.ipynb
@@ -118,9 +118,9 @@
 "\n",
 "# 3: Compile the model so that Keras won't complain the next time you load it.\n",
 "\n",
-"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=5e-04)\n",
+"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n",
 "\n",
-"ssd_loss = SSDLoss(neg_pos_ratio=3, n_neg_min=0, alpha=1.0)\n",
+"ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
 "\n",
 "model.compile(optimizer=adam, loss=ssd_loss.compute_loss)"
 ]
4 changes: 2 additions & 2 deletions ssd512_inference.ipynb
@@ -119,9 +119,9 @@
 "\n",
 "# 3: Compile the model so that Keras won't complain the next time you load it.\n",
 "\n",
-"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=5e-04)\n",
+"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n",
 "\n",
-"ssd_loss = SSDLoss(neg_pos_ratio=3, n_neg_min=0, alpha=1.0)\n",
+"ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
 "\n",
 "model.compile(optimizer=adam, loss=ssd_loss.compute_loss)"
 ]
2 changes: 1 addition & 1 deletion ssd7_training.ipynb
@@ -149,7 +149,7 @@
 "\n",
 "# 3: Instantiate an Adam optimizer and the SSD loss function and compile the model\n",
 "\n",
-"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=5e-04)\n",
+"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n",
 "\n",
 "ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
 "\n",
4 changes: 2 additions & 2 deletions weight_sampling_tutorial.ipynb
@@ -530,9 +530,9 @@
 "\n",
 "# 3: Instantiate an Adam optimizer and the SSD loss function and compile the model.\n",
 "\n",
-"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=5e-04)\n",
+"adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n",
 "\n",
-"ssd_loss = SSDLoss(neg_pos_ratio=3, n_neg_min=0, alpha=1.0)\n",
+"ssd_loss = SSDLoss(neg_pos_ratio=3, alpha=1.0)\n",
 "\n",
 "model.compile(optimizer=adam, loss=ssd_loss.compute_loss)"
 ]
