From f79cc92fae8b8ec9766f03cb2a6f3dbeb1609404 Mon Sep 17 00:00:00 2001
From: Guillaume Lemaitre <g.lemaitre58@gmail.com>
Date: Sat, 19 Jan 2019 14:50:22 +0100
Subject: [PATCH] FIX: move the BatchNormalization before the activation with no bias (#531)

---
 doc/whats_new/v0.5.rst                   |  9 +++++++++
 .../porto_seguro_keras_under_sampling.py | 14 +++++++-------
 2 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/doc/whats_new/v0.5.rst b/doc/whats_new/v0.5.rst
index 964a4ad7b..28c929189 100644
--- a/doc/whats_new/v0.5.rst
+++ b/doc/whats_new/v0.5.rst
@@ -22,3 +22,12 @@ Maintenance
 
 - Make it possible to ``import imblearn`` and access submodule.
   :issue:`500` by :user:`Guillaume Lemaitre <glemaitre>`.
+
+Bug
+...
+
+- Fix wrong usage of :class:`keras.layers.BatchNormalization` in
+  ``porto_seguro_keras_under_sampling.py`` example. The batch normalization
+  was moved before the activation function and the bias was removed from the
+  dense layer.
+  :issue:`531` by :user:`Guillaume Lemaitre <glemaitre>`.
diff --git a/examples/applications/porto_seguro_keras_under_sampling.py b/examples/applications/porto_seguro_keras_under_sampling.py
index f1a006660..4bee80550 100644
--- a/examples/applications/porto_seguro_keras_under_sampling.py
+++ b/examples/applications/porto_seguro_keras_under_sampling.py
@@ -98,20 +98,20 @@ def make_model(n_features):
     model = Sequential()
     model.add(Dense(200, input_shape=(n_features,),
                     kernel_initializer='glorot_normal'))
-    model.add(Activation('relu'))
     model.add(BatchNormalization())
-    model.add(Dropout(0.5))
-    model.add(Dense(100, kernel_initializer='glorot_normal'))
     model.add(Activation('relu'))
+    model.add(Dropout(0.5))
+    model.add(Dense(100, kernel_initializer='glorot_normal', use_bias=False))
     model.add(BatchNormalization())
-    model.add(Dropout(0.25))
-    model.add(Dense(50, kernel_initializer='glorot_normal'))
     model.add(Activation('relu'))
+    model.add(Dropout(0.25))
+    model.add(Dense(50, kernel_initializer='glorot_normal', use_bias=False))
     model.add(BatchNormalization())
-    model.add(Dropout(0.15))
-    model.add(Dense(25, kernel_initializer='glorot_normal'))
     model.add(Activation('relu'))
+    model.add(Dropout(0.15))
+    model.add(Dense(25, kernel_initializer='glorot_normal', use_bias=False))
     model.add(BatchNormalization())
+    model.add(Activation('relu'))
     model.add(Dropout(0.1))
     model.add(Dense(1, activation='sigmoid'))
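
For readers applying the same fix to their own models, a minimal standalone
sketch of the corrected pattern follows (layer sizes, input width, and the
compile settings are illustrative, not taken from the example): each hidden
block is Dense without a bias, then BatchNormalization, then the activation,
then Dropout. The bias can be dropped because BatchNormalization re-centers
the pre-activations with its own beta parameter, which makes a preceding
bias term redundant.

    # Illustrative sketch; sizes are hypothetical, imports follow the
    # `keras` namespace used in the changelog entry above.
    from keras.models import Sequential
    from keras.layers import Activation, BatchNormalization, Dense, Dropout

    model = Sequential()
    # Dense without a bias: BatchNormalization's beta offset subsumes it.
    model.add(Dense(100, input_shape=(50,),
                    kernel_initializer='glorot_normal', use_bias=False))
    model.add(BatchNormalization())  # normalize the pre-activations first
    model.add(Activation('relu'))    # then apply the non-linearity
    model.add(Dropout(0.25))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(optimizer='adam', loss='binary_crossentropy')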