From 14f380582bac9fae9f47a249c233188351f47a0d Mon Sep 17 00:00:00 2001
From: nyghtowl
Date: Mon, 2 May 2016 09:07:05 -0700
Subject: [PATCH] Fixed test error by setting nOut for bn. Required when not
 using cnnInputSize

---
 .../java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java b/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java
index 0b973e6cdf70..b428cbf9e948 100644
--- a/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java
+++ b/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java
@@ -139,7 +139,7 @@ public void testBatchNorm() {
                 .list()
                 .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation("tanh").build())
                 .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).weightInit(WeightInit.XAVIER).activation("tanh").build())
-                .layer(2, new BatchNormalization.Builder().build())
+                .layer(2, new BatchNormalization.Builder().nOut(2).build())
                 .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                         .weightInit(WeightInit.XAVIER)
                         .activation("softmax")
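
For context, below is a minimal sketch of the corrected configuration. The builder calls, layer classes, and activations are taken from the patch itself; the OutputLayer dimensions and variable names are illustrative assumptions, since that part of the hunk is truncated above. The point of the fix: when the input size is not inferred (e.g. no cnnInputSize / input-type preprocessing), BatchNormalization needs nOut set explicitly to match the activations coming out of the preceding layer.

    import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
    import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
    import org.deeplearning4j.nn.conf.layers.BatchNormalization;
    import org.deeplearning4j.nn.conf.layers.DenseLayer;
    import org.deeplearning4j.nn.conf.layers.OutputLayer;
    import org.deeplearning4j.nn.weights.WeightInit;
    import org.nd4j.linalg.lossfunctions.LossFunctions;

    // Sketch of the fixed test configuration (DL4J builder API as used in the patch).
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3)
                    .weightInit(WeightInit.XAVIER).activation("tanh").build())
            .layer(1, new DenseLayer.Builder().nIn(3).nOut(2)
                    .weightInit(WeightInit.XAVIER).activation("tanh").build())
            // The fix: nOut(2) matches the nOut of layer 1, which batch norm
            // cannot infer here because no input size / input type is provided.
            .layer(2, new BatchNormalization.Builder().nOut(2).build())
            // Output layer dimensions below are assumptions for illustration only;
            // the actual values are in the truncated part of the diff.
            .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .weightInit(WeightInit.XAVIER)
                    .activation("softmax")
                    .nIn(2).nOut(3).build())
            .build();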