Skip to content

Commit

Permalink
Merge pull request deeplearning4j#1492 from deeplearning4j/mw_bn_quirk
Browse files Browse the repository at this point in the history
Fixed test error by setting nOut for the BatchNormalization layer.
  • Loading branch information
nyghtowl committed May 2, 2016
2 parents bb723e8 + 14f3805 commit 36804c6
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,6 @@ public void testCNNComputationGraphKernelTooLarge() {
}

@Test
@Ignore
public void testCNNComputationGraphSingleOutFeatureMap() {
int imageWidth = 23;
int imageHeight = 23;
Expand Down Expand Up @@ -234,7 +233,7 @@ public void testCNNComputationGraphSingleOutFeatureMap() {
.build(), "input")
.addLayer("pool1", new SubsamplingLayer.Builder()
.poolingType(SubsamplingLayer.PoolingType.MAX)
.kernelSize(imageHeight - kernelHeight + 1, 1)
.kernelSize(imageHeight - kernelHeight, 1)
.stride(1, 1)
.build(), "conv1")
.addLayer("output", new OutputLayer.Builder()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ public void testBatchNorm() {
.list()
.layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation("tanh").build())
.layer(1, new DenseLayer.Builder().nIn(3).nOut(2).weightInit(WeightInit.XAVIER).activation("tanh").build())
.layer(2, new BatchNormalization.Builder().build())
.layer(2, new BatchNormalization.Builder().nOut(2).build())
.layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
.weightInit(WeightInit.XAVIER)
.activation("softmax")
Expand Down Expand Up @@ -650,7 +650,7 @@ public void testDataSetScoreCNN(){
.learningRate(1.0)
.seed(12345L)
.list()
.layer(0, new ConvolutionLayer.Builder(2,2).nOut(3).build())
.layer(0, new ConvolutionLayer.Builder(2,2).nOut(1).build())
.layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nOut(2).build())
.cnnInputSize(3,3,2)
.pretrain(false).backprop(true).build();
Expand Down

0 comments on commit 36804c6

Please sign in to comment.