Commit

init upsampling 1d test
maxpumperla committed Sep 12, 2017
1 parent fd6e39f commit d447151
Showing 1 changed file with 26 additions and 29 deletions.
Upsampling1DTest.java
@@ -4,7 +4,7 @@
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
-import org.deeplearning4j.nn.conf.layers.Upsampling2D;
+import org.deeplearning4j.nn.conf.layers.Upsampling1D;
import org.deeplearning4j.nn.gradient.Gradient;
import org.junit.Test;
import org.nd4j.linalg.api.ndarray.INDArray;
@@ -26,48 +26,44 @@ public class Upsampling1DTest {
private int nExamples = 1;
private int depth = 20;
private int nChannelsIn = 1;
-private int inputWidth = 28;
-private int inputHeight = 28;
-
+private int inputLength = 28;
private int size = 2;
-private int outputWidth = inputWidth * size;
-private int outputHeight = inputHeight * size;
-
-private INDArray epsilon = Nd4j.ones(nExamples, depth, outputHeight, outputWidth);
+private int outputLength = inputLength * size;
+private INDArray epsilon = Nd4j.ones(nExamples, depth, outputLength);


@Test
-public void testUpsampling() throws Exception {
+public void testUpsampling1D() throws Exception {

-double[] outArray = new double[] {1., 1., 2., 2., 1., 1., 2., 2., 3., 3., 4., 4., 3., 3., 4., 4.};
-INDArray containedExpectedOut = Nd4j.create(outArray, new int[] {1, 1, 4, 4});
+double[] outArray = new double[] {1., 1., 2., 2., 3., 3., 4., 4.};
+INDArray containedExpectedOut = Nd4j.create(outArray, new int[] {1, 1, 8});
INDArray containedInput = getContainedData();
-INDArray input = getData();
-Layer layer = getUpsamplingLayer();
+// INDArray input = getData();
+Layer layer = getUpsampling1DLayer();

INDArray containedOutput = layer.activate(containedInput);
assertTrue(Arrays.equals(containedExpectedOut.shape(), containedOutput.shape()));
assertEquals(containedExpectedOut, containedOutput);

-INDArray output = layer.activate(input);
-assertTrue(Arrays.equals(new int[] {nExamples, nChannelsIn, outputWidth, outputHeight},
-output.shape()));
-assertEquals(nChannelsIn, output.size(1), 1e-4);
+// INDArray output = layer.activate(input);
+// assertTrue(Arrays.equals(new int[] {nExamples, nChannelsIn, outputLength},
+// output.shape()));
+// assertEquals(nChannelsIn, output.size(1), 1e-4);
}


@Test
-public void testUpsampling2DBackprop() throws Exception {
+public void testUpsampling1DBackprop() throws Exception {
INDArray expectedContainedEpsilonInput =
-Nd4j.create(new double[] {1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.},
-new int[] {1, 1, 4, 4});
+Nd4j.create(new double[] {1., 1., 1., 1.},
+new int[] {1, 1, 4});

-INDArray expectedContainedEpsilonResult = Nd4j.create(new double[] {4., 4., 4., 4.},
-new int[] {1, 1, 2, 2});
+INDArray expectedContainedEpsilonResult = Nd4j.create(new double[] {4., 4.},
+new int[] {1, 1, 2});

INDArray input = getContainedData();

-Layer layer = getUpsamplingLayer();
+Layer layer = getUpsampling1DLayer();
layer.activate(input);

Pair<Gradient, INDArray> containedOutput = layer.backpropGradient(expectedContainedEpsilonInput);
@@ -80,32 +76,33 @@ public void testUpsampling2DBackprop() throws Exception {
layer.activate(input2);
int depth = input2.size(1);

-epsilon = Nd4j.ones(5, depth, outputHeight, outputWidth);
+epsilon = Nd4j.ones(5, depth, outputLength);

Pair<Gradient, INDArray> out = layer.backpropGradient(epsilon);
assertEquals(input.shape().length, out.getSecond().shape().length);
assertEquals(depth, out.getSecond().size(1));
}


-private Layer getUpsamplingLayer() {
+private Layer getUpsampling1DLayer() {
NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
.gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).seed(123)
-.layer(new Upsampling2D.Builder(size).build()).build();
-return conf.getLayer().instantiate(conf, null, 0, null, true);
+.layer(new Upsampling1D.Builder(size).build()).build();
+return conf.getLayer().instantiate(conf, null, 0,
+null, true);
}

public INDArray getData() throws Exception {
DataSetIterator data = new MnistDataSetIterator(5, 5);
DataSet mnist = data.next();
nExamples = mnist.numExamples();
-return mnist.getFeatureMatrix().reshape(nExamples, nChannelsIn, inputWidth, inputHeight);
+return mnist.getFeatureMatrix().reshape(nExamples, nChannelsIn, inputLength);
}

private INDArray getContainedData() {
INDArray ret = Nd4j.create
(new double[] {1., 2., 3., 4.},
-new int[] {1, 1, 2, 2});
+new int[] {1, 1, 4});
return ret;
}
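
For context, the new test builds a standalone Upsampling1D layer with size = 2 and expects the forward pass to repeat each element of the input twice along the length dimension, turning [1, 2, 3, 4] into [1, 1, 2, 2, 3, 3, 4, 4]. Below is a minimal sketch of that forward pass, using only the API calls that already appear in this diff; the wrapper class Upsampling1DForwardSketch and its main method are illustrative assumptions, not part of the commit.

import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.Upsampling1D;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class Upsampling1DForwardSketch {

    public static void main(String[] args) {
        int size = 2;

        // Mirror getUpsampling1DLayer(): a bare Upsampling1D layer instantiated from its config.
        NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                .layer(new Upsampling1D.Builder(size).build()).build();
        Layer layer = conf.getLayer().instantiate(conf, null, 0, null, true);

        // Same input as getContainedData(): one example, one channel, length 4.
        INDArray input = Nd4j.create(new double[] {1., 2., 3., 4.}, new int[] {1, 1, 4});

        // testUpsampling1D() expects [1, 1, 2, 2, 3, 3, 4, 4] with shape [1, 1, 8].
        INDArray output = layer.activate(input);
        System.out.println(output);
    }
}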
