Use of org.deeplearning4j.exception.DL4JException in project deeplearning4j by deeplearning4j.
From the class TestInvalidInput, method testLabelsNOutMismatchOutputLayer:
@Test
public void testLabelsNOutMismatchOutputLayer() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build())
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    try {
        //Labels have 20 columns, but the OutputLayer is configured with nOut(10)
        net.fit(Nd4j.create(1, 10), Nd4j.create(1, 20));
        fail("Expected DL4JException");
    } catch (DL4JException e) {
        System.out.println("testLabelsNOutMismatchOutputLayer(): " + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Expected DL4JException");
    }
}
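The deliberately mismatched labels above have 20 columns against an OutputLayer with nOut(10). For contrast, here is a minimal sketch (reusing the net built above; not part of the original test) of a fit() call whose label width matches nOut:

    // Hypothetical counterpart: labels with 10 columns match nOut(10) of the OutputLayer,
    // so this call should not trigger the shape validation that throws DL4JException above.
    net.fit(Nd4j.create(1, 10), Nd4j.create(1, 10));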
Use of org.deeplearning4j.exception.DL4JException in project deeplearning4j by deeplearning4j.
From the class TestInvalidInput, method testInputNinMismatchDense:
@Test
public void testInputNinMismatchDense() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build())
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    try {
        //Input has 20 columns, but the first DenseLayer is configured with nIn(10)
        net.feedForward(Nd4j.create(1, 20));
        fail("Expected DL4JException");
    } catch (DL4JException e) {
        System.out.println("testInputNinMismatchDense(): " + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Expected DL4JException");
    }
}
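For comparison, a minimal sketch (reusing the net above; not part of the original test) of a feed-forward call whose input width matches nIn(10) of the first DenseLayer:

    // Hypothetical counterpart: a [1, 10] input matches nIn(10), so feedForward()
    // should not hit the input-shape check that throws DL4JException for [1, 20].
    net.feedForward(Nd4j.create(1, 10));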
Use of org.deeplearning4j.exception.DL4JException in project deeplearning4j by deeplearning4j.
From the class TestInvalidInput, method testInputNinMismatchLSTM:
@Test
public void testInputNinMismatchLSTM() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new GravesLSTM.Builder().nIn(5).nOut(5).build())
                    .layer(1, new RnnOutputLayer.Builder().nIn(5).nOut(5).build())
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    try {
        //Features have size 10 in the feature dimension, but the GravesLSTM is configured with nIn(5)
        net.fit(Nd4j.create(1, 10, 5), Nd4j.create(1, 5, 5));
        fail("Expected DL4JException");
    } catch (DL4JException e) {
        System.out.println("testInputNinMismatchLSTM(): " + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Expected DL4JException");
    }
}
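Recurrent features in DL4J are shaped [miniBatchSize, featureSize, timeSeriesLength]; the test passes a feature size of 10 against nIn(5). A minimal hedged sketch (reusing the net above; not part of the original test) with a matching feature size:

    // Hypothetical counterpart: features shaped [1, 5, 5] match the GravesLSTM nIn(5),
    // and the [1, 5, 5] labels already match the RnnOutputLayer nOut(5).
    net.fit(Nd4j.create(1, 5, 5), Nd4j.create(1, 5, 5));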
Use of org.deeplearning4j.exception.DL4JException in project deeplearning4j by deeplearning4j.
From the class TestInvalidInput, method testInputNinMismatchEmbeddingLayer:
@Test
public void testInputNinMismatchEmbeddingLayer() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new EmbeddingLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder().nIn(10).nOut(10).build())
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    try {
        //The EmbeddingLayer expects a single index column per example; this input has 5 columns
        net.feedForward(Nd4j.create(10, 5));
        fail("Expected DL4JException");
    } catch (DL4JException e) {
        System.out.println("testInputNinMismatchEmbeddingLayer(): " + e.getMessage());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Expected DL4JException");
    }
}
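An EmbeddingLayer consumes one class index per example rather than a dense feature vector, which is why the [10, 5] input above fails validation. A minimal hedged sketch of the matching shape (reusing the net above; the single-index-column format is an assumption about the expected input, not stated in the original snippet):

    // Hypothetical counterpart: a [10, 1] column of indices (here all zeros, which are
    // valid indices in the range [0, nIn)) is the per-example shape an EmbeddingLayer expects.
    net.feedForward(Nd4j.create(10, 1));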
Use of org.deeplearning4j.exception.DL4JException in project deeplearning4j by deeplearning4j.
From the class TestConvolutionModes, method testStrictTruncateConvolutionModeCompGraph:
@Test
public void testStrictTruncateConvolutionModeCompGraph() {
    //Idea: with convolution mode == Truncate, input size shouldn't matter (within the bounds of the truncated edge),
    // and edge data shouldn't affect the output
    //Use: 9x9, kernel 3, stride 3, padding 0
    // Should get same output for 10x10 and 11x11...

    Nd4j.getRandom().setSeed(12345);
    int[] minibatches = { 1, 3 };
    int[] inDepths = { 1, 3 };
    int[] inSizes = { 9, 10, 11 };

    for (boolean isSubsampling : new boolean[] { false, true }) {
        for (int minibatch : minibatches) {
            for (int inDepth : inDepths) {
                INDArray origData = Nd4j.rand(new int[] { minibatch, inDepth, 9, 9 });
                for (int inSize : inSizes) {
                    for (ConvolutionMode cm : new ConvolutionMode[] { ConvolutionMode.Strict, ConvolutionMode.Truncate }) {
                        //Copy the original 9x9 data into the top-left corner of the (possibly larger) input
                        INDArray inputData = Nd4j.rand(new int[] { minibatch, inDepth, inSize, inSize });
                        inputData.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 9),
                                        NDArrayIndex.interval(0, 9)).assign(origData);

                        Layer layer;
                        if (isSubsampling) {
                            layer = new SubsamplingLayer.Builder().kernelSize(3, 3).stride(3, 3).padding(0, 0).build();
                        } else {
                            layer = new ConvolutionLayer.Builder().kernelSize(3, 3).stride(3, 3).padding(0, 0).nOut(3).build();
                        }

                        ComputationGraph net = null;
                        try {
                            ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                                            .weightInit(WeightInit.XAVIER).convolutionMode(cm)
                                            .graphBuilder()
                                            .addInputs("in")
                                            .addLayer("0", layer, "in")
                                            .addLayer("1", new OutputLayer.Builder()
                                                            .lossFunction(LossFunctions.LossFunction.MCXENT)
                                                            .nOut(3).build(), "0")
                                            .setOutputs("1")
                                            .setInputTypes(InputType.convolutional(inSize, inSize, inDepth))
                                            .build();
                            net = new ComputationGraph(conf);
                            net.init();
                            if (inSize > 9 && cm == ConvolutionMode.Strict) {
                                fail("Expected exception");
                            }
                        } catch (DL4JException e) {
                            if (inSize == 9 || cm != ConvolutionMode.Strict) {
                                e.printStackTrace();
                                fail("Unexpected exception");
                            }
                            //Expected exception: Strict mode rejects input sizes that don't exactly fit the kernel/stride
                            continue;
                        } catch (Exception e) {
                            e.printStackTrace();
                            fail("Unexpected exception");
                        }

                        INDArray out = net.outputSingle(origData);
                        INDArray out2 = net.outputSingle(inputData);

                        assertEquals(out, out2);
                    }
                }
            }
        }
    }
}
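The exception pattern exercised by this test follows from the output-size arithmetic for a 3x3 kernel, stride 3 and zero padding. The helper below is an illustrative sketch of that arithmetic only (it is not part of the DL4J API or of the original test):

    // Hypothetical helper, for illustration: spatial output size with zero padding.
    // Strict mode requires (in - kernel) % stride == 0; Truncate silently drops the remainder.
    static int truncatedOutputSize(int in, int kernel, int stride) {
        return (in - kernel) / stride + 1;   // integer division truncates the edge remainder
    }
    //  9x9:  (9  - 3) / 3 + 1 = 3, remainder 0 -> valid under both Strict and Truncate
    // 10x10: (10 - 3) / 3 + 1 = 3, remainder 1 -> Truncate drops one edge row/column; Strict throws DL4JException
    // 11x11: (11 - 3) / 3 + 1 = 3, remainder 2 -> Truncate drops two edge rows/columns; Strict throws DL4JException

Because every configuration that builds successfully maps the same 9x9 region to the same 3x3 activations, the test can assert that outputSingle(origData) and outputSingle(inputData) are equal.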