Usage example of org.nd4j.linalg.dataset.DataSet in the deeplearning4j project:
class TestDataSetConsumer, method consumeWhileHasNext.
/**
 * Cycles through the iterator while {@code iterator.hasNext()} returns true, passing each
 * DataSet to {@code consumeOnce}. After each cycle, execution time is simulated either
 * using Thread.sleep() or an empty cycle, depending on the flag.
 *
 * @param consumeWithSleep if true, simulate per-cycle work with Thread.sleep(); otherwise use an empty cycle
 * @return the value of the internal counter after consumption (presumably the number of
 *         DataSets consumed — incremented inside consumeOnce; confirm against that method)
 * @throws IllegalStateException if no iterator has been set on this consumer
 */
public long consumeWhileHasNext(boolean consumeWithSleep) {
    // Fail fast before touching any state if the consumer was not given an iterator
    if (iterator == null)
        throw new IllegalStateException("Can't use consumeWhileHasNext() if iterator isn't set");
    count.set(0);
    while (iterator.hasNext()) {
        DataSet ds = iterator.next();
        this.consumeOnce(ds, consumeWithSleep);
    }
    return count.get();
}
Usage example of org.nd4j.linalg.dataset.DataSet in the deeplearning4j project:
class TestCompGraphCNN, method testCNNComputationGraphKernelTooLarge.
/**
 * Builds a CNN computation graph whose convolution kernel is as wide as the input image,
 * then fits it on zero-valued data; the configuration is expected to be rejected with
 * {@link InvalidInputTypeException}.
 */
@Test(expected = InvalidInputTypeException.class)
public void testCNNComputationGraphKernelTooLarge() {
    final int imageWidth = 23;
    final int imageHeight = 19;
    final int nChannels = 1;
    final int classes = 2;
    final int numSamples = 200;
    final int kernelHeight = 3;
    // Kernel width equals the full image width — the case under test
    final int kernelWidth = imageWidth;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(1)
            .seed(123)
            .graphBuilder()
            .addInputs("input")
            .setInputTypes(InputType.convolutional(nChannels, imageWidth, imageHeight))
            .addLayer("conv1",
                    new ConvolutionLayer.Builder()
                            .kernelSize(kernelHeight, kernelWidth)
                            .stride(1, 1)
                            .nIn(nChannels)
                            .nOut(2)
                            .weightInit(WeightInit.XAVIER)
                            .activation(Activation.RELU)
                            .build(),
                    "input")
            .addLayer("pool1",
                    new SubsamplingLayer.Builder()
                            .poolingType(SubsamplingLayer.PoolingType.MAX)
                            .kernelSize(imageHeight - kernelHeight + 1, 1)
                            .stride(1, 1)
                            .build(),
                    "conv1")
            .addLayer("output", new OutputLayer.Builder().nOut(classes).build(), "pool1")
            .setOutputs("output")
            .backprop(true)
            .pretrain(false)
            .build();

    ComputationGraph model = new ComputationGraph(conf);
    model.init();

    // All-zero features and labels; only the input shape matters for this test
    INDArray zeroFeatures = Nd4j.zeros(numSamples, imageWidth * imageHeight * nChannels);
    INDArray zeroLabels = Nd4j.zeros(numSamples, classes);
    DataSet trainInput = new DataSet(zeroFeatures, zeroLabels);
    model.fit(trainInput);
}
Usage example of org.nd4j.linalg.dataset.DataSet in the deeplearning4j project:
class TestCompGraphCNN, method getDS.
/**
 * Builds a tiny five-example DataSetIterator: each example has a 32*32*3 feature
 * vector (zero-filled, the Nd4j.create default) and a one-hot label over 10 classes,
 * where the hot class index equals the example index.
 */
protected static DataSetIterator getDS() {
    final int numExamples = 5;
    List<DataSet> examples = new ArrayList<>(numExamples);
    for (int exampleIdx = 0; exampleIdx < numExamples; exampleIdx++) {
        INDArray features = Nd4j.create(1, 32 * 32 * 3);
        INDArray labels = Nd4j.create(1, 10);
        labels.putScalar(exampleIdx, 1.0);
        examples.add(new DataSet(features, labels));
    }
    return new ListDataSetIterator(examples, numExamples);
}
Usage example of org.nd4j.linalg.dataset.DataSet in the deeplearning4j project:
class TestComputationGraphNetwork, method testForwardBasicIris.
/**
 * Checks that a ComputationGraph built from the Iris graph configuration produces the
 * expected activation map on feed-forward, and that — given identical parameters — it
 * yields exactly the same layer activations as the equivalent MultiLayerNetwork.
 */
@Test
public void testForwardBasicIris() {
    ComputationGraphConfiguration configuration = getIrisGraphConfiguration();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();

    MultiLayerConfiguration mlc = getIrisMLNConfiguration();
    MultiLayerNetwork net = new MultiLayerNetwork(mlc);
    net.init();

    DataSetIterator iris = new IrisDataSetIterator(150, 150);
    DataSet ds = iris.next();
    INDArray features = ds.getFeatureMatrix();

    graph.setInput(0, features);
    Map<String, INDArray> activations = graph.feedForward(false);
    // One activation per graph vertex: 2 layers + 1 input node
    assertEquals(3, activations.size());
    assertTrue(activations.containsKey("input"));
    assertTrue(activations.containsKey("firstLayer"));
    assertTrue(activations.containsKey("outputLayer"));

    // Give both networks identical parameters, then compare their feed-forward outputs
    Nd4j.getRandom().setSeed(12345);
    INDArray params = Nd4j.rand(1, getNumParams());
    graph.setParams(params.dup());
    net.setParams(params.dup());

    List<INDArray> mlnActivations = net.feedForward(features, false);
    activations = graph.feedForward(features, false);
    assertEquals(mlnActivations.get(0), activations.get("input"));
    assertEquals(mlnActivations.get(1), activations.get("firstLayer"));
    assertEquals(mlnActivations.get(2), activations.get("outputLayer"));
}
Usage example of org.nd4j.linalg.dataset.DataSet in the deeplearning4j project:
class TestComputationGraphNetwork, method testScoringDataSet.
/**
 * Verifies that, when given identical parameters, a ComputationGraph and the equivalent
 * MultiLayerNetwork report the same score on the full Iris data set.
 */
@Test
public void testScoringDataSet() {
    ComputationGraphConfiguration configuration = getIrisGraphConfiguration();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();

    MultiLayerConfiguration mlc = getIrisMLNConfiguration();
    MultiLayerNetwork net = new MultiLayerNetwork(mlc);
    net.init();

    DataSetIterator iris = new IrisDataSetIterator(150, 150);
    DataSet ds = iris.next();

    // Give both networks identical parameters, then compare their scores on the same data
    Nd4j.getRandom().setSeed(12345);
    INDArray params = Nd4j.rand(1, getNumParams());
    graph.setParams(params.dup());
    net.setParams(params.dup());

    double scoreMLN = net.score(ds, false);
    double scoreCG = graph.score(ds, false);
    assertEquals(scoreMLN, scoreCG, 1e-4);
}
Aggregations