Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.
The class BNGradientCheckTest, method testGradient2dFixedGammaBeta.
@Test
public void testGradient2dFixedGammaBeta() {
    // Scale the Iris features into [0, 1] before running the gradient check
    DataNormalization scaler = new NormalizerMinMaxScaler();
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    scaler.fit(iter);
    iter.setPreProcessor(scaler);
    DataSet ds = iter.next();
    INDArray input = ds.getFeatureMatrix();
    INDArray labels = ds.getLabels();

    // Batch-norm layer with gamma and beta locked to fixed values (2.0 and 0.5),
    // so the gradient check exercises only the remaining trainable parameters
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
                    .learningRate(1.0).regularization(false).updater(Updater.NONE).seed(12345L)
                    .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1))
                    .list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
                    .layer(1, new BatchNormalization.Builder().lockGammaBeta(true).gamma(2.0).beta(0.5).nOut(3).build())
                    .layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
                    .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
                    .pretrain(false).backprop(true);
    MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
    mln.init();

    if (PRINT_RESULTS) {
        for (int j = 0; j < mln.getnLayers(); j++)
            System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
    }

    // Compare numerical vs. analytic gradients for the whole network
    boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                    DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
    assertTrue(gradOK);
}
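The min-max scaling above is attached to the iterator as a pre-processor, but it can also be applied eagerly to an already-fetched DataSet. The following is a minimal sketch under the same assumptions as the test (same DL4J/ND4J version and imports); DataNormalization.transform(DataSet) scales the features in place.

    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    DataNormalization scaler = new NormalizerMinMaxScaler();
    scaler.fit(iter);      // one pass over the iterator to collect per-column min/max
    iter.reset();
    DataSet ds = iter.next();
    scaler.transform(ds);  // scale features into [0, 1] in place, no pre-processor needed

Either route yields the same scaled features; the pre-processor form is convenient when the iterator is passed straight to fit(), while the eager form suits one-off DataSets.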
Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.
The class DataSetIteratorTest, method testLfwModel.
@Test
public void testLfwModel() throws Exception {
    final int numRows = 28;
    final int numColumns = 28;
    int numChannels = 3;
    int outputNum = LFWLoader.SUB_NUM_LABELS;
    int numSamples = 4;
    int batchSize = 2;
    int iterations = 1;
    int seed = 123;
    int listenerFreq = iterations;

    LFWDataSetIterator lfw = new LFWDataSetIterator(batchSize, numSamples,
                    new int[] {numRows, numColumns, numChannels}, outputNum, true, true, 1.0, new Random(seed));

    // Minimal convolution -> max-pooling -> softmax network over the LFW subset
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                    .iterations(iterations)
                    .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .list()
                    .layer(0, new ConvolutionLayer.Builder(10, 10).nIn(numChannels).nOut(6)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                    .stride(1, 1).build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutionalFlat(numRows, numColumns, numChannels))
                    .backprop(true).pretrain(false);

    MultiLayerNetwork model = new MultiLayerNetwork(builder.build());
    model.init();
    model.setListeners(new ScoreIterationListener(listenerFreq));

    // Fit on the first batch, then evaluate on the next one
    model.fit(lfw.next());
    DataSet dataTest = lfw.next();
    INDArray output = model.output(dataTest.getFeatureMatrix());
    Evaluation eval = new Evaluation(outputNum);
    eval.eval(dataTest.getLabels(), output);
    System.out.println(eval.stats());
}
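Evaluating on a single batch, as above, gives a very noisy picture when batchSize is 2. A sketch of the multi-batch variant, using only calls already present in the test (hasNext/next/output/eval), so the Evaluation accumulates its confusion matrix across all remaining batches:

    Evaluation eval = new Evaluation(outputNum);
    while (lfw.hasNext()) {
        DataSet next = lfw.next();
        INDArray predicted = model.output(next.getFeatureMatrix());
        eval.eval(next.getLabels(), predicted);   // accumulates counts across batches
    }
    System.out.println(eval.stats());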
Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.
The class DataSetIteratorTest, method testIteratorDataSetIteratorSplitting.
@Test
public void testIteratorDataSetIteratorSplitting() {
    // Test splitting large data sets into smaller ones: 3 source DataSets of
    // 4 examples each (12 total) re-batched to size 3 should yield exactly
    // 4 batches, with batch boundaries crossing the original DataSet boundaries
    int origBatchSize = 4;
    int origNumDSs = 3;
    int batchSize = 3;
    int numBatches = 4;
    int featureSize = 5;
    int labelSize = 6;

    Nd4j.getRandom().setSeed(12345);
    List<DataSet> orig = new ArrayList<>();
    for (int i = 0; i < origNumDSs; i++) {
        INDArray features = Nd4j.rand(origBatchSize, featureSize);
        INDArray labels = Nd4j.rand(origBatchSize, labelSize);
        orig.add(new DataSet(features, labels));
    }

    // Expected batches, assembled by hand from rows of the source DataSets
    List<DataSet> expected = new ArrayList<>();
    expected.add(new DataSet(orig.get(0).getFeatureMatrix().getRows(0, 1, 2),
                    orig.get(0).getLabels().getRows(0, 1, 2)));
    expected.add(new DataSet(
                    Nd4j.vstack(orig.get(0).getFeatureMatrix().getRows(3), orig.get(1).getFeatureMatrix().getRows(0, 1)),
                    Nd4j.vstack(orig.get(0).getLabels().getRows(3), orig.get(1).getLabels().getRows(0, 1))));
    expected.add(new DataSet(
                    Nd4j.vstack(orig.get(1).getFeatureMatrix().getRows(2, 3), orig.get(2).getFeatureMatrix().getRows(0)),
                    Nd4j.vstack(orig.get(1).getLabels().getRows(2, 3), orig.get(2).getLabels().getRows(0))));
    expected.add(new DataSet(orig.get(2).getFeatureMatrix().getRows(1, 2, 3),
                    orig.get(2).getLabels().getRows(1, 2, 3)));

    DataSetIterator iter = new IteratorDataSetIterator(orig.iterator(), batchSize);
    int count = 0;
    while (iter.hasNext()) {
        DataSet ds = iter.next();
        assertEquals(expected.get(count), ds);
        count++;
    }
    assertEquals(numBatches, count);
}
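The inverse operation, stitching small batches back into a single DataSet, is available through the static DataSet.merge. A minimal sketch reusing orig and batchSize from the test above; a fresh IteratorDataSetIterator is built because the first one has been exhausted by the loop:

    List<DataSet> batches = new ArrayList<>();
    DataSetIterator rebatched = new IteratorDataSetIterator(orig.iterator(), batchSize);
    while (rebatched.hasNext()) {
        batches.add(rebatched.next());
    }
    DataSet merged = DataSet.merge(batches);                          // vstacks features and labels
    assertEquals(origBatchSize * origNumDSs, merged.numExamples());   // all 12 examples survive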
Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.
The class DataSetIteratorTest, method testBatchSizeOfOneIris.
@Test
public void testBatchSizeOfOneIris() throws Exception {
    // Test for (a) the iterator returning the correct number of examples, and
    // (b) labels being a proper one-hot vector: with batch size 1 the label
    // matrix is a single row, so its total sum must be exactly 1.0
    DataSetIterator iris = new IrisDataSetIterator(1, 5);
    int irisC = 0;
    while (iris.hasNext()) {
        irisC++;
        DataSet ds = iris.next();
        assertTrue(ds.getLabels().sum(Integer.MAX_VALUE).getDouble(0) == 1.0);
    }
    assertEquals(5, irisC);
}
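Since each single-example label row is one-hot, the class index can be recovered with an argmax along the column dimension. A small sketch under the same assumptions as the test:

    DataSetIterator iris = new IrisDataSetIterator(1, 5);
    while (iris.hasNext()) {
        DataSet ds = iris.next();
        // argMax along dimension 1 returns the index of the single 1.0 entry
        int classIdx = Nd4j.argMax(ds.getLabels(), 1).getInt(0);
        System.out.println("label class = " + classIdx);
    }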
Use of org.nd4j.linalg.dataset.DataSet in project deeplearning4j by deeplearning4j.
The class DataSetIteratorTest, method testMnist.
@Test
public void testMnist() throws Exception {
    // The CSV copy of the first 200 MNIST examples should match the
    // MnistDataSetIterator output once its raw pixel values are scaled by 1/255
    ClassPathResource cpr = new ClassPathResource("mnist_first_200.txt");
    CSVRecordReader rr = new CSVRecordReader(0, ",");
    rr.initialize(new FileSplit(cpr.getTempFileFromArchive()));
    RecordReaderDataSetIterator dsi = new RecordReaderDataSetIterator(rr, 10, 0, 10);
    MnistDataSetIterator iter = new MnistDataSetIterator(10, 200, false, true, false, 0);

    while (dsi.hasNext()) {
        DataSet dsExp = dsi.next();
        DataSet dsAct = iter.next();

        INDArray fExp = dsExp.getFeatureMatrix();
        fExp.divi(255);   // CSV features are raw [0, 255] pixels; the MNIST iterator returns [0, 1]
        INDArray lExp = dsExp.getLabels();

        INDArray fAct = dsAct.getFeatureMatrix();
        INDArray lAct = dsAct.getLabels();

        assertEquals(fExp, fAct);
        assertEquals(lExp, lAct);
    }
    assertFalse(iter.hasNext());
}
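Rather than calling divi(255) on every batch by hand, the same [0, 255] to [0, 1] scaling can be attached to the iterator. A sketch assuming ImagePreProcessingScaler (from the same org.nd4j preprocessor package as NormalizerMinMaxScaler) is available in this DL4J version:

    rr.reset();   // rewind the record reader consumed by the loop above
    RecordReaderDataSetIterator csvIter = new RecordReaderDataSetIterator(rr, 10, 0, 10);
    csvIter.setPreProcessor(new ImagePreProcessingScaler(0, 1));   // divides raw pixel values by 255
    // csvIter.next().getFeatureMatrix() now matches the MnistDataSetIterator features directly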