Search in sources:

Example 81 with Matrix

use of org.apache.ignite.ml.math.Matrix in project ignite by apache.

In class MLPTest, the method testStackedMLP:

/**
 * Test that a two-layer MLP is equivalent to its two single-layer
 * subparts stacked on each other.
 */
@Test
public void testStackedMLP() {
    int neuronsInLayer1 = 3;
    int neuronsInLayer2 = 2;

    MLPConstInitializer initer = new MLPConstInitializer(1, 2);

    // Full network: 4 inputs -> sigmoid layer 1 -> sigmoid layer 2, no biases.
    MLPArchitecture fullConf = new MLPArchitecture(4)
        .withAddedLayer(neuronsInLayer1, false, Activators.SIGMOID)
        .withAddedLayer(neuronsInLayer2, false, Activators.SIGMOID);
    MultilayerPerceptron fullMlp = new MultilayerPerceptron(fullConf, initer);

    // The same network assembled from two one-layer parts glued via add().
    MLPArchitecture part1Conf = new MLPArchitecture(4)
        .withAddedLayer(neuronsInLayer1, false, Activators.SIGMOID);
    MLPArchitecture part2Conf = new MLPArchitecture(neuronsInLayer1)
        .withAddedLayer(neuronsInLayer2, false, Activators.SIGMOID);

    MultilayerPerceptron part1 = new MultilayerPerceptron(part1Conf, initer);
    MultilayerPerceptron part2 = new MultilayerPerceptron(part2Conf, initer);
    MultilayerPerceptron stackedMLP = part1.add(part2);

    // Both networks get the same 4x1 column vector; outputs must match exactly.
    Matrix predict = fullMlp.apply(new DenseLocalOnHeapMatrix(new double[][] { { 1, 2, 3, 4 } }).transpose());
    Matrix stackedPredict = stackedMLP.apply(new DenseLocalOnHeapMatrix(new double[][] { { 1, 2, 3, 4 } }).transpose());

    Assert.assertEquals(predict, stackedPredict);
}
Also used : Matrix(org.apache.ignite.ml.math.Matrix) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) MLPArchitecture(org.apache.ignite.ml.nn.architecture.MLPArchitecture) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) Test(org.junit.Test)

Example 82 with Matrix

use of org.apache.ignite.ml.math.Matrix in project ignite by apache.

In class MnistDistributed, the method testMNISTDistributed:

/**
 * Trains an MLP with one hidden layer on the MNIST training set using the
 * distributed group-update trainer, then prints predictions, ground truth
 * and the classification accuracy on a 10 000-sample test part.
 *
 * @throws IOException In case of MNIST loading errors.
 */
public void testMNISTDistributed() throws IOException {
    int trainingSamplesCnt = 60_000;
    int hiddenLayerSize = 100;

    IgniteBiTuple<Stream<DenseLocalOnHeapVector>, Stream<DenseLocalOnHeapVector>> trainingAndTest =
        loadMnist(trainingSamplesCnt);

    // Materialize the training part and load it into a labeled-vectors cache.
    List<DenseLocalOnHeapVector> trainingMnistLst = trainingAndTest.get1().collect(Collectors.toList());

    IgniteCache<Integer, LabeledVector<Vector, Vector>> labeledVectorsCache = LabeledVectorsCache.createNew(ignite);
    loadIntoCache(trainingMnistLst, labeledVectorsCache);

    MLPGroupUpdateTrainer<RPropParameterUpdate> trainer = MLPGroupUpdateTrainer.getDefault(ignite)
        .withMaxGlobalSteps(35)
        .withSyncPeriod(2);

    // FEATURES_CNT inputs -> hidden sigmoid layer (with biases) -> 10-way sigmoid output.
    MLPArchitecture arch = new MLPArchitecture(FEATURES_CNT)
        .withAddedLayer(hiddenLayerSize, true, Activators.SIGMOID)
        .withAddedLayer(10, false, Activators.SIGMOID);

    MultilayerPerceptron mdl = trainer.train(new MLPGroupUpdateTrainerCacheInput(arch, 9, labeledVectorsCache, 2000));

    // Evaluate on the held-out test stream; fold each output column to a class label.
    IgniteBiTuple<Matrix, Matrix> testDs = createDataset(trainingAndTest.get2(), 10_000, FEATURES_CNT);

    Vector truth = testDs.get2().foldColumns(VectorUtils::vec2Num);
    Vector predicted = mdl.apply(testDs.get1()).foldColumns(VectorUtils::vec2Num);

    Tracer.showAscii(truth);
    Tracer.showAscii(predicted);

    X.println("Accuracy: " + VectorUtils.zipWith(predicted, truth, (x, y) -> x.equals(y) ? 1.0 : 0.0).sum() / truth.size() * 100 + "%.");
}
Also used : VectorUtils(org.apache.ignite.ml.math.VectorUtils) MLPArchitecture(org.apache.ignite.ml.nn.architecture.MLPArchitecture) LabeledVector(org.apache.ignite.ml.structures.LabeledVector) MultilayerPerceptron(org.apache.ignite.ml.nn.MultilayerPerceptron) Matrix(org.apache.ignite.ml.math.Matrix) RPropParameterUpdate(org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate) Stream(java.util.stream.Stream) DenseLocalOnHeapVector(org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector) LabeledVector(org.apache.ignite.ml.structures.LabeledVector) Vector(org.apache.ignite.ml.math.Vector) DenseLocalOnHeapVector(org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector) MLPGroupUpdateTrainerCacheInput(org.apache.ignite.ml.nn.MLPGroupUpdateTrainerCacheInput)

Example 83 with Matrix

use of org.apache.ignite.ml.math.Matrix in project ignite by apache.

In class MnistLocal, the method tstMNISTLocal:

/**
 * Run nn classifier on MNIST using bi-indexed cache as a storage for dataset.
 * To run this test rename this method so it starts from 'test'.
 *
 * @throws IOException In case of loading MNIST dataset errors.
 */
@Test
public void tstMNISTLocal() throws IOException {
    int samplesCnt = 60_000;
    int featCnt = 28 * 28;
    int hiddenNeuronsCnt = 100;

    IgniteBiTuple<Stream<DenseLocalOnHeapVector>, Stream<DenseLocalOnHeapVector>> trainingAndTest = loadMnist(samplesCnt);

    Stream<DenseLocalOnHeapVector> trainingMnistStream = trainingAndTest.get1();
    Stream<DenseLocalOnHeapVector> testMnistStream = trainingAndTest.get2();

    IgniteBiTuple<Matrix, Matrix> ds = createDataset(trainingMnistStream, samplesCnt, featCnt);
    IgniteBiTuple<Matrix, Matrix> testDs = createDataset(testMnistStream, 10000, featCnt);

    // featCnt inputs -> hidden sigmoid layer (with biases) -> 10-way sigmoid output.
    MLPArchitecture conf = new MLPArchitecture(featCnt).withAddedLayer(hiddenNeuronsCnt, true, Activators.SIGMOID).withAddedLayer(10, false, Activators.SIGMOID);

    SimpleMLPLocalBatchTrainerInput input = new SimpleMLPLocalBatchTrainerInput(conf, new Random(), ds.get1(), ds.get2(), 2000);

    // BUG FIX: the timestamp was previously captured AFTER train() returned,
    // so the reported elapsed time was always ~0 ms. Start the clock before training.
    X.println("Training started");
    long before = System.currentTimeMillis();

    MultilayerPerceptron mdl = new MLPLocalBatchTrainer<>(LossFunctions.MSE, () -> new RPropUpdateCalculator(0.1, 1.2, 0.5), 1E-7, 200).train(input);

    X.println("Training finished in " + (System.currentTimeMillis() - before));

    // Fold each output column to a predicted class label and compare with ground truth.
    Vector predicted = mdl.apply(testDs.get1()).foldColumns(VectorUtils::vec2Num);
    Vector truth = testDs.get2().foldColumns(VectorUtils::vec2Num);

    Tracer.showAscii(truth);
    Tracer.showAscii(predicted);

    X.println("Accuracy: " + VectorUtils.zipWith(predicted, truth, (x, y) -> x.equals(y) ? 1.0 : 0.0).sum() / truth.size() * 100 + "%.");
}
Also used : SimpleMLPLocalBatchTrainerInput(org.apache.ignite.ml.nn.SimpleMLPLocalBatchTrainerInput) VectorUtils(org.apache.ignite.ml.math.VectorUtils) MLPArchitecture(org.apache.ignite.ml.nn.architecture.MLPArchitecture) RPropUpdateCalculator(org.apache.ignite.ml.optimization.updatecalculators.RPropUpdateCalculator) MultilayerPerceptron(org.apache.ignite.ml.nn.MultilayerPerceptron) Matrix(org.apache.ignite.ml.math.Matrix) Random(java.util.Random) Stream(java.util.stream.Stream) DenseLocalOnHeapVector(org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector) Vector(org.apache.ignite.ml.math.Vector) DenseLocalOnHeapVector(org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector) Test(org.junit.Test)

Example 84 with Matrix

use of org.apache.ignite.ml.math.Matrix in project ignite by apache.

In class GenericLinearRegressionTrainerTest, the method testTrainOnBostonDataset:

/**
 * Test trainer on the Boston housing dataset (506 samples, 13 features):
 * trained weights and intercept must match the reference values.
 */
@Test
public void testTrainOnBostonDataset() {
    Matrix bostonData = loadDataset("datasets/regression/boston.csv", 506, 13);

    LinearRegressionModel mdl = trainer.train(bostonData);

    // Reference coefficients for the 13 Boston features.
    double[] expWeightValues = {
        -1.07170557e-01, 4.63952195e-02, 2.08602395e-02, 2.68856140e+00,
        -1.77957587e+01, 3.80475246e+00, 7.51061703e-04, -1.47575880e+00,
        3.05655038e-01, -1.23293463e-02, -9.53463555e-01, 9.39251272e-03,
        -5.25466633e-01
    };
    Vector expWeights = vectorCreator.apply(expWeightValues);
    double expIntercept = 36.4911032804;

    TestUtils.assertEquals("Wrong weights", expWeights, mdl.getWeights(), precision);
    TestUtils.assertEquals("Wrong intercept", expIntercept, mdl.getIntercept(), precision);
}
Also used : Matrix(org.apache.ignite.ml.math.Matrix) Vector(org.apache.ignite.ml.math.Vector) Test(org.junit.Test)

Example 85 with Matrix

use of org.apache.ignite.ml.math.Matrix in project ignite by apache.

In class GenericLinearRegressionTrainerTest, the method testTrainWithIntercept:

/**
 * Test trainer on regression model y = -1 * x + 1.
 * Training points (y, x) = (1, 0) and (0, 1) lie exactly on that line,
 * so predictions at new points must follow it as well.
 */
@Test
public void testTrainWithIntercept() {
    Matrix trainingData = matrixCreator.apply(new double[][] { { 1.0, 0.0 }, { 0.0, 1.0 } });

    LinearRegressionModel mdl = trainer.train(trainingData);

    // y(0.5) = 0.5, y(-1) = 2, y(2) = -1 under y = -x + 1.
    TestUtils.assertEquals(0.5, mdl.apply(vectorCreator.apply(new double[] { 0.5 })), precision);
    TestUtils.assertEquals(2, mdl.apply(vectorCreator.apply(new double[] { -1 })), precision);
    TestUtils.assertEquals(-1, mdl.apply(vectorCreator.apply(new double[] { 2 })), precision);
}
Also used : Matrix(org.apache.ignite.ml.math.Matrix) Test(org.junit.Test)

Aggregations

Matrix (org.apache.ignite.ml.math.Matrix)131 DenseLocalOnHeapMatrix (org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix)51 Test (org.junit.Test)48 Vector (org.apache.ignite.ml.math.Vector)30 DenseLocalOnHeapVector (org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector)18 ExternalizeTest (org.apache.ignite.ml.math.ExternalizeTest)17 MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture)10 Random (java.util.Random)6 DenseLocalOffHeapMatrix (org.apache.ignite.ml.math.impls.matrix.DenseLocalOffHeapMatrix)6 SparseDistributedMatrix (org.apache.ignite.ml.math.impls.matrix.SparseDistributedMatrix)6 SparseLocalOnHeapMatrix (org.apache.ignite.ml.math.impls.matrix.SparseLocalOnHeapMatrix)6 Ignite (org.apache.ignite.Ignite)5 RandomMatrix (org.apache.ignite.ml.math.impls.matrix.RandomMatrix)5 FunctionVector (org.apache.ignite.ml.math.impls.vector.FunctionVector)5 CardinalityException (org.apache.ignite.ml.math.exceptions.CardinalityException)4 LabeledVector (org.apache.ignite.ml.structures.LabeledVector)4 IgniteThread (org.apache.ignite.thread.IgniteThread)4 IgniteBiTuple (org.apache.ignite.lang.IgniteBiTuple)3 LUDecomposition (org.apache.ignite.ml.math.decompositions.LUDecomposition)3 QRDecomposition (org.apache.ignite.ml.math.decompositions.QRDecomposition)3