Search in sources:

Example 6 with MLPArchitecture

Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in the Apache Ignite project.

From class MLPLocalTrainerExample, method main.

/**
 * Executes example.
 *
 * <p>Trains a small multilayer perceptron locally on the XOR truth table and prints
 * the predictions alongside the reference outputs.</p>
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    // IMPL NOTE based on MLPLocalTrainerTest#testXORRProp
    System.out.println(">>> Local multilayer perceptron example started.");

    // XOR truth table: one sample per row, transposed into the column-per-sample layout the MLP expects.
    double[][] inputRows = { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } };
    Matrix xorInputs = new DenseLocalOnHeapMatrix(inputRows, StorageConstants.ROW_STORAGE_MODE).transpose();

    System.out.println("\n>>> Input data:");
    Tracer.showAscii(xorInputs);

    double[][] outputRows = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };
    Matrix xorOutputs = new DenseLocalOnHeapMatrix(outputRows, StorageConstants.ROW_STORAGE_MODE).transpose();

    // Network shape: 2 inputs -> 10 RELU neurons (with biases) -> 1 sigmoid output (no bias).
    MLPArchitecture conf = new MLPArchitecture(2)
        .withAddedLayer(10, true, Activators.RELU)
        .withAddedLayer(1, false, Activators.SIGMOID);

    // Fixed seed keeps the example output reproducible; batch size is 4 (the whole XOR set).
    SimpleMLPLocalBatchTrainerInput trainerInput =
        new SimpleMLPLocalBatchTrainerInput(conf, new Random(1234L), xorInputs, xorOutputs, 4);

    System.out.println("\n>>> Perform training.");
    MultilayerPerceptron mlp =
        new MLPLocalBatchTrainer<>(LossFunctions.MSE, RPropUpdateCalculator::new, 0.0001, 16000)
            .train(trainerInput);

    System.out.println("\n>>> Apply model.");
    Matrix predict = mlp.apply(xorInputs);

    System.out.println("\n>>> Predicted data:");
    Tracer.showAscii(predict);

    System.out.println("\n>>> Reference expected data:");
    Tracer.showAscii(xorOutputs);

    System.out.println("\n>>> Difference estimate: " + xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2));
    System.out.println("\n>>> Local multilayer perceptron example completed.");
}
Also used : MultilayerPerceptron(org.apache.ignite.ml.nn.MultilayerPerceptron) Matrix(org.apache.ignite.ml.math.Matrix) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) MLPArchitecture(org.apache.ignite.ml.nn.architecture.MLPArchitecture) Random(java.util.Random) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix)

Example 7 with MLPArchitecture

Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in the Apache Ignite project.

From class MLPLocalTrainerTest, method xorTest.

/**
 * Common method for testing 'XOR' with various updaters.
 *
 * <p>Builds a 2-10-1 network, trains it on the four XOR samples with the supplied
 * update calculator and asserts the predictions land within epsilon of the truth.</p>
 *
 * @param updaterSupplier Updater supplier.
 * @param <P> Updater parameters type.
 */
private <P> void xorTest(IgniteSupplier<ParameterUpdateCalculator<? super MultilayerPerceptron, P>> updaterSupplier) {
    // One XOR sample per row; transpose to column-per-sample form.
    double[][] inputRows = { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } };
    double[][] outputRows = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };

    Matrix xorInputs = new DenseLocalOnHeapMatrix(inputRows, StorageConstants.ROW_STORAGE_MODE).transpose();
    Matrix xorOutputs = new DenseLocalOnHeapMatrix(outputRows, StorageConstants.ROW_STORAGE_MODE).transpose();

    // 2 inputs -> 10 RELU neurons (with biases) -> 1 sigmoid output (no bias).
    MLPArchitecture conf = new MLPArchitecture(2)
        .withAddedLayer(10, true, Activators.RELU)
        .withAddedLayer(1, false, Activators.SIGMOID);

    // Seeded RNG keeps the test deterministic; batch covers all 4 samples.
    SimpleMLPLocalBatchTrainerInput trainerInput =
        new SimpleMLPLocalBatchTrainerInput(conf, new Random(123L), xorInputs, xorOutputs, 4);

    MultilayerPerceptron mlp =
        new MLPLocalBatchTrainer<>(LossFunctions.MSE, updaterSupplier, 0.0001, 16000).train(trainerInput);

    Matrix predict = mlp.apply(xorInputs);

    Tracer.showAscii(predict);
    X.println(xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2) + "");

    TestUtils.checkIsInEpsilonNeighbourhood(xorOutputs.getRow(0), predict.getRow(0), 1E-1);
}
Also used : Matrix(org.apache.ignite.ml.math.Matrix) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) MLPArchitecture(org.apache.ignite.ml.nn.architecture.MLPArchitecture) Random(java.util.Random) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix)

Example 8 with MLPArchitecture

Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in the Apache Ignite project.

From class MLPTest, method setParamsFlattening.

/**
 * Test methods related to parameters flattening.
 *
 * <p>Sets the whole parameter vector at once and verifies that the flattened layout
 * round-trips: {@code parameters()} returns the same vector, and the per-layer
 * weight matrices / bias vector come back with the expected values.</p>
 */
@Test
public void setParamsFlattening() {
    int inputSize = 3;
    int firstLayerNeuronsCnt = 2;
    int secondLayerNeurons = 1;
    // Flattened layout: first layer 2x3 weights (no biases), then second layer 1x2 weights and 1 bias.
    DenseLocalOnHeapVector paramsVector = new DenseLocalOnHeapVector(new double[] { // First layer weight matrix.
    1.0, // First layer weight matrix.
    2.0, // First layer weight matrix.
    3.0, // First layer weight matrix.
    4.0, // First layer weight matrix.
    5.0, // First layer weight matrix.
    6.0, // Second layer weight matrix.
    7.0, // Second layer weight matrix.
    8.0, // Second layer biases.
    9.0 });
    DenseLocalOnHeapMatrix firstLayerWeights = new DenseLocalOnHeapMatrix(new double[][] { { 1.0, 2.0, 3.0 }, { 4.0, 5.0, 6.0 } });
    DenseLocalOnHeapMatrix secondLayerWeights = new DenseLocalOnHeapMatrix(new double[][] { { 7.0, 8.0 } });
    DenseLocalOnHeapVector secondLayerBiases = new DenseLocalOnHeapVector(new double[] { 9.0 });
    // First layer has no biases (false), second layer has them (true) — must match the vector above.
    MLPArchitecture conf = new MLPArchitecture(inputSize).withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID).withAddedLayer(secondLayerNeurons, true, Activators.SIGMOID);
    // Initializer constants are irrelevant: setParameters overwrites everything.
    MultilayerPerceptron mlp = new MultilayerPerceptron(conf, new MLPConstInitializer(100, 200));
    mlp.setParameters(paramsVector);
    Assert.assertEquals(paramsVector, mlp.parameters());
    // Fix: JUnit convention is assertEquals(expected, actual); keep expected values first
    // (consistent with the parameters() assertion above) so failure messages read correctly.
    Assert.assertEquals(firstLayerWeights, mlp.weights(1));
    Assert.assertEquals(secondLayerWeights, mlp.weights(2));
    Assert.assertEquals(secondLayerBiases, mlp.biases(2));
}
Also used : MLPArchitecture(org.apache.ignite.ml.nn.architecture.MLPArchitecture) DenseLocalOnHeapVector(org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) Test(org.junit.Test)

Example 9 with MLPArchitecture

Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in the Apache Ignite project.

From class MLPTest, method testXOR.

/**
 * Test that MLP with parameters that should produce function close to 'XOR' is close to 'XOR' on 'XOR' domain.
 */
@Test
public void testXOR() {
    // 2-2-1 network; both layers use sigmoid activation and carry biases.
    MLPArchitecture arch = new MLPArchitecture(2)
        .withAddedLayer(2, true, Activators.SIGMOID)
        .withAddedLayer(1, true, Activators.SIGMOID);
    MultilayerPerceptron perceptron = new MultilayerPerceptron(arch, new MLPConstInitializer(1, 2));

    // Hand-picked large weights saturate the sigmoids, turning the net into an approximate XOR gate.
    perceptron.setWeights(1, new DenseLocalOnHeapMatrix(new double[][] { { 20.0, 20.0 }, { -20.0, -20.0 } }));
    perceptron.setBiases(1, new DenseLocalOnHeapVector(new double[] { -10.0, 30.0 }));
    perceptron.setWeights(2, new DenseLocalOnHeapMatrix(new double[][] { { 20.0, 20.0 } }));
    perceptron.setBiases(2, new DenseLocalOnHeapVector(new double[] { -30.0 }));

    // Full XOR domain, one point per row, transposed to column-per-sample.
    double[][] domain = { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } };
    Matrix predict = perceptron.apply(new DenseLocalOnHeapMatrix(domain).transpose());

    Vector truth = new DenseLocalOnHeapVector(new double[] { 0.0, 1.0, 1.0, 0.0 });
    TestUtils.checkIsInEpsilonNeighbourhood(predict.getRow(0), truth, 1E-4);
}
Also used : Matrix(org.apache.ignite.ml.math.Matrix) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) MLPArchitecture(org.apache.ignite.ml.nn.architecture.MLPArchitecture) DenseLocalOnHeapVector(org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) Vector(org.apache.ignite.ml.math.Vector) DenseLocalOnHeapVector(org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector) Test(org.junit.Test)

Example 10 with MLPArchitecture

Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in the Apache Ignite project.

From class MLPTest, method testStackedMLP.

/**
 * Test that two layer MLP is equivalent to it's subparts stacked on each other.
 *
 * <p>Builds one 4-3-2 network directly and the same network as two single-layer
 * perceptrons joined with {@code add}, then checks both produce identical output.</p>
 */
@Test
public void testStackedMLP() {
    int firstLayerNeuronsCnt = 3;
    int secondLayerNeuronsCnt = 2;

    // Same constant initializer everywhere so both constructions start from equal parameters.
    MLPConstInitializer initer = new MLPConstInitializer(1, 2);

    // Monolithic 4 -> 3 -> 2 network (sigmoid, no biases).
    MLPArchitecture wholeConf = new MLPArchitecture(4)
        .withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID)
        .withAddedLayer(secondLayerNeuronsCnt, false, Activators.SIGMOID);
    MultilayerPerceptron wholeMlp = new MultilayerPerceptron(wholeConf, initer);

    // The same network split into two single-layer halves.
    MLPArchitecture bottomConf = new MLPArchitecture(4)
        .withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID);
    MLPArchitecture topConf = new MLPArchitecture(firstLayerNeuronsCnt)
        .withAddedLayer(secondLayerNeuronsCnt, false, Activators.SIGMOID);

    MultilayerPerceptron bottomMlp = new MultilayerPerceptron(bottomConf, initer);
    MultilayerPerceptron topMlp = new MultilayerPerceptron(topConf, initer);
    MultilayerPerceptron stackedMlp = bottomMlp.add(topMlp);

    // Apply both to the same single sample and expect byte-identical predictions.
    Matrix sample = new DenseLocalOnHeapMatrix(new double[][] { { 1, 2, 3, 4 } }).transpose();
    Matrix predict = wholeMlp.apply(sample);
    Matrix stackedPredict = stackedMlp.apply(new DenseLocalOnHeapMatrix(new double[][] { { 1, 2, 3, 4 } }).transpose());

    Assert.assertEquals(predict, stackedPredict);
}
Also used : Matrix(org.apache.ignite.ml.math.Matrix) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) MLPArchitecture(org.apache.ignite.ml.nn.architecture.MLPArchitecture) DenseLocalOnHeapMatrix(org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix) Test(org.junit.Test)

Aggregations

MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture)12 Matrix (org.apache.ignite.ml.math.Matrix)10 DenseLocalOnHeapMatrix (org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix)9 Test (org.junit.Test)7 Random (java.util.Random)5 DenseLocalOnHeapVector (org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector)5 Vector (org.apache.ignite.ml.math.Vector)4 MultilayerPerceptron (org.apache.ignite.ml.nn.MultilayerPerceptron)4 LabeledVector (org.apache.ignite.ml.structures.LabeledVector)3 Stream (java.util.stream.Stream)2 VectorUtils (org.apache.ignite.ml.math.VectorUtils)2 MLPGroupUpdateTrainerCacheInput (org.apache.ignite.ml.nn.MLPGroupUpdateTrainerCacheInput)2 RandomInitializer (org.apache.ignite.ml.nn.initializers.RandomInitializer)2 RPropParameterUpdate (org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate)2 Ignite (org.apache.ignite.Ignite)1 TestUtils (org.apache.ignite.ml.TestUtils)1 Tracer (org.apache.ignite.ml.math.Tracer)1 IgniteTriFunction (org.apache.ignite.ml.math.functions.IgniteTriFunction)1 SimpleMLPLocalBatchTrainerInput (org.apache.ignite.ml.nn.SimpleMLPLocalBatchTrainerInput)1 LossFunctions (org.apache.ignite.ml.optimization.LossFunctions)1