Search in sources:

Example 1 with MultilayerPerceptron

Use of org.apache.ignite.ml.nn.MultilayerPerceptron in project ignite by apache.

The class MLPGroupTrainerExample, method main.

/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) throws InterruptedException {
    // IMPL NOTE based on MLPGroupTrainerTest#testXOR
    System.out.println(">>> Distributed multilayer perceptron example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");
        // Create an IgniteThread: we must work with SparseDistributedMatrix inside an IgniteThread
        // because an Ignite cache is created internally.
        IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
            MLPGroupTrainerExample.class.getSimpleName(), () -> {
            int samplesCnt = 10000;
            // XOR truth table, transposed so that each column holds one sample.
            Matrix xorInputs = new DenseLocalOnHeapMatrix(
                new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } },
                StorageConstants.ROW_STORAGE_MODE).transpose();
            Matrix xorOutputs = new DenseLocalOnHeapMatrix(
                new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } },
                StorageConstants.ROW_STORAGE_MODE).transpose();
            // Architecture: 2 inputs -> 10 hidden ReLU neurons (with bias) -> 1 sigmoid output.
            MLPArchitecture conf = new MLPArchitecture(2)
                .withAddedLayer(10, true, Activators.RELU)
                .withAddedLayer(1, false, Activators.SIGMOID);
            IgniteCache<Integer, LabeledVector<Vector, Vector>> cache = LabeledVectorsCache.createNew(ignite);
            String cacheName = cache.getName();
            Random rnd = new Random(12345L);
            try (IgniteDataStreamer<Integer, LabeledVector<Vector, Vector>> streamer = ignite.dataStreamer(cacheName)) {
                streamer.perNodeBufferSize(100);
                for (int i = 0; i < samplesCnt; i++) {
                    int col = rnd.nextInt(4); // Pick a random XOR sample; avoids the Math.abs(Integer.MIN_VALUE) pitfall of Math.abs(rnd.nextInt()) % 4.
                    streamer.addData(i, new LabeledVector<>(xorInputs.getCol(col), xorOutputs.getCol(col)));
                }
            }
            int totalCnt = 100;
            int failCnt = 0;
            MLPGroupUpdateTrainer<RPropParameterUpdate> trainer = MLPGroupUpdateTrainer.getDefault(ignite)
                .withSyncPeriod(3)
                .withTolerance(0.001)
                .withMaxGlobalSteps(20);
            for (int i = 0; i < totalCnt; i++) {
                MLPGroupUpdateTrainerCacheInput trainerInput = new MLPGroupUpdateTrainerCacheInput(conf,
                    new RandomInitializer(rnd), 6, cache, 10);
                MultilayerPerceptron mlp = trainer.train(trainerInput);
                Matrix predict = mlp.apply(xorInputs);
                System.out.println(">>> Prediction data at step " + i + " of total " + totalCnt + ":");
                Tracer.showAscii(predict);
                System.out.println("Difference estimate: " + xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2));
                failCnt += closeEnough(xorOutputs.getRow(0), predict.getRow(0)) ? 0 : 1;
            }
            double failRatio = (double) failCnt / totalCnt;
            System.out.println("\n>>> Fail percentage: " + (failRatio * 100) + "%.");
            System.out.println("\n>>> Distributed multilayer perceptron example completed.");
        });
        igniteThread.start();
        igniteThread.join();
    }
}
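
The loop above scores each trained model with a closeEnough helper that this page does not show. A minimal sketch of what it might look like, assuming a simple fixed absolute tolerance (the 0.5 threshold below is illustrative, not the project's actual value):

/** Hedged sketch: prediction counts as correct if every component is within eps of the label. */
private static boolean closeEnough(Vector expected, Vector actual) {
    double eps = 0.5; // Illustrative: rounding boundary for a sigmoid output in [0, 1].
    for (int i = 0; i < expected.size(); i++)
        if (Math.abs(expected.get(i) - actual.get(i)) > eps)
            return false;
    return true;
}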
Also used: MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture), LabeledVector (org.apache.ignite.ml.structures.LabeledVector), DenseLocalOnHeapMatrix (org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix), MultilayerPerceptron (org.apache.ignite.ml.nn.MultilayerPerceptron), Matrix (org.apache.ignite.ml.math.Matrix), Random (java.util.Random), RPropParameterUpdate (org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate), Ignite (org.apache.ignite.Ignite), IgniteThread (org.apache.ignite.thread.IgniteThread), RandomInitializer (org.apache.ignite.ml.nn.initializers.RandomInitializer), MLPGroupUpdateTrainerCacheInput (org.apache.ignite.ml.nn.MLPGroupUpdateTrainerCacheInput)

Example 2 with MultilayerPerceptron

Use of org.apache.ignite.ml.nn.MultilayerPerceptron in project ignite by apache.

The class MLPLocalTrainerExample, method main.

/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    // IMPL NOTE based on MLPLocalTrainerTest#testXORRProp
    System.out.println(">>> Local multilayer perceptron example started.");
    Matrix xorInputs = new DenseLocalOnHeapMatrix(
        new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } },
        StorageConstants.ROW_STORAGE_MODE).transpose();
    System.out.println("\n>>> Input data:");
    Tracer.showAscii(xorInputs);
    Matrix xorOutputs = new DenseLocalOnHeapMatrix(
        new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } },
        StorageConstants.ROW_STORAGE_MODE).transpose();
    // Architecture: 2 inputs -> 10 hidden ReLU neurons (with bias) -> 1 sigmoid output.
    MLPArchitecture conf = new MLPArchitecture(2)
        .withAddedLayer(10, true, Activators.RELU)
        .withAddedLayer(1, false, Activators.SIGMOID);
    SimpleMLPLocalBatchTrainerInput trainerInput = new SimpleMLPLocalBatchTrainerInput(conf,
        new Random(1234L), xorInputs, xorOutputs, 4);
    System.out.println("\n>>> Perform training.");
    MultilayerPerceptron mlp = new MLPLocalBatchTrainer<>(LossFunctions.MSE,
        RPropUpdateCalculator::new,
        0.0001, // Error threshold.
        16000   // Max iterations.
    ).train(trainerInput);
    System.out.println("\n>>> Apply model.");
    Matrix predict = mlp.apply(xorInputs);
    System.out.println("\n>>> Predicted data:");
    Tracer.showAscii(predict);
    System.out.println("\n>>> Reference expected data:");
    Tracer.showAscii(xorOutputs);
    System.out.println("\n>>> Difference estimate: " + xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2));
    System.out.println("\n>>> Local multilayer perceptron example completed.");
}
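
The MLPLocalBatchTrainer is parameterized by a loss function, a supplier of parameter-update calculators, an error threshold, and an iteration cap, so the update rule can be swapped without touching the rest of the example. A hedged variant of the training call above using plain gradient descent instead of RProp (it assumes a SimpleGDUpdateCalculator with a single learning-rate constructor is available in this Ignite version; the 0.3 learning rate is illustrative):

MultilayerPerceptron mlpGd = new MLPLocalBatchTrainer<>(LossFunctions.MSE,
    () -> new SimpleGDUpdateCalculator(0.3), // Assumed API; illustrative learning rate.
    0.0001, // Error threshold, as above.
    16000   // Max iterations, as above.
).train(trainerInput);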
Also used: MultilayerPerceptron (org.apache.ignite.ml.nn.MultilayerPerceptron), Matrix (org.apache.ignite.ml.math.Matrix), DenseLocalOnHeapMatrix (org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix), MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture), Random (java.util.Random)

Example 3 with MultilayerPerceptron

Use of org.apache.ignite.ml.nn.MultilayerPerceptron in project ignite by apache.

The class MnistDistributed, method testMNISTDistributed.

/**
 * Run NN classifier on MNIST, keeping the training set in a distributed cache.
 */
public void testMNISTDistributed() throws IOException {
    int samplesCnt = 60_000;
    int hiddenNeuronsCnt = 100;
    IgniteBiTuple<Stream<DenseLocalOnHeapVector>, Stream<DenseLocalOnHeapVector>> trainingAndTest = loadMnist(samplesCnt);
    // Load training mnist part into a cache.
    Stream<DenseLocalOnHeapVector> trainingMnist = trainingAndTest.get1();
    List<DenseLocalOnHeapVector> trainingMnistLst = trainingMnist.collect(Collectors.toList());
    IgniteCache<Integer, LabeledVector<Vector, Vector>> labeledVectorsCache = LabeledVectorsCache.createNew(ignite);
    loadIntoCache(trainingMnistLst, labeledVectorsCache);
    MLPGroupUpdateTrainer<RPropParameterUpdate> trainer = MLPGroupUpdateTrainer.getDefault(ignite)
        .withMaxGlobalSteps(35)
        .withSyncPeriod(2);
    // Architecture: FEATURES_CNT inputs -> 100 hidden sigmoid neurons (with bias) -> 10 sigmoid outputs, one per digit.
    MLPArchitecture arch = new MLPArchitecture(FEATURES_CNT)
        .withAddedLayer(hiddenNeuronsCnt, true, Activators.SIGMOID)
        .withAddedLayer(10, false, Activators.SIGMOID);
    MultilayerPerceptron mdl = trainer.train(new MLPGroupUpdateTrainerCacheInput(arch, 9, labeledVectorsCache, 2000));
    IgniteBiTuple<Matrix, Matrix> testDs = createDataset(trainingAndTest.get2(), 10_000, FEATURES_CNT);
    Vector truth = testDs.get2().foldColumns(VectorUtils::vec2Num);
    Vector predicted = mdl.apply(testDs.get1()).foldColumns(VectorUtils::vec2Num);
    Tracer.showAscii(truth);
    Tracer.showAscii(predicted);
    X.println("Accuracy: " + VectorUtils.zipWith(predicted, truth, (x, y) -> x.equals(y) ? 1.0 : 0.0).sum() / truth.size() * 100 + "%.");
}
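
loadIntoCache is a helper of the test class and is not shown on this page. A sketch of what it plausibly does, mirroring the data-streamer pattern from Example 1; the label layout (digit stored in the vector's last component, turned into a one-hot target of length 10) is an assumption suggested by the 10-output architecture and the vec2Num decoding below:

/** Hedged sketch of loadIntoCache: stream labeled MNIST vectors into the cache. */
private void loadIntoCache(List<DenseLocalOnHeapVector> vecs,
    IgniteCache<Integer, LabeledVector<Vector, Vector>> cache) {
    try (IgniteDataStreamer<Integer, LabeledVector<Vector, Vector>> streamer =
        ignite.dataStreamer(cache.getName())) {
        int sampleIdx = 0;
        for (DenseLocalOnHeapVector v : vecs) {
            // Assumption: pixels occupy the leading components, the digit label the last one.
            Vector features = v.viewPart(0, v.size() - 1);
            Vector lb = new DenseLocalOnHeapVector(10); // One-hot target, one slot per digit.
            lb.set((int)v.get(v.size() - 1), 1.0);
            streamer.addData(sampleIdx++, new LabeledVector<>(features, lb));
        }
    }
}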
Also used: VectorUtils (org.apache.ignite.ml.math.VectorUtils), MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture), LabeledVector (org.apache.ignite.ml.structures.LabeledVector), MultilayerPerceptron (org.apache.ignite.ml.nn.MultilayerPerceptron), Matrix (org.apache.ignite.ml.math.Matrix), RPropParameterUpdate (org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate), Stream (java.util.stream.Stream), DenseLocalOnHeapVector (org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector), Vector (org.apache.ignite.ml.math.Vector), MLPGroupUpdateTrainerCacheInput (org.apache.ignite.ml.nn.MLPGroupUpdateTrainerCacheInput)

Example 4 with MultilayerPerceptron

Use of org.apache.ignite.ml.nn.MultilayerPerceptron in project ignite by apache.

The class MnistLocal, method tstMNISTLocal.

/**
 * Run NN classifier on MNIST, using a bi-indexed cache as storage for the dataset.
 * To run this test, rename this method so that its name starts with 'test'.
 *
 * @throws IOException In case of loading MNIST dataset errors.
 */
@Test
public void tstMNISTLocal() throws IOException {
    int samplesCnt = 60_000;
    int featCnt = 28 * 28;
    int hiddenNeuronsCnt = 100;
    IgniteBiTuple<Stream<DenseLocalOnHeapVector>, Stream<DenseLocalOnHeapVector>> trainingAndTest = loadMnist(samplesCnt);
    Stream<DenseLocalOnHeapVector> trainingMnistStream = trainingAndTest.get1();
    Stream<DenseLocalOnHeapVector> testMnistStream = trainingAndTest.get2();
    IgniteBiTuple<Matrix, Matrix> ds = createDataset(trainingMnistStream, samplesCnt, featCnt);
    IgniteBiTuple<Matrix, Matrix> testDs = createDataset(testMnistStream, 10000, featCnt);
    MLPArchitecture conf = new MLPArchitecture(featCnt)
        .withAddedLayer(hiddenNeuronsCnt, true, Activators.SIGMOID)
        .withAddedLayer(10, false, Activators.SIGMOID);
    SimpleMLPLocalBatchTrainerInput input = new SimpleMLPLocalBatchTrainerInput(conf, new Random(),
        ds.get1(), ds.get2(), 2000);
    X.println("Training started");
    long before = System.currentTimeMillis();
    MultilayerPerceptron mdl = new MLPLocalBatchTrainer<>(LossFunctions.MSE,
        () -> new RPropUpdateCalculator(0.1, 1.2, 0.5),
        1E-7, // Error threshold.
        200   // Max iterations.
    ).train(input);
    X.println("Training finished in " + (System.currentTimeMillis() - before) + " ms");
    Vector predicted = mdl.apply(testDs.get1()).foldColumns(VectorUtils::vec2Num);
    Vector truth = testDs.get2().foldColumns(VectorUtils::vec2Num);
    Tracer.showAscii(truth);
    Tracer.showAscii(predicted);
    X.println("Accuracy: " + VectorUtils.zipWith(predicted, truth, (x, y) -> x.equals(y) ? 1.0 : 0.0).sum() / truth.size() * 100 + "%.");
}
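
Both MNIST examples collapse the network's 10-component output to a single digit with VectorUtils::vec2Num. A minimal sketch of that mapping, assuming it is a plain argmax over the vector's components:

/** Hedged sketch of vec2Num: map a score (or one-hot) vector to the index of its largest component. */
static double vec2Num(Vector v) {
    int maxIdx = 0;
    for (int i = 1; i < v.size(); i++)
        if (v.get(i) > v.get(maxIdx))
            maxIdx = i;
    return maxIdx;
}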
Also used: SimpleMLPLocalBatchTrainerInput (org.apache.ignite.ml.nn.SimpleMLPLocalBatchTrainerInput), VectorUtils (org.apache.ignite.ml.math.VectorUtils), MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture), RPropUpdateCalculator (org.apache.ignite.ml.optimization.updatecalculators.RPropUpdateCalculator), MultilayerPerceptron (org.apache.ignite.ml.nn.MultilayerPerceptron), Matrix (org.apache.ignite.ml.math.Matrix), Random (java.util.Random), Stream (java.util.stream.Stream), DenseLocalOnHeapVector (org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector), Vector (org.apache.ignite.ml.math.Vector), Test (org.junit.Test)

Aggregations

Matrix (org.apache.ignite.ml.math.Matrix): 4
MultilayerPerceptron (org.apache.ignite.ml.nn.MultilayerPerceptron): 4
MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture): 4
Random (java.util.Random): 3
Stream (java.util.stream.Stream): 2
Vector (org.apache.ignite.ml.math.Vector): 2
VectorUtils (org.apache.ignite.ml.math.VectorUtils): 2
DenseLocalOnHeapMatrix (org.apache.ignite.ml.math.impls.matrix.DenseLocalOnHeapMatrix): 2
DenseLocalOnHeapVector (org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector): 2
MLPGroupUpdateTrainerCacheInput (org.apache.ignite.ml.nn.MLPGroupUpdateTrainerCacheInput): 2
RPropParameterUpdate (org.apache.ignite.ml.optimization.updatecalculators.RPropParameterUpdate): 2
LabeledVector (org.apache.ignite.ml.structures.LabeledVector): 2
Ignite (org.apache.ignite.Ignite): 1
SimpleMLPLocalBatchTrainerInput (org.apache.ignite.ml.nn.SimpleMLPLocalBatchTrainerInput): 1
RandomInitializer (org.apache.ignite.ml.nn.initializers.RandomInitializer): 1
RPropUpdateCalculator (org.apache.ignite.ml.optimization.updatecalculators.RPropUpdateCalculator): 1
IgniteThread (org.apache.ignite.thread.IgniteThread): 1
Test (org.junit.Test): 1