
Example 1 with DenseMatrix

Use of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in project ignite by apache.

From the class MLPTrainerExample, method main.

/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    // IMPL NOTE based on MLPGroupTrainerTest#testXOR
    System.out.println(">>> Distributed multilayer perceptron example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");
        // Create cache with training data.
        CacheConfiguration<Integer, LabeledVector<double[]>> trainingSetCfg = new CacheConfiguration<>();
        trainingSetCfg.setName("TRAINING_SET");
        trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
        IgniteCache<Integer, LabeledVector<double[]>> trainingSet = null;
        try {
            trainingSet = ignite.createCache(trainingSetCfg);
            // Fill cache with training data.
            trainingSet.put(0, new LabeledVector<>(VectorUtils.of(0, 0), new double[] { 0 }));
            trainingSet.put(1, new LabeledVector<>(VectorUtils.of(0, 1), new double[] { 1 }));
            trainingSet.put(2, new LabeledVector<>(VectorUtils.of(1, 0), new double[] { 1 }));
            trainingSet.put(3, new LabeledVector<>(VectorUtils.of(1, 1), new double[] { 0 }));
            // Define a layered architecture.
            MLPArchitecture arch = new MLPArchitecture(2)
                .withAddedLayer(10, true, Activators.RELU)
                .withAddedLayer(1, false, Activators.SIGMOID);
            // Define a neural network trainer.
            MLPTrainer<SimpleGDParameterUpdate> trainer = new MLPTrainer<>(
                arch,
                LossFunctions.MSE,
                new UpdatesStrategy<>(
                    new SimpleGDUpdateCalculator(0.1),
                    SimpleGDParameterUpdate.SUM_LOCAL,
                    SimpleGDParameterUpdate.AVG),
                3000,
                4,
                50,
                123L);
            // Train neural network and get multilayer perceptron model.
            MultilayerPerceptron mlp = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());
            int totalCnt = 4;
            int failCnt = 0;
            // Calculate score.
            for (int i = 0; i < 4; i++) {
                LabeledVector<double[]> pnt = trainingSet.get(i);
                Matrix predicted = mlp.predict(new DenseMatrix(new double[][] { { pnt.features().get(0), pnt.features().get(1) } }));
                double predictedVal = predicted.get(0, 0);
                double lbl = pnt.label()[0];
                System.out.printf(">>> key: %d\t\t predicted: %.4f\t\tlabel: %.4f\n", i, predictedVal, lbl);
                failCnt += Math.abs(predictedVal - lbl) < 0.5 ? 0 : 1;
            }
            double failRatio = (double) failCnt / totalCnt;
            System.out.println("\n>>> Fail percentage: " + (failRatio * 100) + "%.");
            System.out.println("\n>>> Distributed multilayer perceptron example completed.");
        } finally {
            trainingSet.destroy();
        }
    } finally {
        System.out.flush();
    }
}
Also used : MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture), MLPTrainer (org.apache.ignite.ml.nn.MLPTrainer), LabeledVector (org.apache.ignite.ml.structures.LabeledVector), SimpleGDParameterUpdate (org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate), DenseMatrix (org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix), MultilayerPerceptron (org.apache.ignite.ml.nn.MultilayerPerceptron), Matrix (org.apache.ignite.ml.math.primitives.matrix.Matrix), SimpleGDUpdateCalculator (org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator), Ignite (org.apache.ignite.Ignite), RendezvousAffinityFunction (org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction), CacheConfiguration (org.apache.ignite.configuration.CacheConfiguration)
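
A note on the DenseMatrix usage in the scoring loop: mlp.predict receives a matrix whose single row is the observation, so each feature vector is wrapped into a one-row DenseMatrix via the double[][] constructor. The helper below is a minimal sketch of that conversion for an arbitrary feature count; the name toInputRow is hypothetical and not part of the Ignite API, and only the DenseMatrix(double[][]) constructor and the get accessors already shown above are assumed.

// Hypothetical helper: copies a feature Vector into the 1 x N row matrix
// shape that mlp.predict receives in the example above.
private static DenseMatrix toInputRow(Vector features) {
    double[][] row = new double[1][features.size()];

    for (int i = 0; i < features.size(); i++)
        row[0][i] = features.get(i);

    return new DenseMatrix(row);
}

// Usage, mirroring the loop body:
//   Matrix predicted = mlp.predict(toInputRow(pnt.features()));
//   double predictedVal = predicted.get(0, 0);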

Example 2 with DenseMatrix

Use of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in project ignite by apache.

From the class MLPTrainer, method updateModel.

/**
 * {@inheritDoc}
 */
@Override
protected <K, V> MultilayerPerceptron updateModel(MultilayerPerceptron lastLearnedMdl, DatasetBuilder<K, V> datasetBuilder, Preprocessor<K, V> extractor) {
    assert archSupplier != null;
    assert loss != null;
    assert updatesStgy != null;
    try (Dataset<EmptyContext, SimpleLabeledDatasetData> dataset = datasetBuilder.build(
        envBuilder,
        new EmptyContextBuilder<>(),
        new SimpleLabeledDatasetDataBuilder<>(extractor),
        learningEnvironment())) {
        MultilayerPerceptron mdl;
        if (lastLearnedMdl != null)
            mdl = lastLearnedMdl;
        else {
            MLPArchitecture arch = archSupplier.apply(dataset);
            mdl = new MultilayerPerceptron(arch, new RandomInitializer(seed));
        }
        ParameterUpdateCalculator<? super MultilayerPerceptron, P> updater = updatesStgy.getUpdatesCalculator();
        for (int i = 0; i < maxIterations; i += locIterations) {
            MultilayerPerceptron finalMdl = mdl;
            int finalI = i;
            List<P> totUp = dataset.compute(data -> {
                P update = updater.init(finalMdl, loss);
                MultilayerPerceptron mlp = Utils.copy(finalMdl);
                if (data.getFeatures() != null) {
                    List<P> updates = new ArrayList<>();
                    for (int locStep = 0; locStep < locIterations; locStep++) {
                        int[] rows = Utils.selectKDistinct(data.getRows(), Math.min(batchSize, data.getRows()), new Random(seed ^ (finalI * locStep)));
                        double[] inputsBatch = batch(data.getFeatures(), rows, data.getRows());
                        double[] groundTruthBatch = batch(data.getLabels(), rows, data.getRows());
                        Matrix inputs = new DenseMatrix(inputsBatch, rows.length, 0);
                        Matrix groundTruth = new DenseMatrix(groundTruthBatch, rows.length, 0);
                        update = updater.calculateNewUpdate(mlp, update, locStep, inputs.transpose(), groundTruth.transpose());
                        mlp = updater.update(mlp, update);
                        updates.add(update);
                    }
                    List<P> res = new ArrayList<>();
                    res.add(updatesStgy.locStepUpdatesReducer().apply(updates));
                    return res;
                }
                return null;
            }, (a, b) -> {
                if (a == null)
                    return b;
                else if (b == null)
                    return a;
                else {
                    a.addAll(b);
                    return a;
                }
            });
            if (totUp == null)
                return getLastTrainedModelOrThrowEmptyDatasetException(lastLearnedMdl);
            P update = updatesStgy.allUpdatesReducer().apply(totUp);
            mdl = updater.update(mdl, update);
        }
        return mdl;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used : EmptyContext (org.apache.ignite.ml.dataset.primitive.context.EmptyContext), SimpleLabeledDatasetData (org.apache.ignite.ml.dataset.primitive.data.SimpleLabeledDatasetData), MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture), ArrayList (java.util.ArrayList), Matrix (org.apache.ignite.ml.math.primitives.matrix.Matrix), DenseMatrix (org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix), Random (java.util.Random), RandomInitializer (org.apache.ignite.ml.nn.initializers.RandomInitializer)
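
The local training step above flattens the selected rows into plain double[] buffers with batch(...) and rebuilds them as DenseMatrix instances before transposing them for the update calculator. As a more explicit picture of that conversion, here is a minimal sketch using only the DenseMatrix(rows, cols) constructor and setX seen in the other examples; the helper name fromRowMajor is hypothetical, and the row-by-row layout of the flat array is an assumption, not a statement about the storage-mode argument the trainer passes to the three-argument DenseMatrix constructor.

// Hypothetical helper: rebuilds a matrix from a flat array assumed to be
// laid out row by row. The trainer itself relies on the three-argument
// DenseMatrix(double[], int, int) constructor for the same purpose.
private static DenseMatrix fromRowMajor(double[] flat, int rows, int cols) {
    DenseMatrix mtx = new DenseMatrix(rows, cols);

    for (int r = 0; r < rows; r++)
        for (int c = 0; c < cols; c++)
            mtx.setX(r, c, flat[r * cols + c]);

    return mtx;
}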

Example 3 with DenseMatrix

Use of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in project ignite by apache.

From the class ReplicatedVectorMatrix, method assignColumn.

/**
 * {@inheritDoc}
 */
@Override
public Matrix assignColumn(int col, Vector vec) {
    int rows = asCol ? vector.size() : replicationCnt;
    int cols = asCol ? replicationCnt : vector.size();
    int times = asCol ? cols : rows;
    Matrix res = new DenseMatrix(rows, cols);
    IgniteBiConsumer<Integer, Vector> replicantAssigner = asCol ? res::assignColumn : res::assignRow;
    IgniteBiConsumer<Integer, Vector> assigner = res::assignColumn;
    assign(replicantAssigner, assigner, vector, vec, times, col);
    return res;
}
Also used : Matrix (org.apache.ignite.ml.math.primitives.matrix.Matrix), DenseMatrix (org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix), Vector (org.apache.ignite.ml.math.primitives.vector.Vector)
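
In the method above, assignColumn and assignRow are used as method references to copy the replicated vector into every column or row of the freshly allocated DenseMatrix. A minimal standalone sketch of the same two calls, assuming only the DenseMatrix(rows, cols) constructor used above and VectorUtils.of from Example 1:

// A 3 x 2 dense matrix, initially all zeros.
Matrix res = new DenseMatrix(3, 2);

// Overwrite column 0 with (1, 2, 3), then row 2 with (7, 8).
res.assignColumn(0, VectorUtils.of(1, 2, 3));
res.assignRow(2, VectorUtils.of(7, 8));

// res now holds:
// 1  0
// 2  0
// 7  8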

Example 4 with DenseMatrix

Use of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in project ignite by apache.

From the class MultilayerPerceptron, method initLayers.

/**
 * Init layers parameters with initializer.
 *
 * @param initializer Parameters initializer.
 */
private void initLayers(MLPInitializer initializer) {
    int prevSize = architecture.inputSize();
    for (int i = 1; i < architecture.layersCount(); i++) {
        TransformationLayerArchitecture layerCfg = architecture.transformationLayerArchitecture(i);
        int neuronsCnt = layerCfg.neuronsCount();
        DenseMatrix weights = new DenseMatrix(neuronsCnt, prevSize);
        initializer.initWeights(weights);
        DenseVector biases = null;
        if (layerCfg.hasBias()) {
            biases = new DenseVector(neuronsCnt);
            initializer.initBiases(biases);
        }
        layers.add(new MLPLayer(weights, biases));
        prevSize = layerCfg.neuronsCount();
    }
}
Also used : DenseVector (org.apache.ignite.ml.math.primitives.vector.impl.DenseVector), TransformationLayerArchitecture (org.apache.ignite.ml.nn.architecture.TransformationLayerArchitecture), DenseMatrix (org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix)
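
initWeights and initBiases leave the concrete filling strategy to the MLPInitializer passed in (a RandomInitializer in Example 2). The sketch below shows what a trivial constant initializer could look like; it assumes, based only on the calls in initLayers above, that MLPInitializer declares initWeights(Matrix) and initBiases(Vector), so treat the signatures as an assumption rather than the documented interface.

// Hypothetical constant initializer, sketched under the assumption that
// MLPInitializer exposes initWeights(Matrix) and initBiases(Vector).
class ConstantInitializer implements MLPInitializer {
    /** Fill every weight with a small constant. */
    @Override public void initWeights(Matrix weights) {
        for (int r = 0; r < weights.rowSize(); r++)
            for (int c = 0; c < weights.columnSize(); c++)
                weights.setX(r, c, 0.01);
    }

    /** Zero out the biases. */
    @Override public void initBiases(Vector biases) {
        for (int i = 0; i < biases.size(); i++)
            biases.setX(i, 0.0);
    }
}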

Example 5 with DenseMatrix

Use of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in project ignite by apache.

From the class MatrixUtil, method fromList.

/**
 * Builds a dense matrix from a list of vectors.
 *
 * @param vecs Source vectors, all of the same size.
 * @param entriesAreRows If {@code true}, each vector becomes a row of the result; otherwise a column.
 * @return Dense matrix filled with the vectors' entries.
 */
public static DenseMatrix fromList(List<Vector> vecs, boolean entriesAreRows) {
    GridArgumentCheck.notEmpty(vecs, "vecs");
    int dim = vecs.get(0).size();
    int vecsSize = vecs.size();
    DenseMatrix res = new DenseMatrix(entriesAreRows ? vecsSize : dim, entriesAreRows ? dim : vecsSize);
    for (int i = 0; i < vecsSize; i++) {
        for (int j = 0; j < dim; j++) {
            int r = entriesAreRows ? i : j;
            int c = entriesAreRows ? j : i;
            res.setX(r, c, vecs.get(i).get(j));
        }
    }
    return res;
}
Also used : DenseMatrix(org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix)
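
A short usage sketch for fromList, assuming MatrixUtil is accessible on the classpath (its package is not shown above) and reusing VectorUtils.of from Example 1 plus java.util.Arrays:

List<Vector> vecs = Arrays.asList(
    VectorUtils.of(1, 2, 3),
    VectorUtils.of(4, 5, 6));

// Vectors as rows: a 2 x 3 matrix.
DenseMatrix byRows = MatrixUtil.fromList(vecs, true);

// Vectors as columns: a 3 x 2 matrix.
DenseMatrix byCols = MatrixUtil.fromList(vecs, false);

// byRows.get(1, 2) == 6.0 and byCols.get(2, 1) == 6.0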

Aggregations

DenseMatrix (org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix) 39
Test (org.junit.Test) 28
Matrix (org.apache.ignite.ml.math.primitives.matrix.Matrix) 14
DenseVector (org.apache.ignite.ml.math.primitives.vector.impl.DenseVector) 14
MLPArchitecture (org.apache.ignite.ml.nn.architecture.MLPArchitecture) 9
Vector (org.apache.ignite.ml.math.primitives.vector.Vector) 8
SparseVector (org.apache.ignite.ml.math.primitives.vector.impl.SparseVector) 4
MultivariateGaussianDistribution (org.apache.ignite.ml.math.stat.MultivariateGaussianDistribution) 3
LabeledVector (org.apache.ignite.ml.structures.LabeledVector) 3
RendezvousAffinityFunction (org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction) 2
CacheConfiguration (org.apache.ignite.configuration.CacheConfiguration) 2
EuclideanDistance (org.apache.ignite.ml.math.distances.EuclideanDistance) 2
HammingDistance (org.apache.ignite.ml.math.distances.HammingDistance) 2
ManhattanDistance (org.apache.ignite.ml.math.distances.ManhattanDistance) 2
ViewMatrix (org.apache.ignite.ml.math.primitives.matrix.impl.ViewMatrix) 2
VectorizedViewMatrix (org.apache.ignite.ml.math.primitives.vector.impl.VectorizedViewMatrix) 2
MLPTrainer (org.apache.ignite.ml.nn.MLPTrainer) 2
SimpleGDParameterUpdate (org.apache.ignite.ml.optimization.updatecalculators.SimpleGDParameterUpdate) 2
SimpleGDUpdateCalculator (org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator) 2
Path (java.nio.file.Path) 1