Example usage of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in the Apache Ignite project: the MLPTrainerExample.main method.
/**
 * Executes the example: trains a multilayer perceptron on the XOR truth table
 * stored in a distributed cache, then prints the prediction for each sample
 * together with the resulting fail ratio.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    // IMPL NOTE based on MLPGroupTrainerTest#testXOR
    System.out.println(">>> Distributed multilayer perceptron example started.");

    // Start ignite grid (closed automatically by try-with-resources).
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");

        // Create cache with training data, partitioned across 10 partitions.
        CacheConfiguration<Integer, LabeledVector<double[]>> trainingSetCfg = new CacheConfiguration<>();
        trainingSetCfg.setName("TRAINING_SET");
        trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));

        IgniteCache<Integer, LabeledVector<double[]>> trainingSet = null;

        try {
            trainingSet = ignite.createCache(trainingSetCfg);

            // Fill cache with training data: the XOR truth table.
            trainingSet.put(0, new LabeledVector<>(VectorUtils.of(0, 0), new double[] { 0 }));
            trainingSet.put(1, new LabeledVector<>(VectorUtils.of(0, 1), new double[] { 1 }));
            trainingSet.put(2, new LabeledVector<>(VectorUtils.of(1, 0), new double[] { 1 }));
            trainingSet.put(3, new LabeledVector<>(VectorUtils.of(1, 1), new double[] { 0 }));

            // Define a layered architecture: 2 inputs -> 10 ReLU neurons (with bias)
            // -> 1 sigmoid output (no bias).
            MLPArchitecture arch = new MLPArchitecture(2)
                .withAddedLayer(10, true, Activators.RELU)
                .withAddedLayer(1, false, Activators.SIGMOID);

            // Define a neural network trainer: MSE loss, simple gradient descent
            // (learning rate 0.1), 3000 global iterations, batch size 4,
            // 50 local iterations, fixed seed 123 for reproducibility.
            MLPTrainer<SimpleGDParameterUpdate> trainer = new MLPTrainer<>(
                arch,
                LossFunctions.MSE,
                new UpdatesStrategy<>(
                    new SimpleGDUpdateCalculator(0.1),
                    SimpleGDParameterUpdate.SUM_LOCAL,
                    SimpleGDParameterUpdate.AVG
                ),
                3000,
                4,
                50,
                123L
            );

            // Train neural network and get multilayer perceptron model.
            MultilayerPerceptron mlp = trainer.fit(ignite, trainingSet, new LabeledDummyVectorizer<>());

            int totalCnt = 4;
            int failCnt = 0;

            // Calculate score: a prediction within 0.5 of the label counts as correct.
            for (int i = 0; i < totalCnt; i++) {
                LabeledVector<double[]> pnt = trainingSet.get(i);
                Matrix predicted = mlp.predict(new DenseMatrix(
                    new double[][] { { pnt.features().get(0), pnt.features().get(1) } }));

                double predictedVal = predicted.get(0, 0);
                double lbl = pnt.label()[0];
                System.out.printf(">>> key: %d\t\t predicted: %.4f\t\tlabel: %.4f\n", i, predictedVal, lbl);
                failCnt += Math.abs(predictedVal - lbl) < 0.5 ? 0 : 1;
            }

            double failRatio = (double) failCnt / totalCnt;

            System.out.println("\n>>> Fail percentage: " + (failRatio * 100) + "%.");
            System.out.println("\n>>> Distributed multilayer perceptron example completed.");
        }
        finally {
            // Fix: createCache may throw before trainingSet is assigned; calling
            // destroy() on null would raise an NPE that masks the original exception.
            if (trainingSet != null)
                trainingSet.destroy();
        }
    }
    finally {
        System.out.flush();
    }
}
Usage of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in the Apache Ignite project: the MLPTrainer.updateModel method.
/**
 * {@inheritDoc}
 *
 * Trains (or continues training of) a multilayer perceptron on the given dataset,
 * running {@code maxIterations} global steps; each global step performs
 * {@code locIterations} local mini-batch updates on every data partition and then
 * reduces the resulting parameter updates into a single global update.
 */
@Override
protected <K, V> MultilayerPerceptron updateModel(MultilayerPerceptron lastLearnedMdl, DatasetBuilder<K, V> datasetBuilder, Preprocessor<K, V> extractor) {
// Trainer must be fully configured before an update pass.
assert archSupplier != null;
assert loss != null;
assert updatesStgy != null;
// Build a partition-based labeled dataset; closed automatically when training finishes.
try (Dataset<EmptyContext, SimpleLabeledDatasetData> dataset = datasetBuilder.build(envBuilder, new EmptyContextBuilder<>(), new SimpleLabeledDatasetDataBuilder<>(extractor), learningEnvironment())) {
MultilayerPerceptron mdl;
// Warm start: continue from the previously learned model if one is supplied,
// otherwise build a fresh perceptron with randomly initialized parameters.
if (lastLearnedMdl != null)
mdl = lastLearnedMdl;
else {
MLPArchitecture arch = archSupplier.apply(dataset);
mdl = new MultilayerPerceptron(arch, new RandomInitializer(seed));
}
ParameterUpdateCalculator<? super MultilayerPerceptron, P> updater = updatesStgy.getUpdatesCalculator();
// Each outer pass covers locIterations local steps, so advance by that amount.
for (int i = 0; i < maxIterations; i += locIterations) {
// Effectively-final copies required for capture in the compute lambda below.
MultilayerPerceptron finalMdl = mdl;
int finalI = i;
// Map phase: run locIterations of mini-batch training on each partition.
List<P> totUp = dataset.compute(data -> {
P update = updater.init(finalMdl, loss);
// Work on a local copy so the captured model is not mutated.
MultilayerPerceptron mlp = Utils.copy(finalMdl);
// Partitions without feature data contribute nothing (return null below).
if (data.getFeatures() != null) {
List<P> updates = new ArrayList<>();
for (int locStep = 0; locStep < locIterations; locStep++) {
// Sample a mini-batch of distinct row indices.
// NOTE(review): when locStep == 0 the product is 0, so the sampling seed
// equals `seed` on the first local step of every outer iteration — the
// first mini-batch of each global step is identical; confirm intended.
int[] rows = Utils.selectKDistinct(data.getRows(), Math.min(batchSize, data.getRows()), new Random(seed ^ (finalI * locStep)));
double[] inputsBatch = batch(data.getFeatures(), rows, data.getRows());
double[] groundTruthBatch = batch(data.getLabels(), rows, data.getRows());
// The trailing 0 is presumably a storage/access-mode constant — TODO confirm
// against DenseMatrix's (double[], int, int) constructor.
Matrix inputs = new DenseMatrix(inputsBatch, rows.length, 0);
Matrix groundTruth = new DenseMatrix(groundTruthBatch, rows.length, 0);
// Transposed so samples become columns, as expected by the update calculator.
update = updater.calculateNewUpdate(mlp, update, locStep, inputs.transpose(), groundTruth.transpose());
mlp = updater.update(mlp, update);
updates.add(update);
}
// Collapse this partition's local-step updates into a single update.
List<P> res = new ArrayList<>();
res.add(updatesStgy.locStepUpdatesReducer().apply(updates));
return res;
}
return null;
}, (a, b) -> {
// Reduce phase: concatenate per-partition update lists, skipping empty partitions.
if (a == null)
return b;
else if (b == null)
return a;
else {
a.addAll(b);
return a;
}
});
// No partition produced an update: the dataset is empty.
if (totUp == null)
return getLastTrainedModelOrThrowEmptyDatasetException(lastLearnedMdl);
// Merge all partition updates into one global update and apply it to the model.
P update = updatesStgy.allUpdatesReducer().apply(totUp);
mdl = updater.update(mdl, update);
}
return mdl;
} catch (Exception e) {
// Dataset#close declares a checked exception; rethrow as unchecked.
throw new RuntimeException(e);
}
}
Usage of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in the Apache Ignite project: the ReplicatedVectorMatrix.assignColumn method.
/**
 * {@inheritDoc}
 *
 * Materializes this replicated-vector matrix as a dense matrix and assigns
 * the given vector to the specified column of the result.
 */
@Override
public Matrix assignColumn(int col, Vector vec) {
    int vecLen = vector.size();

    // Matrix dimensions depend on whether the replicated vector is oriented
    // as a column (vecLen x replicationCnt) or as a row (replicationCnt x vecLen).
    int rows = asCol ? vecLen : replicationCnt;
    int cols = asCol ? replicationCnt : vecLen;

    // Number of replicant copies to write equals the replicated dimension.
    int times = asCol ? cols : rows;

    Matrix res = new DenseMatrix(rows, cols);

    // Replicants go into columns when the vector is a column, rows otherwise.
    IgniteBiConsumer<Integer, Vector> replicantAssigner;
    if (asCol)
        replicantAssigner = res::assignColumn;
    else
        replicantAssigner = res::assignRow;

    // The explicitly assigned vector always lands in a column.
    IgniteBiConsumer<Integer, Vector> assigner = res::assignColumn;

    assign(replicantAssigner, assigner, vector, vec, times, col);

    return res;
}
Usage of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in the Apache Ignite project: the MultilayerPerceptron.initLayers method.
/**
 * Init layers parameters with initializer.
 *
 * Walks the transformation layers (indices 1..layersCount-1), allocating for each
 * a weight matrix of shape [neurons in layer x neurons in previous layer] and,
 * when the layer is configured with a bias, a bias vector of matching length.
 *
 * @param initializer Parameters initializer.
 */
private void initLayers(MLPInitializer initializer) {
    int inSize = architecture.inputSize();

    for (int layerIdx = 1; layerIdx < architecture.layersCount(); layerIdx++) {
        TransformationLayerArchitecture cfg = architecture.transformationLayerArchitecture(layerIdx);
        int outSize = cfg.neuronsCount();

        DenseMatrix weights = new DenseMatrix(outSize, inSize);
        initializer.initWeights(weights);

        // Bias vector exists only for layers configured with one.
        DenseVector biases = cfg.hasBias() ? new DenseVector(outSize) : null;
        if (biases != null)
            initializer.initBiases(biases);

        layers.add(new MLPLayer(weights, biases));

        // This layer's output width is the next layer's input width.
        inSize = outSize;
    }
}
Usage of org.apache.ignite.ml.math.primitives.matrix.impl.DenseMatrix in the Apache Ignite project: the MatrixUtil.fromList method.
/**
 * Builds a dense matrix from a list of equally-sized vectors.
 *
 * @param vecs Non-empty list of vectors; all are assumed to share the size of the first.
 * @param entriesAreRows If {@code true} each vector becomes a row of the result,
 *      otherwise each vector becomes a column.
 * @return Dense matrix of shape [vecs.size() x dim] or [dim x vecs.size()].
 */
public static DenseMatrix fromList(List<Vector> vecs, boolean entriesAreRows) {
    GridArgumentCheck.notEmpty(vecs, "vecs");

    int dim = vecs.get(0).size();
    int cnt = vecs.size();

    DenseMatrix res = entriesAreRows
        ? new DenseMatrix(cnt, dim)
        : new DenseMatrix(dim, cnt);

    for (int vecIdx = 0; vecIdx < cnt; vecIdx++) {
        Vector vec = vecs.get(vecIdx);

        for (int elemIdx = 0; elemIdx < dim; elemIdx++) {
            // Vector index picks the row when entries are rows, the column otherwise.
            if (entriesAreRows)
                res.setX(vecIdx, elemIdx, vec.get(elemIdx));
            else
                res.setX(elemIdx, vecIdx, vec.get(elemIdx));
        }
    }

    return res;
}
Aggregations