Use of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class MnistDistributed, method asLabeledVector.
/**
 * Transform a vector produced by {@link MnistUtils} into a {@link LabeledVector}.
 *
 * @param v Vector to transform; entries {@code [0, featsCnt)} are features, entry {@code featsCnt} is the label digit.
 * @param featsCnt Count of features.
 * @return The {@link MnistUtils} vector repackaged as a {@link LabeledVector}.
 */
private static LabeledVector<Vector, Vector> asLabeledVector(Vector v, int featsCnt) {
int digit = (int) v.get(featsCnt);
// 10 output classes: one per MNIST digit 0-9.
return new LabeledVector<>(VectorUtils.copyPart(v, 0, featsCnt), VectorUtils.num2Vec(digit, 10));
}
Use of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class MnistDistributed, method testMNISTDistributed.
/**
 * Trains a multilayer perceptron on the MNIST training set, distributed across an
 * Ignite cache, then prints classification accuracy on a held-out test set.
 *
 * @throws IOException If the MNIST data files cannot be read.
 */
public void testMNISTDistributed() throws IOException {
// 60k training samples and 100 hidden neurons — presumably the full MNIST train split; TODO confirm.
int samplesCnt = 60_000;
int hiddenNeuronsCnt = 100;
IgniteBiTuple<Stream<DenseLocalOnHeapVector>, Stream<DenseLocalOnHeapVector>> trainingAndTest = loadMnist(samplesCnt);
// Load training mnist part into a cache.
Stream<DenseLocalOnHeapVector> trainingMnist = trainingAndTest.get1();
List<DenseLocalOnHeapVector> trainingMnistLst = trainingMnist.collect(Collectors.toList());
IgniteCache<Integer, LabeledVector<Vector, Vector>> labeledVectorsCache = LabeledVectorsCache.createNew(ignite);
loadIntoCache(trainingMnistLst, labeledVectorsCache);
// Group trainer: 35 global steps, synchronizing model state across nodes every 2 steps.
MLPGroupUpdateTrainer<RPropParameterUpdate> trainer = MLPGroupUpdateTrainer.getDefault(ignite).withMaxGlobalSteps(35).withSyncPeriod(2);
// Architecture: FEATURES_CNT inputs -> sigmoid hidden layer -> 10 sigmoid outputs (one per digit).
MLPArchitecture arch = new MLPArchitecture(FEATURES_CNT).withAddedLayer(hiddenNeuronsCnt, true, Activators.SIGMOID).withAddedLayer(10, false, Activators.SIGMOID);
MultilayerPerceptron mdl = trainer.train(new MLPGroupUpdateTrainerCacheInput(arch, 9, labeledVectorsCache, 2000));
// Evaluate on the test part: collapse one-hot label/prediction columns back to digit values.
IgniteBiTuple<Matrix, Matrix> testDs = createDataset(trainingAndTest.get2(), 10_000, FEATURES_CNT);
Vector truth = testDs.get2().foldColumns(VectorUtils::vec2Num);
Vector predicted = mdl.apply(testDs.get1()).foldColumns(VectorUtils::vec2Num);
Tracer.showAscii(truth);
Tracer.showAscii(predicted);
// Accuracy = fraction of columns where predicted digit equals true digit.
X.println("Accuracy: " + VectorUtils.zipWith(predicted, truth, (x, y) -> x.equals(y) ? 1.0 : 0.0).sum() / truth.size() * 100 + "%.");
}
Use of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class MLPGroupUpdateTrainerCacheInput, method batchSupplier.
/**
 * {@inheritDoc}
 */
@Override
public IgniteSupplier<IgniteBiTuple<Matrix, Matrix>> batchSupplier() {
String cName = cache.getName();
// Local copies prevent 'this' from being captured in the serialization context of the lambda below.
int bs = batchSize;
Random r = rand;
return () -> {
Ignite ignite = Ignition.localIgnite();
IgniteCache<Integer, LabeledVector<Vector, Vector>> cache = ignite.getOrCreateCache(cName);
int total = cache.size();
Affinity<Integer> affinity = ignite.affinity(cName);
// Assumes cache keys are exactly 0..total-1 — TODO confirm against the cache loader.
List<Integer> allKeys = IntStream.range(0, total).boxed().collect(Collectors.toList());
// Keep only the keys whose primary partition resides on this node.
List<Integer> keys = new ArrayList<>(affinity.mapKeysToNodes(allKeys).get(ignite.cluster().localNode()));
int locKeysCnt = keys.size();
// NOTE(review): if locKeysCnt == 0, 'selected' is empty and selected[0] below throws —
// verify callers guarantee every participating node holds at least one entry.
int[] selected = Utils.selectKDistinct(locKeysCnt, Math.min(bs, locKeysCnt), r);
// Get dimensions of vectors in cache. We suppose that every feature vector has
// the same dimension d1 and every label has the same dimension d2.
LabeledVector<Vector, Vector> dimEntry = cache.get(keys.get(selected[0]));
// Size the matrices by the number of samples actually drawn: when this node holds fewer
// than 'bs' local keys, sizing by 'bs' (as before) left all-zero padding columns that
// acted as fake zero-feature/zero-label samples during training.
Matrix inputs = new DenseLocalOnHeapMatrix(dimEntry.features().size(), selected.length);
Matrix groundTruth = new DenseLocalOnHeapMatrix(dimEntry.label().size(), selected.length);
for (int i = 0; i < selected.length; i++) {
LabeledVector<Vector, Vector> labeled = cache.get(keys.get(selected[i]));
inputs.assignColumn(i, labeled.features());
groundTruth.assignColumn(i, labeled.label());
}
return new IgniteBiTuple<>(inputs, groundTruth);
};
}
Use of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class KNNModel, method classify.
/**
 * Computes the predicted class label for a vector by voting among its nearest neighbors.
 *
 * @param neighbors Nearest neighbors of {@code v}, each carrying a numeric class label.
 * @param v Vector being classified.
 * @param stgy Voting strategy, passed through to {@code getClassVoteForVector}.
 * @return Class label that accumulated the highest vote total.
 */
private double classify(LabeledVector[] neighbors, Vector v, KNNStrategy stgy) {
Map<Double, Double> clsVotes = new HashMap<>();
for (int i = 0; i < neighbors.length; i++) {
LabeledVector neighbor = neighbors[i];
double clsLb = (double) neighbor.label();
// Reuse precomputed distances when available instead of recomputing the measure.
double distance = cachedDistances != null ? cachedDistances[i] : distanceMeasure.compute(v, neighbor.features());
// Map.merge replaces the containsKey/get/put dance: adds this neighbor's vote to any existing total.
clsVotes.merge(clsLb, getClassVoteForVector(stgy, distance), Double::sum);
}
return getClassWithMaxVotes(clsVotes);
}
Aggregations