Usage example of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class Deltas, method calculateUpdates.
/**
 * Runs local coordinate-ascent iterations on every partition of the dataset and
 * aggregates the resulting weight deltas across partitions.
 *
 * <p>Each partition copies the current global weights, performs
 * {@code getAmountOfLocIterations()} updates on randomly chosen rows and returns the
 * accumulated weight delta; partition results are summed by the reducer.</p>
 *
 * @param weights Current global weight vector (not mutated; copied per partition).
 * @param dataset Partitioned labeled training dataset.
 * @return Sum of the weight deltas computed on all partitions.
 */
private Vector calculateUpdates(Vector weights, Dataset<SVMPartitionContext, LabeledDataset<Double, LabeledVector>> dataset) {
return dataset.compute(data -> {
Vector copiedWeights = weights.copy();
Vector deltaWeights = initializeWeightsWithZeros(weights.size());
final int amountOfObservation = data.rowSize();
// Dual variables (alphas), one per local row, all starting at zero.
Vector tmpAlphas = initializeWeightsWithZeros(amountOfObservation);
for (int i = 0; i < this.getAmountOfLocIterations(); i++) {
// Pick a random row for this stochastic update.
int randomIdx = ThreadLocalRandom.current().nextInt(amountOfObservation);
Deltas deltas = getDeltas(data, copiedWeights, amountOfObservation, tmpAlphas, randomIdx);
// plus() creates a new vector; locals are rebound, not mutated in place.
copiedWeights = copiedWeights.plus(deltas.deltaWeights);
deltaWeights = deltaWeights.plus(deltas.deltaWeights);
// Accumulate the dual update for the chosen row.
tmpAlphas.set(randomIdx, tmpAlphas.get(randomIdx) + deltas.deltaAlpha);
// NOTE(review): a deltaAlphas accumulator existed here but was never read
// anywhere in the method, so it has been removed as dead code.
}
return deltaWeights;
}, (a, b) -> a == null ? b : a.plus(b));
}
Usage example of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class Deltas, method makeVectorWithInterceptElement.
/**
 * Builds a copy of the row's feature vector prefixed with a constant intercept (bias)
 * element. The result has size {@code features.size() + 1}: index 0 holds the constant 1
 * and the original features are shifted to indices 1..n.
 *
 * @param row Labeled vector whose features are copied.
 * @return New vector of the same storage kind as the features, with the intercept at index 0.
 */
private Vector makeVectorWithInterceptElement(LabeledVector row) {
// Hoist the accessor: the original re-invoked row.features() on every loop iteration.
Vector features = row.features();
int featureCnt = features.size();
Vector vec = features.like(featureCnt + 1);
// Intercept (bias) term occupies the first slot.
vec.set(0, 1);
for (int j = 0; j < featureCnt; j++)
vec.set(j + 1, features.get(j));
return vec;
}
Usage example of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class Deltas, method getDeltas.
/**
 * Computes the (weight delta, alpha delta) pair for one randomly selected training row.
 *
 * @param data Local labeled dataset partition.
 * @param copiedWeights Current local copy of the weight vector.
 * @param amountOfObservation Number of rows in the partition.
 * @param tmpAlphas Current dual variables (alphas), one per row.
 * @param randomIdx Index of the row to process.
 * @return Deltas produced by the dual maximization step for the selected row.
 */
private Deltas getDeltas(LabeledDataset data, Vector copiedWeights, int amountOfObservation, Vector tmpAlphas, int randomIdx) {
LabeledVector selectedRow = (LabeledVector) data.getRow(randomIdx);
Double lbl = (Double) selectedRow.label();
// Features extended with a leading constant-1 intercept element.
Vector extendedVec = makeVectorWithInterceptElement(selectedRow);
double curAlpha = tmpAlphas.get(randomIdx);
return maximize(lbl, extendedVec, curAlpha, copiedWeights, amountOfObservation);
}
Usage example of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class KNNModel, method getDistances.
/**
 * Computes distances between the given vector and every vector in the training dataset.
 *
 * @param v The given vector.
 * @param trainingData The training dataset; {@code null} entries are skipped.
 * @return Sorted map from distance to the set of row indices at that distance. The value
 * is a Set because several training vectors may lie at exactly the same distance.
 */
@NotNull
private TreeMap<Double, Set<Integer>> getDistances(Vector v, LabeledVector[] trainingData) {
TreeMap<Double, Set<Integer>> distanceIdxPairs = new TreeMap<>();
for (int idx = 0; idx < trainingData.length; idx++) {
LabeledVector candidate = trainingData[idx];
// Gaps (null slots) in the training array are simply ignored.
if (candidate == null)
continue;
putDistanceIdxPair(distanceIdxPairs, idx, distanceMeasure.compute(v, candidate.features()));
}
return distanceIdxPairs;
}
Usage example of org.apache.ignite.ml.structures.LabeledVector in the Apache Ignite project:
class MLPGroupTrainerTest, method doTestXOR.
/**
 * Test training of 'xor' by {@link MLPGroupUpdateTrainer}.
 *
 * <p>Streams 1000 randomly sampled XOR rows into an Ignite cache, trains a
 * 2-10-1 perceptron {@code totalCnt} times with different seeds, and asserts
 * that the fraction of runs whose predictions fall outside a 0.5 neighbourhood
 * of the expected outputs stays below {@code maxFailRatio}.</p>
 *
 * @param stgy Parameter-update strategy under test.
 * @param <U> Type of the updater's serializable state.
 */
private <U extends Serializable> void doTestXOR(UpdatesStrategy<? super MultilayerPerceptron, U> stgy) {
int samplesCnt = 1000;
// XOR truth table; transposed so each sample is a column (inputs: 2 x 4, outputs: 1 x 4).
Matrix xorInputs = new DenseLocalOnHeapMatrix(new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } }, StorageConstants.ROW_STORAGE_MODE).transpose();
Matrix xorOutputs = new DenseLocalOnHeapMatrix(new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } }, StorageConstants.ROW_STORAGE_MODE).transpose();
// Network: 2 inputs -> 10 ReLU hidden units (with bias) -> 1 sigmoid output.
MLPArchitecture conf = new MLPArchitecture(2).withAddedLayer(10, true, Activators.RELU).withAddedLayer(1, false, Activators.SIGMOID);
IgniteCache<Integer, LabeledVector<Vector, Vector>> cache = LabeledVectorsCache.createNew(ignite);
String cacheName = cache.getName();
// Fixed seed keeps the sampled training set reproducible across runs.
Random rnd = new Random(12345L);
try (IgniteDataStreamer<Integer, LabeledVector<Vector, Vector>> streamer = ignite.dataStreamer(cacheName)) {
streamer.perNodeBufferSize(10000);
for (int i = 0; i < samplesCnt; i++) {
// Uniformly pick one of the 4 truth-table columns per sample.
int col = Math.abs(rnd.nextInt()) % 4;
streamer.addData(i, new LabeledVector<>(xorInputs.getCol(col), xorOutputs.getCol(col)));
}
}
int totalCnt = 30;
int failCnt = 0;
// Training is stochastic, so individual failures are tolerated up to this ratio.
double maxFailRatio = 0.3;
MLPGroupUpdateTrainer<U> trainer = MLPGroupUpdateTrainer.getDefault(ignite).withSyncPeriod(3).withTolerance(0.001).withMaxGlobalSteps(100).withUpdateStrategy(stgy);
for (int i = 0; i < totalCnt; i++) {
// Seed varies per run (123L + i) so each trial starts from different weights.
MLPGroupUpdateTrainerCacheInput trainerInput = new MLPGroupUpdateTrainerCacheInput(conf, new RandomInitializer(new Random(123L + i)), 6, cache, 10, new Random(123L + i));
MultilayerPerceptron mlp = trainer.train(trainerInput);
Matrix predict = mlp.apply(xorInputs);
// Debug output: predicted matrix and L2 distance to the expected outputs.
Tracer.showAscii(predict);
X.println(xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2) + "");
// A run counts as failed if any prediction is farther than 0.5 from the target.
failCnt += TestUtils.checkIsInEpsilonNeighbourhoodBoolean(xorOutputs.getRow(0), predict.getRow(0), 5E-1) ? 0 : 1;
}
double failRatio = (double) failCnt / totalCnt;
System.out.println("Fail percentage: " + (failRatio * 100) + "%.");
assertTrue(failRatio < maxFailRatio);
}
Aggregations