Use of org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector in project Ignite by Apache:
the class SVMMultiClassClassificationExample, method main.
/**
 * Runs the example: trains two multi-class SVM models (with and without feature
 * normalization) on a cached dataset, then prints per-row predictions, error counts,
 * accuracy and confusion matrices for both models.
 *
 * @param args Command line arguments, none required.
 * @throws InterruptedException If waiting for the example thread to finish is interrupted.
 */
public static void main(String[] args) throws InterruptedException {
    System.out.println();
    System.out.println(">>> SVM Multi-class classification model over cached dataset usage example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");
        // ML work is done on a dedicated Ignite thread so it runs inside the grid context.
        IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(), SVMMultiClassClassificationExample.class.getSimpleName(), () -> {
            IgniteCache<Integer, double[]> dataCache = getTestCache(ignite);
            // Each cached row is [label, feature1, ..., featureN]: label is column 0,
            // the feature extractor takes the remaining columns.
            SVMLinearMultiClassClassificationTrainer<Integer, double[]> trainer = new SVMLinearMultiClassClassificationTrainer<>();
            SVMLinearMultiClassClassificationModel mdl = trainer.fit(new CacheBasedDatasetBuilder<>(ignite, dataCache), (k, v) -> Arrays.copyOfRange(v, 1, v.length), (k, v) -> v[0], 5);
            System.out.println(">>> SVM Multi-class model");
            System.out.println(mdl.toString());
            // Train a second model on normalized features for comparison.
            NormalizationTrainer<Integer, double[]> normalizationTrainer = new NormalizationTrainer<>();
            NormalizationPreprocessor<Integer, double[]> preprocessor = normalizationTrainer.fit(new CacheBasedDatasetBuilder<>(ignite, dataCache), (k, v) -> Arrays.copyOfRange(v, 1, v.length), 5);
            SVMLinearMultiClassClassificationModel mdlWithNormalization = trainer.fit(new CacheBasedDatasetBuilder<>(ignite, dataCache), preprocessor, (k, v) -> v[0], 5);
            System.out.println(">>> SVM Multi-class model with normalization");
            System.out.println(mdlWithNormalization.toString());
            System.out.println(">>> ----------------------------------------------------------------");
            System.out.println(">>> | Prediction\t| Prediction with Normalization\t| Ground Truth\t|");
            System.out.println(">>> ----------------------------------------------------------------");
            int amountOfErrors = 0;
            int amountOfErrorsWithNormalization = 0;
            int totalAmount = 0;
            // Build confusion matrix. See https://en.wikipedia.org/wiki/Confusion_matrix
            int[][] confusionMtx = { { 0, 0, 0 }, { 0, 0, 0 }, { 0, 0, 0 } };
            int[][] confusionMtxWithNormalization = { { 0, 0, 0 }, { 0, 0, 0 }, { 0, 0, 0 } };
            // Score both models over the full cached dataset.
            try (QueryCursor<Cache.Entry<Integer, double[]>> observations = dataCache.query(new ScanQuery<>())) {
                for (Cache.Entry<Integer, double[]> observation : observations) {
                    double[] val = observation.getValue();
                    double[] inputs = Arrays.copyOfRange(val, 1, val.length);
                    double groundTruth = val[0];
                    double prediction = mdl.apply(new DenseLocalOnHeapVector(inputs));
                    double predictionWithNormalization = mdlWithNormalization.apply(new DenseLocalOnHeapVector(inputs));
                    totalAmount++;
                    // Collect data for model
                    if (groundTruth != prediction)
                        amountOfErrors++;
                    confusionMtx[classIdx(prediction)][classIdx(groundTruth)]++;
                    // Collect data for model with normalization
                    if (groundTruth != predictionWithNormalization)
                        amountOfErrorsWithNormalization++;
                    confusionMtxWithNormalization[classIdx(predictionWithNormalization)][classIdx(groundTruth)]++;
                    System.out.printf(">>> | %.4f\t\t| %.4f\t\t\t\t\t\t| %.4f\t\t|\n", prediction, predictionWithNormalization, groundTruth);
                }
                System.out.println(">>> ----------------------------------------------------------------");
                System.out.println("\n>>> -----------------SVM model-------------");
                System.out.println("\n>>> Absolute amount of errors " + amountOfErrors);
                System.out.println("\n>>> Accuracy " + (1 - amountOfErrors / (double) totalAmount));
                System.out.println("\n>>> Confusion matrix is " + Arrays.deepToString(confusionMtx));
                System.out.println("\n>>> -----------------SVM model with Normalization-------------");
                System.out.println("\n>>> Absolute amount of errors " + amountOfErrorsWithNormalization);
                System.out.println("\n>>> Accuracy " + (1 - amountOfErrorsWithNormalization / (double) totalAmount));
                System.out.println("\n>>> Confusion matrix is " + Arrays.deepToString(confusionMtxWithNormalization));
            }
        });
        igniteThread.start();
        igniteThread.join();
    }
}

/**
 * Maps a class label to its confusion-matrix index: label 1 maps to 0,
 * label 3 maps to 1, any other label maps to 2.
 *
 * @param lbl Class label (truncated to int for comparison).
 * @return Confusion-matrix row/column index in {0, 1, 2}.
 */
private static int classIdx(double lbl) {
    if ((int) lbl == 1)
        return 0;
    return (int) lbl == 3 ? 1 : 2;
}
Use of org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector in project Ignite by Apache:
the class VectorExample, method main.
/**
 * Executes example.
 *
 * Builds two orthogonal vectors, checks that their dot product is zero, and
 * verifies the Pythagorean relation on the squared length of their sum.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Basic Vector API usage example started.");
    System.out.println("\n>>> Creating perpendicular vectors.");
    // Non-zero entries of the two arrays never share an index, so the vectors are orthogonal.
    double[] arr1 = new double[] { 1, 0, 3, 0, 5, 0 };
    double[] arr2 = new double[] { 0, 2, 0, 4, 0, 6 };
    Vector vec1 = new DenseLocalOnHeapVector(arr1);
    Vector vec2 = new DenseLocalOnHeapVector(arr2);
    System.out.println(">>> First vector: " + Arrays.toString(arr1));
    System.out.println(">>> Second vector: " + Arrays.toString(arr2));
    // Orthogonality check: the dot product of perpendicular vectors is zero.
    double dot = vec1.dot(vec2);
    boolean dotProductIsAsExp = dot == 0;
    System.out.println("\n>>> Dot product of vectors: [" + dot + "], it is 0 as expected: [" + dotProductIsAsExp + "].");
    // The sum of two perpendicular vectors is the hypotenuse of the triangle they span.
    Vector hypotenuse = vec1.plus(vec2);
    System.out.println("\n>>> Hypotenuse (sum of vectors): " + Arrays.toString(hypotenuse.getStorage().data()));
    double sqLen1 = vec1.getLengthSquared();
    double sqLen2 = vec2.getLengthSquared();
    double sqLenHyp = hypotenuse.getLengthSquared();
    // Pythagorean theorem: |a + b|^2 == |a|^2 + |b|^2 when a and b are perpendicular.
    boolean lenSquaredHypotenuseIsAsExp = sqLenHyp == sqLen1 + sqLen2;
    System.out.println(">>> Squared length of first vector: [" + sqLen1 + "].");
    System.out.println(">>> Squared length of second vector: [" + sqLen2 + "].");
    System.out.println(">>> Squared length of hypotenuse: [" + sqLenHyp + "], equals sum of squared lengths of two original vectors as expected: [" + lenSquaredHypotenuseIsAsExp + "].");
    System.out.println("\n>>> Basic Vector API usage example completed.");
}
Use of org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector in project Ignite by Apache:
the class FuzzyCMeansDistributedClusterer, method calculateMembership.
/**
 * Calculate matrix of membership coefficients for each point and each center.
 *
 * Performs a distributed fold over the cache entries backing {@code points}:
 * each entry contributes one membership vector (one coefficient per center),
 * and partial results are merged pairwise into a single {@link MembershipsAndSums}.
 *
 * @param points Matrix with source points.
 * @param centers Array of current centers.
 * @return Membership matrix and sums of membership coefficients for each center.
 */
private MembershipsAndSums calculateMembership(SparseDistributedMatrix points, Vector[] centers) {
    // Cache name and UUID identify the entries that belong to this matrix.
    String cacheName = ((SparseDistributedMatrixStorage) points.getStorage()).cacheName();
    UUID uuid = points.getUUID();
    // Fuzzy c-means exponent 2/(m - 1) applied to distance ratios below.
    // NOTE(review): assumes exponentialWeight is a floating-point field; if it were
    // an int, "2 / (exponentialWeight - 1)" would truncate — confirm its declaration.
    double fuzzyMembershipCoefficient = 2 / (exponentialWeight - 1);
    // Supplier creates the zero-valued accumulator on each node.
    MembershipsAndSumsSupplier supplier = new MembershipsAndSumsSupplier(centers.length);
    return CacheUtils.distributedFold(cacheName, (IgniteBiFunction<Cache.Entry<SparseMatrixKey, ConcurrentHashMap<Integer, Double>>, MembershipsAndSums, MembershipsAndSums>) (vectorWithIndex, membershipsAndSums) -> {
        Integer idx = vectorWithIndex.getKey().index();
        // Rebuild the (sparse) point from its map representation.
        Vector pnt = VectorUtils.fromMap(vectorWithIndex.getValue(), false);
        Vector distances = new DenseLocalOnHeapVector(centers.length);
        Vector pntMemberships = new DenseLocalOnHeapVector(centers.length);
        // Distance from this point to every current center.
        for (int i = 0; i < centers.length; i++) distances.setX(i, distance(centers[i], pnt));
        for (int i = 0; i < centers.length; i++) {
            double invertedFuzzyWeight = 0.0;
            // Sum of (d_i / d_j)^(2/(m-1)) over all centers j.
            for (int j = 0; j < centers.length; j++) {
                double val = Math.pow(distances.getX(i) / distances.getX(j), fuzzyMembershipCoefficient);
                // 0/0 (point coincides with both centers) yields NaN; treat the ratio as 1.
                if (Double.isNaN(val))
                    val = 1.0;
                invertedFuzzyWeight += val;
            }
            // Membership raised to the exponential weight, ready for weighted center updates.
            double membership = Math.pow(1.0 / invertedFuzzyWeight, exponentialWeight);
            pntMemberships.setX(i, membership);
        }
        // Record this point's membership row and fold it into the per-center sums.
        membershipsAndSums.memberships.put(idx, pntMemberships);
        membershipsAndSums.membershipSums = membershipsAndSums.membershipSums.plus(pntMemberships);
        return membershipsAndSums;
    }, key -> key.dataStructureId().equals(uuid), (mem1, mem2) -> {
        // Merge partial accumulators produced on different nodes.
        mem1.merge(mem2);
        return mem1;
    }, supplier);
}
Use of org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector in project Ignite by Apache:
the class LocalModelsTest, method importExportLinearRegressionModelTest.
/**
 * Verifies that a {@link LinearRegressionModel} survives a file export/import
 * round trip unchanged.
 */
@Test
public void importExportLinearRegressionModelTest() throws IOException {
    executeModelTest(mdlFilePath -> {
        // Model with weight vector [1, 2] and intercept 3.
        LinearRegressionModel model = new LinearRegressionModel(new DenseLocalOnHeapVector(new double[] { 1, 2 }), 3);
        Exporter<LinearRegressionModel, String> exporter = new FileExporter<>();
        model.saveModel(exporter, mdlFilePath);
        LinearRegressionModel load = exporter.load(mdlFilePath);
        // Meaningful failure messages instead of the empty string, so a failed
        // run immediately states which guarantee was broken.
        Assert.assertNotNull("Imported model is null.", load);
        Assert.assertEquals("Imported model is not equal to the exported one.", model, load);
        return null;
    });
}
Use of org.apache.ignite.ml.math.impls.vector.DenseLocalOnHeapVector in project Ignite by Apache:
the class LocalModelsTest, method importExportSVMMulticlassClassificationModelTest.
/**
 * Verifies that a {@link SVMLinearMultiClassClassificationModel} composed of three
 * binary sub-models survives a file export/import round trip unchanged.
 */
@Test
public void importExportSVMMulticlassClassificationModelTest() throws IOException {
    executeModelTest(mdlFilePath -> {
        // Three one-vs-rest binary models with distinct weights and intercepts.
        SVMLinearBinaryClassificationModel binaryMdl1 = new SVMLinearBinaryClassificationModel(new DenseLocalOnHeapVector(new double[] { 1, 2 }), 3);
        SVMLinearBinaryClassificationModel binaryMdl2 = new SVMLinearBinaryClassificationModel(new DenseLocalOnHeapVector(new double[] { 2, 3 }), 4);
        SVMLinearBinaryClassificationModel binaryMdl3 = new SVMLinearBinaryClassificationModel(new DenseLocalOnHeapVector(new double[] { 3, 4 }), 5);
        SVMLinearMultiClassClassificationModel mdl = new SVMLinearMultiClassClassificationModel();
        mdl.add(1, binaryMdl1);
        mdl.add(2, binaryMdl2);
        mdl.add(3, binaryMdl3);
        Exporter<SVMLinearMultiClassClassificationModel, String> exporter = new FileExporter<>();
        mdl.saveModel(exporter, mdlFilePath);
        SVMLinearMultiClassClassificationModel load = exporter.load(mdlFilePath);
        // Meaningful failure messages instead of the empty string, so a failed
        // run immediately states which guarantee was broken.
        Assert.assertNotNull("Imported model is null.", load);
        Assert.assertEquals("Imported model is not equal to the exported one.", mdl, load);
        return null;
    });
}
Aggregations