Use of org.apache.ignite.ml.knn.classification.KNNClassificationTrainer in project ignite by apache.
From the class Step_6_KNN, method main.
/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 6 (kNN) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);

            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);

            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>()
                .withEncoderType(EncoderType.STRING_ENCODER)
                .withEncodedFeature(1)
                .withEncodedFeature(6) // <--- Changed index here.
                .fit(ignite, dataCache, vectorizer);

            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>()
                .fit(ignite, dataCache, strEncoderPreprocessor);

            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>()
                .fit(ignite, dataCache, imputingPreprocessor);

            Preprocessor<Integer, Vector> normalizationPreprocessor = new NormalizationTrainer<Integer, Vector>()
                .withP(1)
                .fit(ignite, dataCache, minMaxScalerPreprocessor);

            KNNClassificationTrainer trainer = new KNNClassificationTrainer()
                .withK(1)
                .withWeighted(true);

            // Train kNN model.
            KNNClassificationModel mdl = trainer.fit(ignite, dataCache, normalizationPreprocessor);

            System.out.println("\n>>> Trained model: " + mdl);

            double accuracy = Evaluator.evaluate(dataCache, mdl, normalizationPreprocessor, new Accuracy<>());

            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 6 (kNN) example completed.");
        }
        catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }
    finally {
        System.out.flush();
    }
}
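For completeness, the same preprocessing chain built above must also be applied at prediction time. The following is a minimal sketch, not part of the original tutorial step: the row key 1 is an assumed existing cache key, and it would sit inside the try block right after the accuracy printouts.

// Hypothetical follow-up: score one cached Titanic row with the trained kNN model.
// The raw row is passed through the same preprocessing chain that was used for training.
Vector rawRow = dataCache.get(1); // Assumed key of an existing passenger row.
Vector features = normalizationPreprocessor.apply(1, rawRow).features();
double predictedSurvival = mdl.predict(features); // 0.0 or 1.0 for the Titanic labels.
System.out.println(">>> Predicted class for row 1: " + predictedSurvival);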
Use of org.apache.ignite.ml.knn.classification.KNNClassificationTrainer in project ignite by apache.
From the class KNNClassificationTest, method testBinaryClassificationFarPointsWithSimpleStrategy.
/**
 * Binary classification on a dataset with two far-away points, using the simple (unweighted) voting strategy.
 */
@Test
public void testBinaryClassificationFarPointsWithSimpleStrategy() {
    Map<Integer, double[]> data = new HashMap<>();
    data.put(0, new double[] {10.0, 10.0, 1.0});
    data.put(1, new double[] {10.0, 20.0, 1.0});
    data.put(2, new double[] {-1.0, -1.0, 1.0});
    data.put(3, new double[] {-2.0, -2.0, 2.0});
    data.put(4, new double[] {-1.0, -2.0, 2.0});
    data.put(5, new double[] {-2.0, -1.0, 2.0});

    KNNClassificationTrainer trainer = new KNNClassificationTrainer()
        .withK(3)
        .withDistanceMeasure(new EuclideanDistance())
        .withWeighted(false);

    KNNClassificationModel knnMdl = trainer.fit(
        data, parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));

    assertEquals(2.0, knnMdl.predict(VectorUtils.of(-1.01, -1.01)), 0);
}
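By way of contrast, here is a hedged sketch of the same data fit with the distance-weighted strategy. The expected flip of the prediction to 1.0 assumes inverse-distance weighting, under which the single very close label-1.0 neighbor outweighs the two more distant label-2.0 neighbors; this variant is not part of the original test.

// Sketch: same far-points data, but with the weighted voting strategy enabled.
KNNClassificationTrainer weightedTrainer = new KNNClassificationTrainer()
    .withK(3)
    .withDistanceMeasure(new EuclideanDistance())
    .withWeighted(true);

KNNClassificationModel weightedMdl = weightedTrainer.fit(
    data, parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));

// Near (-1.01, -1.01) the closest neighbor carries label 1.0; with inverse-distance weighting
// its weight dominates the two label-2.0 neighbors, so the prediction is expected to flip to 1.0.
assertEquals(1.0, weightedMdl.predict(VectorUtils.of(-1.01, -1.01)), 0);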
Use of org.apache.ignite.ml.knn.classification.KNNClassificationTrainer in project ignite by apache.
From the class KNNClassificationTest, method testBinaryClassification.
/**
 * Basic binary classification on two well-separated clusters.
 */
@Test
public void testBinaryClassification() {
    Map<Integer, double[]> data = new HashMap<>();
    data.put(0, new double[] {1.0, 1.0, 1.0});
    data.put(1, new double[] {1.0, 2.0, 1.0});
    data.put(2, new double[] {2.0, 1.0, 1.0});
    data.put(3, new double[] {-1.0, -1.0, 2.0});
    data.put(4, new double[] {-1.0, -2.0, 2.0});
    data.put(5, new double[] {-2.0, -1.0, 2.0});

    KNNClassificationTrainer trainer = new KNNClassificationTrainer()
        .withK(3)
        .withDistanceMeasure(new EuclideanDistance())
        .withWeighted(false);

    KNNClassificationModel knnMdl = trainer.fit(
        data, parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));

    assertFalse(knnMdl.toString().isEmpty());
    assertFalse(knnMdl.toString(true).isEmpty());
    assertFalse(knnMdl.toString(false).isEmpty());

    Vector firstVector = VectorUtils.of(2.0, 2.0);
    assertEquals(1.0, knnMdl.predict(firstVector), 0);

    Vector secondVector = VectorUtils.of(-2.0, -2.0);
    assertEquals(2.0, knnMdl.predict(secondVector), 0);
}
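As a variation on the same data, the trainer can be configured with a different distance measure. A minimal sketch, assuming ManhattanDistance from org.apache.ignite.ml.math.distances; for these well-separated clusters the expected labels should not change. This variant is not part of the original test.

// Sketch: same clusters, fit with Manhattan (L1) distance instead of Euclidean.
KNNClassificationTrainer manhattanTrainer = new KNNClassificationTrainer()
    .withK(3)
    .withDistanceMeasure(new ManhattanDistance())
    .withWeighted(false);

KNNClassificationModel manhattanMdl = manhattanTrainer.fit(
    data, parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));

assertEquals(1.0, manhattanMdl.predict(VectorUtils.of(2.0, 2.0)), 0);
assertEquals(2.0, manhattanMdl.predict(VectorUtils.of(-2.0, -2.0)), 0);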
Use of org.apache.ignite.ml.knn.classification.KNNClassificationTrainer in project ignite by apache.
From the class BinaryClassificationEvaluatorTest, method testEvaluatorWithoutFilter.
/**
 * Test evaluator and trainer on classification model y = x.
 */
@Test
public void testEvaluatorWithoutFilter() {
    Map<Integer, Vector> cacheMock = new HashMap<>();

    for (int i = 0; i < twoLinearlySeparableClasses.length; i++)
        cacheMock.put(i, VectorUtils.of(twoLinearlySeparableClasses[i]));

    KNNClassificationTrainer trainer = new KNNClassificationTrainer().withK(3);

    Vectorizer<Integer, Vector, Integer, Double> vectorizer =
        new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

    KNNClassificationModel mdl = trainer.fit(cacheMock, parts, vectorizer);

    double score = Evaluator.evaluate(cacheMock, mdl, vectorizer, MetricName.ACCURACY);

    assertEquals(0.9919839679358717, score, 1e-12);
}
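A hedged sketch of the with-filter counterpart follows. It assumes TrainTestDatasetSplitter from org.apache.ignite.ml.selection.split and filter-accepting fit/evaluate overloads; the 0.75 split ratio is an illustrative choice, not taken from the original test.

// Sketch: train on ~75% of the rows and evaluate accuracy on the held-out ~25%.
TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);

KNNClassificationModel filteredMdl = trainer.fit(cacheMock, split.getTrainFilter(), parts, vectorizer);

double heldOutScore = Evaluator.evaluate(cacheMock, split.getTestFilter(), filteredMdl, vectorizer, MetricName.ACCURACY);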
Use of org.apache.ignite.ml.knn.classification.KNNClassificationTrainer in project ignite by apache.
From the class KNNClassificationExample, method main.
/**
 * Run example.
 */
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> kNN multi-class classification algorithm over cached dataset usage example started.");

    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");

        IgniteCache<Integer, Vector> dataCache = null;
        try {
            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);

            KNNClassificationTrainer trainer = new KNNClassificationTrainer()
                .withK(3)
                .withDistanceMeasure(new EuclideanDistance())
                .withWeighted(true)
                .withDataTtl(60);

            Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

            KNNClassificationModel mdl = trainer.fit(ignite, dataCache, vectorizer);

            double accuracy = Evaluator.evaluate(dataCache, mdl, vectorizer, MetricName.ACCURACY);

            System.out.println("\n>>> Accuracy " + accuracy);
        }
        finally {
            if (dataCache != null)
                dataCache.destroy();
        }
    }
    finally {
        System.out.flush();
    }
}
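After evaluation, the trained model can also score an individual observation. A minimal sketch, which would sit inside the try block right after the accuracy printout; the four feature values are hypothetical iris measurements (the label occupies the first coordinate of each dataset row, so only the remaining features are passed to predict).

// Sketch: classify a single (hypothetical) iris observation with the trained model.
Vector observation = VectorUtils.of(5.1, 3.5, 1.4, 0.2); // Assumed sepal/petal measurements.
double predictedCls = mdl.predict(observation);
System.out.println(">>> Predicted class: " + predictedCls);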