Example 1 with KNNClassificationModel

use of org.apache.ignite.ml.knn.classification.KNNClassificationModel in project ignite by apache.

the class Step_6_KNN method main.

/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 6 (kNN) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>()
                .withEncoderType(EncoderType.STRING_ENCODER)
                .withEncodedFeature(1)
                .withEncodedFeature(6) // <--- Changed index here.
                .fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>()
                .fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>()
                .fit(ignite, dataCache, imputingPreprocessor);
            Preprocessor<Integer, Vector> normalizationPreprocessor = new NormalizationTrainer<Integer, Vector>()
                .withP(1)
                .fit(ignite, dataCache, minMaxScalerPreprocessor);
            KNNClassificationTrainer trainer = new KNNClassificationTrainer().withK(1).withWeighted(true);
            // Train kNN classification model.
            KNNClassificationModel mdl = trainer.fit(ignite, dataCache, normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + mdl);
            double accuracy = Evaluator.evaluate(dataCache, mdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 6 (kNN) example completed.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used: KNNClassificationTrainer (org.apache.ignite.ml.knn.classification.KNNClassificationTrainer), FileNotFoundException (java.io.FileNotFoundException), NormalizationTrainer (org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer), KNNClassificationModel (org.apache.ignite.ml.knn.classification.KNNClassificationModel), Ignite (org.apache.ignite.Ignite), EncoderTrainer (org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer), Vector (org.apache.ignite.ml.math.primitives.vector.Vector)
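
A note on using the fitted chain outside of Evaluator: the preprocessors above form a pipeline (string encoding, imputing, min-max scaling, normalization), and the exact same fitted chain has to be applied to a row before it is handed to the model. The helper below is a minimal sketch that is not part of the Ignite example; it only assumes the mdl, normalizationPreprocessor and dataCache objects produced in the snippet above, plus the standard Preprocessor.apply and LabeledVector.features calls.

import org.apache.ignite.IgniteCache;
import org.apache.ignite.ml.knn.classification.KNNClassificationModel;
import org.apache.ignite.ml.math.primitives.vector.Vector;
import org.apache.ignite.ml.preprocessing.Preprocessor;

/** Hypothetical helper, not part of Step_6_KNN: scores a single cached Titanic row. */
public class KnnSingleRowScorer {
    /** Applies the fitted preprocessor chain to one cached row and predicts its label. */
    public static double predictForKey(IgniteCache<Integer, Vector> dataCache,
        Preprocessor<Integer, Vector> preprocessor, KNNClassificationModel mdl, Integer key) {
        // The fitted preprocessor maps the raw (key, row) pair into the same feature space the model was trained on.
        Vector features = preprocessor.apply(key, dataCache.get(key)).features();
        // Predict the encoded "survived" label for that single passenger.
        return mdl.predict(features);
    }
}

Inside the example above this would be called as KnnSingleRowScorer.predictForKey(dataCache, normalizationPreprocessor, mdl, someKey), where someKey is any key present in the cache.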

Example 2 with KNNClassificationModel

use of org.apache.ignite.ml.knn.classification.KNNClassificationModel in project ignite by apache.

the class KNNClassificationTest method testBinaryClassificationFarPointsWithSimpleStrategy.

/**
 * Tests binary classification with the simple (unweighted) voting strategy on data that contains far-away points.
 */
@Test
public void testBinaryClassificationFarPointsWithSimpleStrategy() {
    Map<Integer, double[]> data = new HashMap<>();
    data.put(0, new double[] { 10.0, 10.0, 1.0 });
    data.put(1, new double[] { 10.0, 20.0, 1.0 });
    data.put(2, new double[] { -1, -1, 1.0 });
    data.put(3, new double[] { -2, -2, 2.0 });
    data.put(4, new double[] { -1.0, -2.0, 2.0 });
    data.put(5, new double[] { -2.0, -1.0, 2.0 });
    KNNClassificationTrainer trainer = new KNNClassificationTrainer().withK(3).withDistanceMeasure(new EuclideanDistance()).withWeighted(false);
    KNNClassificationModel knnMdl = trainer.fit(data, parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));
    // The three nearest neighbors of (-1.01, -1.01) are (-1, -1) with label 1.0 and (-1, -2), (-2, -1)
    // with label 2.0, so simple majority voting predicts 2.0.
    assertEquals(2.0, knnMdl.predict(VectorUtils.of(-1.01, -1.01)), 0);
}
Also used: EuclideanDistance (org.apache.ignite.ml.math.distances.EuclideanDistance), DoubleArrayVectorizer (org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer), HashMap (java.util.HashMap), KNNClassificationTrainer (org.apache.ignite.ml.knn.classification.KNNClassificationTrainer), KNNClassificationModel (org.apache.ignite.ml.knn.classification.KNNClassificationModel), Test (org.junit.Test)
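
For contrast, here is a hedged sketch of the same far-points data with weighted voting switched on. It reuses the imports and the parts field of the test above; the test name is made up here, and the expected value rests on the assumption that the weighted strategy uses inverse-distance weights, in which case the single very close label-1.0 neighbor at (-1, -1) outweighs the two farther label-2.0 neighbors and the prediction flips to 1.0.

/** Hedged sketch, not the original test: same data, distance-weighted voting. */
@Test
public void testBinaryClassificationFarPointsWithWeightedVotingSketch() {
    Map<Integer, double[]> data = new HashMap<>();
    data.put(0, new double[] { 10.0, 10.0, 1.0 });
    data.put(1, new double[] { 10.0, 20.0, 1.0 });
    data.put(2, new double[] { -1, -1, 1.0 });
    data.put(3, new double[] { -2, -2, 2.0 });
    data.put(4, new double[] { -1.0, -2.0, 2.0 });
    data.put(5, new double[] { -2.0, -1.0, 2.0 });
    KNNClassificationTrainer trainer = new KNNClassificationTrainer()
        .withK(3)
        .withDistanceMeasure(new EuclideanDistance())
        .withWeighted(true); // <--- Only change: weighted voting instead of the simple strategy.
    KNNClassificationModel knnMdl = trainer.fit(data, parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));
    // (-1.01, -1.01) sits almost on top of the label-1.0 point (-1, -1), so its inverse-distance
    // weight dominates the two label-2.0 neighbors at distance ~0.99.
    assertEquals(1.0, knnMdl.predict(VectorUtils.of(-1.01, -1.01)), 0);
}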

Example 3 with KNNClassificationModel

use of org.apache.ignite.ml.knn.classification.KNNClassificationModel in project ignite by apache.

the class KNNClassificationTest method testBinaryClassification.

/**
 * Tests binary classification on two well-separated clusters.
 */
@Test
public void testBinaryClassification() {
    Map<Integer, double[]> data = new HashMap<>();
    data.put(0, new double[] { 1.0, 1.0, 1.0 });
    data.put(1, new double[] { 1.0, 2.0, 1.0 });
    data.put(2, new double[] { 2.0, 1.0, 1.0 });
    data.put(3, new double[] { -1.0, -1.0, 2.0 });
    data.put(4, new double[] { -1.0, -2.0, 2.0 });
    data.put(5, new double[] { -2.0, -1.0, 2.0 });
    KNNClassificationTrainer trainer = new KNNClassificationTrainer().withK(3).withDistanceMeasure(new EuclideanDistance()).withWeighted(false);
    KNNClassificationModel knnMdl = trainer.fit(data, parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));
    assertTrue(!knnMdl.toString().isEmpty());
    assertTrue(!knnMdl.toString(true).isEmpty());
    assertTrue(!knnMdl.toString(false).isEmpty());
    Vector firstVector = VectorUtils.of(2.0, 2.0);
    assertEquals(1.0, knnMdl.predict(firstVector), 0);
    Vector secondVector = VectorUtils.of(-2.0, -2.0);
    assertEquals(2.0, knnMdl.predict(secondVector), 0);
}
Also used: EuclideanDistance (org.apache.ignite.ml.math.distances.EuclideanDistance), DoubleArrayVectorizer (org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer), HashMap (java.util.HashMap), KNNClassificationTrainer (org.apache.ignite.ml.knn.classification.KNNClassificationTrainer), KNNClassificationModel (org.apache.ignite.ml.knn.classification.KNNClassificationModel), Vector (org.apache.ignite.ml.math.primitives.vector.Vector), Test (org.junit.Test)

Example 4 with KNNClassificationModel

use of org.apache.ignite.ml.knn.classification.KNNClassificationModel in project ignite by apache.

the class BinaryClassificationEvaluatorTest method testEvaluatorWithoutFilter.

/**
 * Test evaluator and trainer on classification model y = x.
 */
@Test
public void testEvaluatorWithoutFilter() {
    Map<Integer, Vector> cacheMock = new HashMap<>();
    for (int i = 0; i < twoLinearlySeparableClasses.length; i++) cacheMock.put(i, VectorUtils.of(twoLinearlySeparableClasses[i]));
    KNNClassificationTrainer trainer = new KNNClassificationTrainer().withK(3);
    Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
    KNNClassificationModel mdl = trainer.fit(cacheMock, parts, vectorizer);
    double score = Evaluator.evaluate(cacheMock, mdl, vectorizer, MetricName.ACCURACY);
    assertEquals(0.9919839679358717, score, 1e-12);
}
Also used: HashMap (java.util.HashMap), KNNClassificationTrainer (org.apache.ignite.ml.knn.classification.KNNClassificationTrainer), KNNClassificationModel (org.apache.ignite.ml.knn.classification.KNNClassificationModel), Vector (org.apache.ignite.ml.math.primitives.vector.Vector), TrainerTest (org.apache.ignite.ml.common.TrainerTest), Test (org.junit.Test)
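
The aggregation at the end of this page also lists SHA256UniformMapper and Random, which belong to the companion case where the evaluator only scores a held-out part of the data. The sketch below is hedged: it assumes TrainTestDatasetSplitter and TrainTestSplit from org.apache.ignite.ml.selection.split behave as in other Ignite ML examples, that the trainer and Evaluator accept a train/test filter as an extra argument, and it deliberately asserts only a loose bound instead of guessing the exact score.

/** Hedged sketch, not the original test: evaluate accuracy on the test part of a 75/25 split. */
@Test
public void testEvaluatorWithFilterSketch() {
    Map<Integer, Vector> cacheMock = new HashMap<>();
    for (int i = 0; i < twoLinearlySeparableClasses.length; i++)
        cacheMock.put(i, VectorUtils.of(twoLinearlySeparableClasses[i]));
    KNNClassificationTrainer trainer = new KNNClassificationTrainer().withK(3);
    Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
    // Deterministic 75/25 split driven by a seeded SHA256-based uniform mapper.
    TrainTestSplit<Integer, Vector> split =
        new TrainTestDatasetSplitter<Integer, Vector>(new SHA256UniformMapper<>(new Random(0))).split(0.75);
    // Train only on the train filter, score accuracy only on the test filter.
    KNNClassificationModel mdl = trainer.fit(cacheMock, split.getTrainFilter(), parts, vectorizer);
    double score = Evaluator.evaluate(cacheMock, split.getTestFilter(), mdl, vectorizer, MetricName.ACCURACY);
    assertTrue(score > 0.8);
}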

Example 5 with KNNClassificationModel

use of org.apache.ignite.ml.knn.classification.KNNClassificationModel in project ignite by apache.

the class KNNClassificationExample method main.

/**
 * Run example.
 */
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> kNN multi-class classification algorithm over cached dataset usage example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");
        IgniteCache<Integer, Vector> dataCache = null;
        try {
            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);
            KNNClassificationTrainer trainer = new KNNClassificationTrainer().withK(3).withDistanceMeasure(new EuclideanDistance()).withWeighted(true).withDataTtl(60);
            Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
            KNNClassificationModel mdl = trainer.fit(ignite, dataCache, vectorizer);
            double accuracy = Evaluator.evaluate(dataCache, mdl, vectorizer, MetricName.ACCURACY);
            System.out.println("\n>>> Accuracy " + accuracy);
        } finally {
            if (dataCache != null)
                dataCache.destroy();
        }
    } finally {
        System.out.flush();
    }
}
Also used: SandboxMLCache (org.apache.ignite.examples.ml.util.SandboxMLCache), EuclideanDistance (org.apache.ignite.ml.math.distances.EuclideanDistance), KNNClassificationTrainer (org.apache.ignite.ml.knn.classification.KNNClassificationTrainer), Ignite (org.apache.ignite.Ignite), KNNClassificationModel (org.apache.ignite.ml.knn.classification.KNNClassificationModel), Vector (org.apache.ignite.ml.math.primitives.vector.Vector)
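
After the aggregate accuracy is printed, individual predictions can be inspected by scanning the cache and comparing the model output with the ground-truth label stored in the first coordinate (LabelCoordinate.FIRST above). The loop below is a minimal sketch that is not part of the original example; it assumes the mdl and dataCache variables from the snippet above plus the standard ScanQuery/QueryCursor cache API (javax.cache.Cache, org.apache.ignite.cache.query.ScanQuery, org.apache.ignite.cache.query.QueryCursor), and would sit inside the inner try block.

// Minimal sketch: print ground truth vs. prediction for every cached observation.
try (QueryCursor<Cache.Entry<Integer, Vector>> observations = dataCache.query(new ScanQuery<>())) {
    for (Cache.Entry<Integer, Vector> observation : observations) {
        Vector row = observation.getValue();
        // The label is the first coordinate; the remaining coordinates are the features.
        double groundTruth = row.get(0);
        double prediction = mdl.predict(row.copyOfRange(1, row.size()));
        System.out.printf(">>> Ground truth: %.1f, prediction: %.1f%n", groundTruth, prediction);
    }
}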

Aggregations

KNNClassificationModel (org.apache.ignite.ml.knn.classification.KNNClassificationModel): 10
KNNClassificationTrainer (org.apache.ignite.ml.knn.classification.KNNClassificationTrainer): 10
HashMap (java.util.HashMap): 7
EuclideanDistance (org.apache.ignite.ml.math.distances.EuclideanDistance): 7
Vector (org.apache.ignite.ml.math.primitives.vector.Vector): 7
Test (org.junit.Test): 7
DoubleArrayVectorizer (org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer): 5
Ignite (org.apache.ignite.Ignite): 3
SandboxMLCache (org.apache.ignite.examples.ml.util.SandboxMLCache): 2
TrainerTest (org.apache.ignite.ml.common.TrainerTest): 2
FileNotFoundException (java.io.FileNotFoundException): 1
Random (java.util.Random): 1
EncoderTrainer (org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer): 1
NormalizationTrainer (org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer): 1
SHA256UniformMapper (org.apache.ignite.ml.selection.split.mapper.SHA256UniformMapper): 1