Search in sources :

Example 16 with EncoderTrainer

Use of org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer in the gridgain project (by GridGain).

From the class Step_10_RandomSearch, method main:

/**
 * Run example: tunes the decision tree hyperparameters ("maxDeep",
 * "minImpurityDecrease") and the normalization power "p" with a random search
 * (at most 10 tries, fixed seed) over 3-fold cross-validation on the Titanic
 * train split, then trains a tree with the best parameters and evaluates
 * accuracy on the held-out test split.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 10 (Random Search) example started.");
    try (Ignite ignite = Ignition.start("examples-ml/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            // 75% of rows go to the train filter, the remaining 25% to the test filter.
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            // Preprocessing chain: string-encode the categorical features at indices 1 and 6,
            // impute missing values, scale each feature to [0, 1], then normalize rows (p = 1).
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).withEncodedFeature(6).fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            NormalizationTrainer<Integer, Vector> normalizationTrainer = new NormalizationTrainer<Integer, Vector>().withP(1);
            Preprocessor<Integer, Vector> normalizationPreprocessor = normalizationTrainer.fit(ignite, dataCache, minMaxScalerPreprocessor);
            // Tune hyperparams with K-fold Cross-Validation on the split training set.
            // This trainer instance is mutated in place by the consumers registered below.
            DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
            CrossValidation<DecisionTreeNode, Double, Integer, Vector> scoreCalculator = new CrossValidation<>();
            SerializableDoubleConsumer maxDeep = trainerCV::withMaxDeep;
            SerializableDoubleConsumer minImpurityDecrease = trainerCV::withMinImpurityDecrease;
            // NOTE(review): "p" mutates normalizationTrainer, but normalizationPreprocessor was
            // already fit above — confirm that tuning "p" actually affects the CV pipeline.
            SerializableDoubleConsumer p = normalizationTrainer::withP;
            // Random search over the three parameter grids: at most 10 sampled combinations,
            // seed fixed at 12 for reproducible runs.
            ParamGrid paramGrid = new ParamGrid().withParameterSearchStrategy(new RandomStrategy().withMaxTries(10).withSeed(12L)).addHyperParam("p", p, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 }).addHyperParam("maxDeep", maxDeep, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 }).addHyperParam("minImpurityDecrease", minImpurityDecrease, new Double[] { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 });
            // Metric to optimize: accuracy, with class labels 0.0 (negative) and 1.0 (positive).
            SerializableFunction<BinaryClassificationMetricValues, Double> acc = m -> m.accuracy();
            BinaryClassificationMetrics metrics = (BinaryClassificationMetrics) new BinaryClassificationMetrics().withNegativeClsLb(0.0).withPositiveClsLb(1.0).withMetric(acc);
            // 3-fold CV restricted to the train filter; runs the raw trainer, not a pipeline.
            scoreCalculator.withIgnite(ignite).withUpstreamCache(dataCache).withTrainer(trainerCV).withMetric(metrics).withFilter(split.getTrainFilter()).isRunningOnPipeline(false).withPreprocessor(normalizationPreprocessor).withAmountOfFolds(3).withParamGrid(paramGrid);
            // "tuneHyperParamterers" is the (misspelled) method name in the Ignite API.
            CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParamterers();
            System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep") + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));
            // Fresh trainer configured with the best parameters found by the search.
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer().withMaxDeep(crossValidationRes.getBest("maxDeep")).withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));
            System.out.println(crossValidationRes);
            System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
            System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
            System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());
            crossValidationRes.getScoringBoard().forEach((hyperParams, score) -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
            // Train decision tree model.
            DecisionTreeNode bestMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + bestMdl);
            // Final evaluation on the held-out 25% test split.
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), bestMdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 10 (Random Search) example completed.");
        } catch (FileNotFoundException e) {
            // Example code: the Titanic CSV is missing; just report and exit.
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : Arrays(java.util.Arrays) Evaluator(org.apache.ignite.ml.selection.scoring.evaluator.Evaluator) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) Preprocessor(org.apache.ignite.ml.preprocessing.Preprocessor) EncoderType(org.apache.ignite.ml.preprocessing.encoding.EncoderType) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation) BinaryClassificationMetrics(org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetrics) SerializableDoubleConsumer(org.apache.ignite.examples.ml.util.SerializableDoubleConsumer) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) DecisionTreeNode(org.apache.ignite.ml.tree.DecisionTreeNode) TrainTestDatasetSplitter(org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) MinMaxScalerTrainer(org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainer) Vectorizer(org.apache.ignite.ml.dataset.feature.extractor.Vectorizer) Ignite(org.apache.ignite.Ignite) FileNotFoundException(java.io.FileNotFoundException) IgniteCache(org.apache.ignite.IgniteCache) DummyVectorizer(org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer) Accuracy(org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy) Ignition(org.apache.ignite.Ignition) ImputerTrainer(org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer) TrainTestSplit(org.apache.ignite.ml.selection.split.TrainTestSplit) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) SerializableFunction(org.apache.ignite.examples.ml.util.SerializableFunction) BinaryClassificationMetricValues(org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetricValues) NormalizationTrainer(org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer) 
RandomStrategy(org.apache.ignite.ml.selection.paramgrid.RandomStrategy) TitanicUtils(org.apache.ignite.examples.ml.tutorial.TitanicUtils) FileNotFoundException(java.io.FileNotFoundException) SerializableDoubleConsumer(org.apache.ignite.examples.ml.util.SerializableDoubleConsumer) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) BinaryClassificationMetrics(org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetrics) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) RandomStrategy(org.apache.ignite.ml.selection.paramgrid.RandomStrategy) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation) BinaryClassificationMetricValues(org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetricValues) DecisionTreeNode(org.apache.ignite.ml.tree.DecisionTreeNode)

Example 17 with EncoderTrainer

Use of org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer in the gridgain project (by GridGain).

From the class Step_5_Scaling, method main:

/**
 * Run example: builds the preprocessing chain (string encoding, imputing,
 * MinMax scaling, L1 normalization) over the Titanic data and trains a
 * decision tree classifier on the result.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 5 (scaling) example started.");
    try (Ignite ignite = Ignition.start("examples-ml/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> passengers = TitanicUtils.readPassengers(ignite);

            // Feature columns: "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare"; label is column 1.
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);

            // Turn the string-valued features at indices 1 and 6 into numeric codes.
            Preprocessor<Integer, Vector> encoded = new EncoderTrainer<Integer, Vector>()
                .withEncoderType(EncoderType.STRING_ENCODER)
                .withEncodedFeature(1)
                .withEncodedFeature(6)
                .fit(ignite, passengers, vectorizer);

            // Fill in missing feature values.
            Preprocessor<Integer, Vector> imputed =
                new ImputerTrainer<Integer, Vector>().fit(ignite, passengers, encoded);

            // Rescale every feature to the [0, 1] range.
            Preprocessor<Integer, Vector> scaled =
                new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, passengers, imputed);

            // Normalize each row with the L1 norm (p = 1).
            Preprocessor<Integer, Vector> normalized = new NormalizationTrainer<Integer, Vector>()
                .withP(1)
                .fit(ignite, passengers, scaled);

            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);

            // Train decision tree model.
            DecisionTreeNode mdl = trainer.fit(ignite, passengers, normalized);
            System.out.println("\n>>> Trained model: " + mdl);

            double accuracy = Evaluator.evaluate(passengers, mdl, normalized, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 5 (scaling) example completed.");
        } catch (FileNotFoundException e) {
            // Example code: the dataset file is missing; report and exit.
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : FileNotFoundException(java.io.FileNotFoundException) NormalizationTrainer(org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) DecisionTreeNode(org.apache.ignite.ml.tree.DecisionTreeNode)

Example 18 with EncoderTrainer

Use of org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer in the gridgain project (by GridGain).

From the class Step_6_KNN, method main:

/**
 * Run example: builds the preprocessing chain (string encoding, imputing,
 * MinMax scaling, L1 normalization) over the Titanic data and trains a
 * weighted 1-nearest-neighbor classifier on the result.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 6 (kNN) example started.");
    try (Ignite ignite = Ignition.start("examples-ml/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> passengers = TitanicUtils.readPassengers(ignite);

            // Feature columns: "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare"; label is column 1.
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);

            // Turn the string-valued features at indices 1 and 6 into numeric codes.
            Preprocessor<Integer, Vector> encoded = new EncoderTrainer<Integer, Vector>()
                .withEncoderType(EncoderType.STRING_ENCODER)
                .withEncodedFeature(1)
                .withEncodedFeature(6)
                .fit(ignite, passengers, vectorizer);

            // Fill in missing feature values.
            Preprocessor<Integer, Vector> imputed =
                new ImputerTrainer<Integer, Vector>().fit(ignite, passengers, encoded);

            // Rescale every feature to the [0, 1] range.
            Preprocessor<Integer, Vector> scaled =
                new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, passengers, imputed);

            // Normalize each row with the L1 norm (p = 1).
            Preprocessor<Integer, Vector> normalized = new NormalizationTrainer<Integer, Vector>()
                .withP(1)
                .fit(ignite, passengers, scaled);

            // Distance-weighted kNN with a single neighbor.
            KNNClassificationTrainer trainer = new KNNClassificationTrainer()
                .withK(1)
                .withWeighted(true);

            // Train KNN tree model.
            KNNClassificationModel mdl = trainer.fit(ignite, passengers, normalized);
            System.out.println("\n>>> Trained model: " + mdl);

            double accuracy = Evaluator.evaluate(passengers, mdl, normalized, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 6 (kNN) example completed.");
        } catch (FileNotFoundException e) {
            // Example code: the dataset file is missing; report and exit.
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : KNNClassificationTrainer(org.apache.ignite.ml.knn.classification.KNNClassificationTrainer) FileNotFoundException(java.io.FileNotFoundException) NormalizationTrainer(org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer) KNNClassificationModel(org.apache.ignite.ml.knn.classification.KNNClassificationModel) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector)

Example 19 with EncoderTrainer

Use of org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer in the gridgain project (by GridGain).

From the class Step_13_Genetic_Programming_Search, method main:

/**
 * Run example: tunes the decision tree hyperparameters ("maxDeep",
 * "minImpurityDecrease") and the normalization power "p" with an evolutionary
 * (genetic programming) search over 3-fold cross-validation on the Titanic
 * train split, then trains a tree with the best parameters and evaluates
 * accuracy on the held-out test split.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 13 (Genetic Programming) example started.");
    try (Ignite ignite = Ignition.start("examples-ml/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            // 75% of rows go to the train filter, the remaining 25% to the test filter.
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            // Preprocessing chain: string-encode the categorical features at indices 1 and 6,
            // impute missing values, scale each feature to [0, 1], then normalize rows (p = 1).
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).withEncodedFeature(6).fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            NormalizationTrainer<Integer, Vector> normalizationTrainer = new NormalizationTrainer<Integer, Vector>().withP(1);
            Preprocessor<Integer, Vector> normalizationPreprocessor = normalizationTrainer.fit(ignite, dataCache, minMaxScalerPreprocessor);
            // Tune hyperparams with K-fold Cross-Validation on the split training set.
            // This trainer instance is mutated in place by the consumers registered below.
            DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
            CrossValidation<DecisionTreeNode, Double, Integer, Vector> scoreCalculator = new CrossValidation<>();
            SerializableDoubleConsumer maxDeep = trainerCV::withMaxDeep;
            SerializableDoubleConsumer minImpurityDecrease = trainerCV::withMinImpurityDecrease;
            // NOTE(review): "p" mutates normalizationTrainer, but normalizationPreprocessor was
            // already fit above — confirm that tuning "p" actually affects the CV pipeline.
            SerializableDoubleConsumer p = normalizationTrainer::withP;
            // Evolutionary search over the three parameter grids (default GP settings).
            ParamGrid paramGrid = new ParamGrid().withParameterSearchStrategy(new EvolutionOptimizationStrategy()).addHyperParam("p", p, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 }).addHyperParam("maxDeep", maxDeep, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 }).addHyperParam("minImpurityDecrease", minImpurityDecrease, new Double[] { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 });
            // Metric to optimize: accuracy, with class labels 0.0 (negative) and 1.0 (positive).
            SerializableFunction<BinaryClassificationMetricValues, Double> acc = m -> m.accuracy();
            BinaryClassificationMetrics metrics = (BinaryClassificationMetrics) new BinaryClassificationMetrics().withNegativeClsLb(0.0).withPositiveClsLb(1.0).withMetric(acc);
            // 3-fold CV restricted to the train filter; runs the raw trainer, not a pipeline.
            scoreCalculator.withIgnite(ignite).withUpstreamCache(dataCache).withTrainer(trainerCV).withMetric(metrics).withFilter(split.getTrainFilter()).isRunningOnPipeline(false).withPreprocessor(normalizationPreprocessor).withAmountOfFolds(3).withParamGrid(paramGrid);
            // "tuneHyperParamterers" is the (misspelled) method name in the Ignite API.
            CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParamterers();
            System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep") + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));
            // Fresh trainer configured with the best parameters found by the search.
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer().withMaxDeep(crossValidationRes.getBest("maxDeep")).withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));
            System.out.println(crossValidationRes);
            System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
            System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
            System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());
            crossValidationRes.getScoringBoard().forEach((hyperParams, score) -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
            // Train decision tree model.
            DecisionTreeNode bestMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + bestMdl);
            // Final evaluation on the held-out 25% test split.
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), bestMdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 13 (Genetic Programming) example completed.");
        } catch (FileNotFoundException e) {
            // Example code: the Titanic CSV is missing; just report and exit.
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : Arrays(java.util.Arrays) Evaluator(org.apache.ignite.ml.selection.scoring.evaluator.Evaluator) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) Preprocessor(org.apache.ignite.ml.preprocessing.Preprocessor) EncoderType(org.apache.ignite.ml.preprocessing.encoding.EncoderType) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation) BinaryClassificationMetrics(org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetrics) SerializableDoubleConsumer(org.apache.ignite.examples.ml.util.SerializableDoubleConsumer) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) DecisionTreeNode(org.apache.ignite.ml.tree.DecisionTreeNode) TrainTestDatasetSplitter(org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter) EvolutionOptimizationStrategy(org.apache.ignite.ml.selection.paramgrid.EvolutionOptimizationStrategy) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) MinMaxScalerTrainer(org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainer) Vectorizer(org.apache.ignite.ml.dataset.feature.extractor.Vectorizer) Ignite(org.apache.ignite.Ignite) FileNotFoundException(java.io.FileNotFoundException) IgniteCache(org.apache.ignite.IgniteCache) DummyVectorizer(org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer) Accuracy(org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy) Ignition(org.apache.ignite.Ignition) ImputerTrainer(org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer) TrainTestSplit(org.apache.ignite.ml.selection.split.TrainTestSplit) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) SerializableFunction(org.apache.ignite.examples.ml.util.SerializableFunction) BinaryClassificationMetricValues(org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetricValues) 
NormalizationTrainer(org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer) TitanicUtils(org.apache.ignite.examples.ml.tutorial.TitanicUtils) FileNotFoundException(java.io.FileNotFoundException) EvolutionOptimizationStrategy(org.apache.ignite.ml.selection.paramgrid.EvolutionOptimizationStrategy) SerializableDoubleConsumer(org.apache.ignite.examples.ml.util.SerializableDoubleConsumer) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) BinaryClassificationMetrics(org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetrics) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation) BinaryClassificationMetricValues(org.apache.ignite.ml.selection.scoring.metric.classification.BinaryClassificationMetricValues) DecisionTreeNode(org.apache.ignite.ml.tree.DecisionTreeNode)

Example 20 with EncoderTrainer

Use of org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer in the ignite project (by Apache).

From the class LabelEncoderExample, method main:

/**
 * Run example: string-encodes two categorical features of the mushrooms
 * dataset, label-encodes the target column, and trains a decision tree
 * classifier on the encoded data.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Train Decision Tree model on mushrooms.csv dataset.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Object[]> mushrooms =
                new SandboxMLCache(ignite).fillObjectCacheWithCategoricalData(MLSandboxDatasets.MUSHROOMS);

            // Columns 1 and 2 are features; column 0 is the label.
            final Vectorizer<Integer, Object[], Integer, Object> vectorizer =
                new ObjectArrayVectorizer<Integer>(1, 2).labeled(0);

            // Turn both string-valued features into numeric codes.
            Preprocessor<Integer, Object[]> encodedFeatures = new EncoderTrainer<Integer, Object[]>()
                .withEncoderType(EncoderType.STRING_ENCODER)
                .withEncodedFeature(0)
                .withEncodedFeature(1)
                .fit(ignite, mushrooms, vectorizer);

            // Encode the label column as well.
            Preprocessor<Integer, Object[]> encodedLabels = new EncoderTrainer<Integer, Object[]>()
                .withEncoderType(EncoderType.LABEL_ENCODER)
                .fit(ignite, mushrooms, encodedFeatures);

            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);

            // Train decision tree model.
            DecisionTreeModel mdl = trainer.fit(ignite, mushrooms, encodedLabels);
            System.out.println("\n>>> Trained model: " + mdl);

            double accuracy = Evaluator.evaluate(mushrooms, mdl, encodedLabels, new Accuracy());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Train Decision Tree model on mushrooms.csv dataset.");
        } catch (FileNotFoundException e) {
            // Example code: the dataset file is missing; report and exit.
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : SandboxMLCache(org.apache.ignite.examples.ml.util.SandboxMLCache) DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) FileNotFoundException(java.io.FileNotFoundException) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) Accuracy(org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer)

Aggregations

FileNotFoundException (java.io.FileNotFoundException)32 Ignite (org.apache.ignite.Ignite)32 EncoderTrainer (org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer)32 Vector (org.apache.ignite.ml.math.primitives.vector.Vector)30 DecisionTreeClassificationTrainer (org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer)27 NormalizationTrainer (org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer)21 CrossValidation (org.apache.ignite.ml.selection.cv.CrossValidation)15 CrossValidationResult (org.apache.ignite.ml.selection.cv.CrossValidationResult)13 ParamGrid (org.apache.ignite.ml.selection.paramgrid.ParamGrid)13 DecisionTreeModel (org.apache.ignite.ml.tree.DecisionTreeModel)12 DecisionTreeNode (org.apache.ignite.ml.tree.DecisionTreeNode)12 Accuracy (org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy)10 SerializableDoubleConsumer (org.apache.ignite.examples.ml.util.SerializableDoubleConsumer)7 Arrays (java.util.Arrays)6 IgniteCache (org.apache.ignite.IgniteCache)6 Ignition (org.apache.ignite.Ignition)6 SerializableFunction (org.apache.ignite.examples.ml.util.SerializableFunction)6 Vectorizer (org.apache.ignite.ml.dataset.feature.extractor.Vectorizer)6 DummyVectorizer (org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer)6 Preprocessor (org.apache.ignite.ml.preprocessing.Preprocessor)6