Search in sources :

Example 1 with DecisionTreeClassificationTrainer

Use of org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer in project ignite by apache.

The class TrainingWithCustomPreprocessorsExample, method main.

/**
 * Run example.
 *
 * @param args Command line arguments.
 * @throws Exception If the example fails.
 */
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        IgniteCache<Integer, Vector> trainingSet = null;
        try {
            trainingSet = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.BOSTON_HOUSE_PRICES);
            Vectorizer<Integer, Vector, Integer, Double> basicVectorizer = new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, trainingSet, basicVectorizer);
            // In-place definition of custom preprocessor by lambda expression.
            Preprocessor<Integer, Vector> customPreprocessor = (k, v) -> {
                LabeledVector res = imputingPreprocessor.apply(k, v);
                double fifthFeature = res.features().get(5);
                Vector updatedVector = res.features().set(5, fifthFeature > 0 ? Math.log(fifthFeature) : -1);
                return updatedVector.labeled(res.label());
            };
            Vectorizer9000 customVectorizer = new Vectorizer9000(customPreprocessor);
            PipelineMdl<Integer, Vector> mdl = new Pipeline<Integer, Vector, Integer, Double>()
                .addVectorizer(customVectorizer)
                .addPreprocessingTrainer(new MinMaxScalerTrainer<Integer, Vector>())
                .addPreprocessingTrainer(new NormalizationTrainer<Integer, Vector>().withP(1))
                .addPreprocessingTrainer(getCustomTrainer())
                .addTrainer(new DecisionTreeClassificationTrainer(5, 0))
                .fit(ignite, trainingSet);
            System.out.println(">>> Perform scoring.");
            double score = Evaluator.evaluate(trainingSet, mdl, mdl.getPreprocessor(), MetricName.R2);
            System.out.println(">>> R^2 score: " + score);
        } finally {
            if (trainingSet != null)
                trainingSet.destroy();
        }
    } finally {
        System.out.flush();
    }
}
Also used : PipelineMdl(org.apache.ignite.ml.pipeline.PipelineMdl) Evaluator(org.apache.ignite.ml.selection.scoring.evaluator.Evaluator) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) Preprocessor(org.apache.ignite.ml.preprocessing.Preprocessor) Ignite(org.apache.ignite.Ignite) DatasetBuilder(org.apache.ignite.ml.dataset.DatasetBuilder) PreprocessingTrainer(org.apache.ignite.ml.preprocessing.PreprocessingTrainer) IgniteCache(org.apache.ignite.IgniteCache) DummyVectorizer(org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer) Ignition(org.apache.ignite.Ignition) LabeledVector(org.apache.ignite.ml.structures.LabeledVector) MLSandboxDatasets(org.apache.ignite.examples.ml.util.MLSandboxDatasets) SandboxMLCache(org.apache.ignite.examples.ml.util.SandboxMLCache) VectorUtils(org.apache.ignite.ml.math.primitives.vector.VectorUtils) MetricName(org.apache.ignite.ml.selection.scoring.metric.MetricName) ImputerTrainer(org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) NormalizationTrainer(org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer) Pipeline(org.apache.ignite.ml.pipeline.Pipeline) MinMaxScalerTrainer(org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainer) LearningEnvironmentBuilder(org.apache.ignite.ml.environment.LearningEnvironmentBuilder) Vectorizer(org.apache.ignite.ml.dataset.feature.extractor.Vectorizer)
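
The listing above is not self-contained: getCustomTrainer() and Vectorizer9000 are helpers defined elsewhere in TrainingWithCustomPreprocessorsExample. The snippet below is a hedged sketch of how the trained PipelineMdl could score a single raw cache row; it reuses only calls that already appear in the example (mdl.getPreprocessor(), Preprocessor.apply, predict), the key is hypothetical, and the assumption that predict expects already-preprocessed features mirrors how Evaluator is invoked above.

// Hedged sketch: score one raw row with the trained pipeline model.
// The key is hypothetical; the row layout must match the BOSTON_HOUSE_PRICES
// entries stored in the cache (label in the first coordinate).
Integer someKey = 0;
Vector rawRow = trainingSet.get(someKey);
// The pipeline's preprocessor turns a (key, value) pair into a LabeledVector,
// exactly as it does when passed to Evaluator.evaluate above.
LabeledVector processed = mdl.getPreprocessor().apply(someKey, rawRow);
double predicted = mdl.predict(processed.features());
System.out.println(">>> Predicted target for key " + someKey + ": " + predicted);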

Example 2 with DecisionTreeClassificationTrainer

Use of org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer in project ignite by apache.

The class DecisionTreeClassificationExportImportExample, method main.

/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 * @throws IOException If the JSON model file cannot be written or read.
 */
public static void main(String[] args) throws IOException {
    System.out.println(">>> Decision tree classification trainer example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println("\n>>> Ignite grid started.");
        // Create cache with training data.
        CacheConfiguration<Integer, LabeledVector<Double>> trainingSetCfg = new CacheConfiguration<>();
        trainingSetCfg.setName("TRAINING_SET");
        trainingSetCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
        IgniteCache<Integer, LabeledVector<Double>> trainingSet = null;
        Path jsonMdlPath = null;
        try {
            trainingSet = ignite.createCache(trainingSetCfg);
            Random rnd = new Random(0);
            // Fill training data.
            for (int i = 0; i < 1000; i++) trainingSet.put(i, generatePoint(rnd));
            // Create classification trainer.
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
            // Train decision tree model.
            LabeledDummyVectorizer<Integer, Double> vectorizer = new LabeledDummyVectorizer<>();
            DecisionTreeModel mdl = trainer.fit(ignite, trainingSet, vectorizer);
            System.out.println("\n>>> Exported Decision tree classification model: " + mdl);
            int correctPredictions = evaluateModel(rnd, mdl);
            System.out.println("\n>>> Accuracy for exported Decision tree classification model: " + correctPredictions / 10.0 + "%");
            jsonMdlPath = Files.createTempFile(null, null);
            mdl.toJSON(jsonMdlPath);
            DecisionTreeModel modelImportedFromJSON = DecisionTreeModel.fromJSON(jsonMdlPath);
            System.out.println("\n>>> Imported Decision tree classification model: " + modelImportedFromJSON);
            correctPredictions = evaluateModel(rnd, modelImportedFromJSON);
            System.out.println("\n>>> Accuracy for imported Decision tree classification model: " + correctPredictions / 10.0 + "%");
            System.out.println("\n>>> Decision tree classification trainer example completed.");
        } finally {
            if (trainingSet != null)
                trainingSet.destroy();
            if (jsonMdlPath != null)
                Files.deleteIfExists(jsonMdlPath);
        }
    } finally {
        System.out.flush();
    }
}
Also used : Path(java.nio.file.Path) DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) LabeledVector(org.apache.ignite.ml.structures.LabeledVector) LabeledDummyVectorizer(org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) Random(java.util.Random) Ignite(org.apache.ignite.Ignite) RendezvousAffinityFunction(org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction) CacheConfiguration(org.apache.ignite.configuration.CacheConfiguration)
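
generatePoint(rnd) and evaluateModel(rnd, mdl) are private helpers of DecisionTreeClassificationExportImportExample and are not shown above. The sketch below is a plausible, clearly hypothetical reconstruction that is consistent with the calling code (1000 training points, and correctPredictions / 10.0 printed as a percentage); the actual helpers and the labeling rule may differ. It additionally assumes an import of org.apache.ignite.ml.math.primitives.vector.VectorUtils.

// Hypothetical sketch of the two helpers referenced by main() above.
private static LabeledVector<Double> generatePoint(Random rnd) {
    double x = rnd.nextDouble() - 0.5;
    double y = rnd.nextDouble() - 0.5;
    // Assumed labeling rule: class 0 in the first/third quadrant, class 1 otherwise.
    return new LabeledVector<>(VectorUtils.of(x, y), x * y > 0 ? 0.0 : 1.0);
}

private static int evaluateModel(Random rnd, DecisionTreeModel mdl) {
    int correct = 0;
    // 1000 test points, so the caller's correct / 10.0 yields a percentage.
    for (int i = 0; i < 1000; i++) {
        LabeledVector<Double> pnt = generatePoint(rnd);
        // Labels are exact 0.0/1.0 values, so an equality check is sufficient here.
        if (mdl.predict(pnt.features()).equals(pnt.label()))
            correct++;
    }
    return correct;
}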

Example 3 with DecisionTreeClassificationTrainer

Use of org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer in project ignite by apache.

The class Step_9_Scaling_With_Stacking, method main.

/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 9 (scaling with stacking) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>()
                .withEncoderType(EncoderType.STRING_ENCODER)
                .withEncodedFeature(1)
                // <--- Changed index here.
                .withEncodedFeature(6)
                .fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            Preprocessor<Integer, Vector> normalizationPreprocessor = new NormalizationTrainer<Integer, Vector>().withP(1).fit(ignite, dataCache, minMaxScalerPreprocessor);
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
            DecisionTreeClassificationTrainer trainer1 = new DecisionTreeClassificationTrainer(3, 0);
            DecisionTreeClassificationTrainer trainer2 = new DecisionTreeClassificationTrainer(4, 0);
            LogisticRegressionSGDTrainer aggregator = new LogisticRegressionSGDTrainer()
                .withUpdatesStgy(new UpdatesStrategy<>(
                    new SimpleGDUpdateCalculator(0.2),
                    SimpleGDParameterUpdate.SUM_LOCAL,
                    SimpleGDParameterUpdate.AVG));
            StackedModel<Vector, Vector, Double, LogisticRegressionModel> mdl =
                new StackedVectorDatasetTrainer<>(aggregator)
                    .addTrainerWithDoubleOutput(trainer)
                    .addTrainerWithDoubleOutput(trainer1)
                    .addTrainerWithDoubleOutput(trainer2)
                    .fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + mdl);
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), mdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 9 (scaling with stacking) example completed.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : LogisticRegressionSGDTrainer(org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer) FileNotFoundException(java.io.FileNotFoundException) LogisticRegressionModel(org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel) NormalizationTrainer(org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer) StackedVectorDatasetTrainer(org.apache.ignite.ml.composition.stacking.StackedVectorDatasetTrainer) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) SimpleGDUpdateCalculator(org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector)
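
The accuracy above is computed in bulk by Evaluator. As a complementary illustration, the hedged sketch below scans a few rows from the test part of the split and prints truth/prediction pairs for the stacked model. It assumes the standard Ignite ScanQuery/QueryCursor APIs (org.apache.ignite.cache.query.ScanQuery, org.apache.ignite.cache.query.QueryCursor, javax.cache.Cache); everything else reuses objects defined in the example, and the five-row limit is purely illustrative.

// Hedged sketch: inspect individual test-partition predictions of the stacked model.
try (QueryCursor<Cache.Entry<Integer, Vector>> rows =
         dataCache.query(new ScanQuery<>(split.getTestFilter()))) {
    int printed = 0;
    for (Cache.Entry<Integer, Vector> e : rows) {
        // Apply the same preprocessing chain that was used for training and evaluation.
        LabeledVector row = normalizationPreprocessor.apply(e.getKey(), e.getValue());
        double prediction = mdl.predict(row.features());
        System.out.println(">>> Truth: " + row.label() + ", prediction: " + prediction);
        if (++printed >= 5)
            break;
    }
}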

Example 4 with DecisionTreeClassificationTrainer

Use of org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer in project ignite by apache.

The class Step_15_Parallel_Random_Search, method main.

/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 15 (Parallel Random Search) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).withEncodedFeature(6).fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            NormalizationTrainer<Integer, Vector> normalizationTrainer = new NormalizationTrainer<Integer, Vector>().withP(1);
            Preprocessor<Integer, Vector> normalizationPreprocessor = normalizationTrainer.fit(ignite, dataCache, minMaxScalerPreprocessor);
            // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
            DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
            CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator = new CrossValidation<>();
            ParamGrid paramGrid = new ParamGrid()
                .withParameterSearchStrategy(new RandomStrategy().withMaxTries(10).withSeed(12L))
                .addHyperParam("p", normalizationTrainer::withP,
                    new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 })
                .addHyperParam("maxDeep", trainerCV::withMaxDeep,
                    new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 })
                .addHyperParam("minImpurityDecrease", trainerCV::withMinImpurityDecrease,
                    new Double[] { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 });
            scoreCalculator
                .withIgnite(ignite)
                .withUpstreamCache(dataCache)
                .withEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder()
                    .withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
                    .withLoggingFactoryDependency(ConsoleLogger.Factory.LOW))
                .withTrainer(trainerCV)
                .isRunningOnPipeline(false)
                .withMetric(MetricName.ACCURACY)
                .withFilter(split.getTrainFilter())
                .withPreprocessor(normalizationPreprocessor)
                .withAmountOfFolds(3)
                .withParamGrid(paramGrid);
            CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();
            System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep") + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer()
                .withMaxDeep(crossValidationRes.getBest("maxDeep"))
                .withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));
            System.out.println(crossValidationRes);
            System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
            System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
            System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());
            crossValidationRes.getScoringBoard().forEach((hyperParams, score) -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
            // Train decision tree model.
            DecisionTreeModel bestMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + bestMdl);
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), bestMdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 15 (Parallel Random Search) example completed.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) FileNotFoundException(java.io.FileNotFoundException) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) RandomStrategy(org.apache.ignite.ml.selection.paramgrid.RandomStrategy) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation)
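
The RandomStrategy above samples only 10 of the 10 * 10 * 11 grid combinations. For comparison, the hedged sketch below tunes a much smaller grid with no explicit search strategy, assuming that an unconfigured ParamGrid falls back to its default exhaustive (brute-force) search; it reuses the scoreCalculator and trainerCV already configured in the example and only swaps the grid.

// Hedged sketch: exhaustive search over a reduced grid of the same tree hyper-parameters.
ParamGrid smallGrid = new ParamGrid()
    .addHyperParam("maxDeep", trainerCV::withMaxDeep, new Double[] { 2.0, 4.0, 6.0 })
    .addHyperParam("minImpurityDecrease", trainerCV::withMinImpurityDecrease, new Double[] { 0.0, 0.2, 0.4 });

scoreCalculator.withParamGrid(smallGrid);
CrossValidationResult exhaustiveRes = scoreCalculator.tuneHyperParameters();
System.out.println("Best hyper params (exhaustive search): " + exhaustiveRes.getBestHyperParams());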

Example 5 with DecisionTreeClassificationTrainer

Use of org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer in project ignite by apache.

The class DecisionTreeClassificationTrainerSQLInferenceExample, method main.

/**
 * Run example.
 */
public static void main(String[] args) throws IOException {
    System.out.println(">>> Decision tree classification trainer example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite-ml.xml")) {
        System.out.println(">>> Ignite grid started.");
        // Dummy cache is required to perform SQL queries.
        CacheConfiguration<?, ?> cacheCfg = new CacheConfiguration<>(DUMMY_CACHE_NAME).setSqlSchema("PUBLIC").setSqlFunctionClasses(SQLFunctions.class);
        IgniteCache<?, ?> cache = null;
        try {
            cache = ignite.getOrCreateCache(cacheCfg);
            System.out.println(">>> Creating table with training data...");
            cache.query(new SqlFieldsQuery("create table titanic_train (\n" + "    passengerid int primary key,\n" + "    pclass int,\n" + "    survived int,\n" + "    name varchar(255),\n" + "    sex varchar(255),\n" + "    age float,\n" + "    sibsp int,\n" + "    parch int,\n" + "    ticket varchar(255),\n" + "    fare float,\n" + "    cabin varchar(255),\n" + "    embarked varchar(255)\n" + ") with \"template=partitioned\";")).getAll();
            System.out.println(">>> Creating table with test data...");
            cache.query(new SqlFieldsQuery("create table titanic_test (\n" + "    passengerid int primary key,\n" + "    pclass int,\n" + "    survived int,\n" + "    name varchar(255),\n" + "    sex varchar(255),\n" + "    age float,\n" + "    sibsp int,\n" + "    parch int,\n" + "    ticket varchar(255),\n" + "    fare float,\n" + "    cabin varchar(255),\n" + "    embarked varchar(255)\n" + ") with \"template=partitioned\";")).getAll();
            loadTitanicDatasets(ignite, cache);
            System.out.println(">>> Prepare trainer...");
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(4, 0);
            System.out.println(">>> Perform training...");
            DecisionTreeModel mdl = trainer.fit(
                new SqlDatasetBuilder(ignite, "SQL_PUBLIC_TITANIC_TRAIN"),
                new BinaryObjectVectorizer<>("pclass", "age", "sibsp", "parch", "fare")
                    .withFeature("sex", BinaryObjectVectorizer.Mapping.create().map("male", 1.0).defaultValue(0.0))
                    .labeled("survived"));
            System.out.println(">>> Saving model...");
            // Model storage is used to store raw serialized model.
            System.out.println("Saving model into model storage...");
            IgniteModelStorageUtil.saveModel(ignite, mdl, "titanic_model_tree");
            // Making inference using saved model.
            System.out.println("Inference...");
            try (QueryCursor<List<?>> cursor = cache.query(new SqlFieldsQuery(
                "select survived as truth, " +
                "predict('titanic_model_tree', pclass, age, sibsp, parch, fare, " +
                "case sex when 'male' then 1 else 0 end) as prediction " +
                "from titanic_train"))) {
                // Print inference result.
                System.out.println("| Truth | Prediction |");
                System.out.println("|--------------------|");
                for (List<?> row : cursor) System.out.println("|     " + row.get(0) + " |        " + row.get(1) + " |");
            }
            IgniteModelStorageUtil.removeModel(ignite, "titanic_model_tree");
        } finally {
            cache.query(new SqlFieldsQuery("DROP TABLE titanic_train"));
            cache.query(new SqlFieldsQuery("DROP TABLE titanic_test"));
            cache.destroy();
        }
    } finally {
        System.out.flush();
    }
}
Also used : DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) Ignite(org.apache.ignite.Ignite) List(java.util.List) CacheConfiguration(org.apache.ignite.configuration.CacheConfiguration) SqlFieldsQuery(org.apache.ignite.cache.query.SqlFieldsQuery) SqlDatasetBuilder(org.apache.ignite.ml.sql.SqlDatasetBuilder)
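
Because the model is deployed as the SQL function predict(...), the overall accuracy could also be computed directly in SQL rather than by iterating over the cursor. The sketch below is a hedged follow-up query reusing the exact predict(...) call from the inference query above; casting the double prediction to int for comparison with the survived column is an assumption and may need adjustment.

// Hedged sketch: compute training-set accuracy on the SQL side.
List<List<?>> accuracyRows = cache.query(new SqlFieldsQuery(
    "select 100.0 * sum(case when survived = cast(" +
    "predict('titanic_model_tree', pclass, age, sibsp, parch, fare, " +
    "case sex when 'male' then 1 else 0 end) as int) then 1 else 0 end) / count(*) " +
    "from titanic_train")).getAll();
System.out.println(">>> SQL-side accuracy: " + accuracyRows.get(0).get(0) + "%");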

Aggregations

DecisionTreeClassificationTrainer (org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer): 31
Ignite (org.apache.ignite.Ignite): 28
DecisionTreeModel (org.apache.ignite.ml.tree.DecisionTreeModel): 26
FileNotFoundException (java.io.FileNotFoundException): 21
Vector (org.apache.ignite.ml.math.primitives.vector.Vector): 21
EncoderTrainer (org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer): 14
NormalizationTrainer (org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer): 9
CrossValidation (org.apache.ignite.ml.selection.cv.CrossValidation): 9
CrossValidationResult (org.apache.ignite.ml.selection.cv.CrossValidationResult): 7
ParamGrid (org.apache.ignite.ml.selection.paramgrid.ParamGrid): 7
SandboxMLCache (org.apache.ignite.examples.ml.util.SandboxMLCache): 5
CacheConfiguration (org.apache.ignite.configuration.CacheConfiguration): 4
LabeledVector (org.apache.ignite.ml.structures.LabeledVector): 4
HashMap (java.util.HashMap): 3
Random (java.util.Random): 3
RendezvousAffinityFunction (org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction): 3
LabeledDummyVectorizer (org.apache.ignite.ml.dataset.feature.extractor.impl.LabeledDummyVectorizer): 3
MinMaxScalerTrainer (org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainer): 3
Test (org.junit.Test): 3
List (java.util.List): 2