Search in sources:

Example 6 with ParamGrid

Use of org.apache.ignite.ml.selection.paramgrid.ParamGrid in project ignite by apache.

The main method of the class Step_8_CV_with_Param_Grid.

/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 8 (cross-validation with param grid) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            // <--- Changed index here: the second encoded feature is withEncodedFeature(6).
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).withEncodedFeature(6).fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            Preprocessor<Integer, Vector> normalizationPreprocessor = new NormalizationTrainer<Integer, Vector>().withP(1).fit(ignite, dataCache, minMaxScalerPreprocessor);
            // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
            DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
            CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator = new CrossValidation<>();
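            // No search strategy is set on the grid below, so ParamGrid's default exhaustive (brute-force)
            // search evaluates every combination: 6 maxDeep values x 3 minImpurityDecrease values = 18 runs.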
            ParamGrid paramGrid = new ParamGrid().addHyperParam("maxDeep", trainerCV::withMaxDeep, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 10.0 }).addHyperParam("minImpurityDecrease", trainerCV::withMinImpurityDecrease, new Double[] { 0.0, 0.25, 0.5 });
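            // 3-fold cross-validation on the training part of the split, scoring each combination by accuracy.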
            scoreCalculator.withIgnite(ignite).withUpstreamCache(dataCache).withTrainer(trainerCV).withMetric(MetricName.ACCURACY).withFilter(split.getTrainFilter()).isRunningOnPipeline(false).withPreprocessor(normalizationPreprocessor).withAmountOfFolds(3).withParamGrid(paramGrid);
            CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();
            System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep") + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer().withMaxDeep(crossValidationRes.getBest("maxDeep")).withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));
            System.out.println(crossValidationRes);
            System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
            System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
            System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());
            crossValidationRes.getScoringBoard().forEach((hyperParams, score) -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
            // Train decision tree model.
            DecisionTreeModel bestMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + bestMdl);
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), bestMdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 8 (cross-validation with param grid) example completed.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) FileNotFoundException(java.io.FileNotFoundException) NormalizationTrainer(org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) Ignite(org.apache.ignite.Ignite) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult)
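
Example 6 relies on ParamGrid's default exhaustive search because no strategy is set. As a minimal, hypothetical sketch (reusing trainerCV from the listing above and the BruteForceStrategy class listed in the aggregations below), the same choice could be written out explicitly:

// Explicit form of the default search strategy; behavior should match Example 6.
// Assumes: import org.apache.ignite.ml.selection.paramgrid.BruteForceStrategy;
ParamGrid explicitGrid = new ParamGrid()
    .withParameterSearchStrategy(new BruteForceStrategy())
    .addHyperParam("maxDeep", trainerCV::withMaxDeep, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 10.0 })
    .addHyperParam("minImpurityDecrease", trainerCV::withMinImpurityDecrease, new Double[] { 0.0, 0.25, 0.5 });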

Example 7 with ParamGrid

Use of org.apache.ignite.ml.selection.paramgrid.ParamGrid in project ignite by apache.

The main method of the class Step_13_RandomSearch.

/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 13 (Random Search) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).withEncodedFeature(6).fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            NormalizationTrainer<Integer, Vector> normalizationTrainer = new NormalizationTrainer<Integer, Vector>().withP(1);
            Preprocessor<Integer, Vector> normalizationPreprocessor = normalizationTrainer.fit(ignite, dataCache, minMaxScalerPreprocessor);
            // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
            DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
            CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator = new CrossValidation<>();
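            // RandomStrategy draws at most 10 of the 10 x 10 x 11 = 1100 grid combinations;
            // the fixed seed keeps the sampled subset reproducible across runs.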
            ParamGrid paramGrid = new ParamGrid().withParameterSearchStrategy(new RandomStrategy().withMaxTries(10).withSeed(12L)).addHyperParam("p", normalizationTrainer::withP, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 }).addHyperParam("maxDeep", trainerCV::withMaxDeep, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 }).addHyperParam("minImpurityDecrease", trainerCV::withMinImpurityDecrease, new Double[] { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 });
            scoreCalculator.withIgnite(ignite).withUpstreamCache(dataCache).withTrainer(trainerCV).withMetric(MetricName.ACCURACY).withFilter(split.getTrainFilter()).isRunningOnPipeline(false).withPreprocessor(normalizationPreprocessor).withAmountOfFolds(3).withParamGrid(paramGrid);
            CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();
            System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep") + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer().withMaxDeep(crossValidationRes.getBest("maxDeep")).withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));
            System.out.println(crossValidationRes);
            System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
            System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
            System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());
            crossValidationRes.getScoringBoard().forEach((hyperParams, score) -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
            // Train decision tree model.
            DecisionTreeModel bestMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + bestMdl);
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), bestMdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 13 (Random Search) example completed.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) FileNotFoundException(java.io.FileNotFoundException) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) RandomStrategy(org.apache.ignite.ml.selection.paramgrid.RandomStrategy) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation)

Example 8 with ParamGrid

Use of org.apache.ignite.ml.selection.paramgrid.ParamGrid in project ignite by apache.

The main method of the class Step_17_Parallel_Genetic_Programming_Search.

/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 17 (Parallel Genetic Programming) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).withEncodedFeature(6).fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            NormalizationTrainer<Integer, Vector> normalizationTrainer = new NormalizationTrainer<Integer, Vector>().withP(1);
            Preprocessor<Integer, Vector> normalizationPreprocessor = normalizationTrainer.fit(ignite, dataCache, minMaxScalerPreprocessor);
            // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
            DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
            CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator = new CrossValidation<>();
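            // EvolutionOptimizationStrategy searches the grid with a genetic algorithm instead of
            // enumerating every combination; the normalizer's p-norm is tuned as well via normalizationTrainer::withP.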
            ParamGrid paramGrid = new ParamGrid().withParameterSearchStrategy(new EvolutionOptimizationStrategy()).addHyperParam("p", normalizationTrainer::withP, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 }).addHyperParam("maxDeep", trainerCV::withMaxDeep, new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 }).addHyperParam("minImpurityDecrease", trainerCV::withMinImpurityDecrease, new Double[] { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 });
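            // The learning environment below runs the cross-validation work on the default parallelism
            // pool (ParallelismStrategy.ON_DEFAULT_POOL) and uses a low-verbosity console logger.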
            scoreCalculator.withIgnite(ignite).withUpstreamCache(dataCache).withEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL).withLoggingFactoryDependency(ConsoleLogger.Factory.LOW)).withTrainer(trainerCV).withMetric(MetricName.ACCURACY).withFilter(split.getTrainFilter()).isRunningOnPipeline(false).withPreprocessor(normalizationPreprocessor).withAmountOfFolds(3).withParamGrid(paramGrid);
            CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();
            System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep") + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer().withMaxDeep(crossValidationRes.getBest("maxDeep")).withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));
            System.out.println(crossValidationRes);
            System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
            System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
            System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());
            crossValidationRes.getScoringBoard().forEach((hyperParams, score) -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
            // Train decision tree model.
            DecisionTreeModel bestMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + bestMdl);
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), bestMdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 17 (Parallel Genetic Programming) example completed.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) FileNotFoundException(java.io.FileNotFoundException) EvolutionOptimizationStrategy(org.apache.ignite.ml.selection.paramgrid.EvolutionOptimizationStrategy) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation)

Example 9 with ParamGrid

Use of org.apache.ignite.ml.selection.paramgrid.ParamGrid in project ignite by apache.

The testRandomSearchWithPipeline method of the class CrossValidationTest.

/**
 * Tests random search over the parameter grid with cross-validation running on a pipeline.
 */
@Test
public void testRandomSearchWithPipeline() {
    Map<Integer, double[]> data = new HashMap<>();
    for (int i = 0; i < twoLinearlySeparableClasses.length; i++) data.put(i, twoLinearlySeparableClasses[i]);
    LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer().withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)).withMaxIterations(100000).withLocIterations(100).withBatchSize(14).withSeed(123L);
    Vectorizer<Integer, double[], Integer, Double> vectorizer = new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
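    // Random search over the grid: at most 10 tries with a fixed seed, plus a satisfactory-fitness
    // threshold of 0.9 that allows the search to stop early once that accuracy is reached.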
    ParamGrid paramGrid = new ParamGrid().withParameterSearchStrategy(new RandomStrategy().withMaxTries(10).withSeed(1234L).withSatisfactoryFitness(0.9)).addHyperParam("maxIterations", trainer::withMaxIterations, new Double[] { 10.0, 100.0, 1000.0, 10000.0 }).addHyperParam("locIterations", trainer::withLocIterations, new Double[] { 10.0, 100.0, 1000.0, 10000.0 }).addHyperParam("batchSize", trainer::withBatchSize, new Double[] { 1.0, 2.0, 4.0, 8.0, 16.0 });
    Pipeline<Integer, double[], Integer, Double> pipeline = new Pipeline<Integer, double[], Integer, Double>().addVectorizer(vectorizer).addTrainer(trainer);
    DebugCrossValidation<LogisticRegressionModel, Integer, double[]> scoreCalculator = (DebugCrossValidation<LogisticRegressionModel, Integer, double[]>) new DebugCrossValidation<LogisticRegressionModel, Integer, double[]>().withUpstreamMap(data).withAmountOfParts(1).withPipeline(pipeline).withMetric(MetricName.ACCURACY).withPreprocessor(vectorizer).withAmountOfFolds(4).isRunningOnPipeline(true).withParamGrid(paramGrid);
    CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();
    assertEquals(crossValidationRes.getBestAvgScore(), 0.9343858500738256, 1e-6);
    assertEquals(crossValidationRes.getScoringBoard().size(), 10);
}
Also used : LogisticRegressionSGDTrainer(org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer) HashMap(java.util.HashMap) LogisticRegressionModel(org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel) Pipeline(org.apache.ignite.ml.pipeline.Pipeline) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) SimpleGDUpdateCalculator(org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator) RandomStrategy(org.apache.ignite.ml.selection.paramgrid.RandomStrategy) Test(org.junit.Test)

Example 10 with ParamGrid

Use of org.apache.ignite.ml.selection.paramgrid.ParamGrid in project ignite by apache.

The testRandomSearch method of the class CrossValidationTest.

/**
 * Tests random search over the parameter grid with the trainer passed to cross-validation directly.
 */
@Test
public void testRandomSearch() {
    Map<Integer, double[]> data = new HashMap<>();
    for (int i = 0; i < twoLinearlySeparableClasses.length; i++) data.put(i, twoLinearlySeparableClasses[i]);
    LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer().withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2), SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG)).withMaxIterations(100000).withLocIterations(100).withBatchSize(14).withSeed(123L);
    Vectorizer<Integer, double[], Integer, Double> vectorizer = new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
    ParamGrid paramGrid = new ParamGrid().withParameterSearchStrategy(new RandomStrategy().withMaxTries(10).withSeed(1234L).withSatisfactoryFitness(0.9)).addHyperParam("maxIterations", trainer::withMaxIterations, new Double[] { 10.0, 100.0, 1000.0, 10000.0 }).addHyperParam("locIterations", trainer::withLocIterations, new Double[] { 10.0, 100.0, 1000.0, 10000.0 }).addHyperParam("batchSize", trainer::withBatchSize, new Double[] { 1.0, 2.0, 4.0, 8.0, 16.0 });
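    // Unlike testRandomSearchWithPipeline above, the trainer and vectorizer are passed to
    // DebugCrossValidation directly (isRunningOnPipeline(false)) instead of through a Pipeline.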
    DebugCrossValidation<LogisticRegressionModel, Integer, double[]> scoreCalculator = (DebugCrossValidation<LogisticRegressionModel, Integer, double[]>) new DebugCrossValidation<LogisticRegressionModel, Integer, double[]>().withUpstreamMap(data).withAmountOfParts(1).withTrainer(trainer).withMetric(MetricName.ACCURACY).withPreprocessor(vectorizer).withAmountOfFolds(4).isRunningOnPipeline(false).withParamGrid(paramGrid);
    CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();
    assertEquals(crossValidationRes.getBestAvgScore(), 0.9343858500738256, 1e-6);
    assertEquals(crossValidationRes.getScoringBoard().size(), 10);
}
Also used : LogisticRegressionSGDTrainer(org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer) HashMap(java.util.HashMap) LogisticRegressionModel(org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) SimpleGDUpdateCalculator(org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator) RandomStrategy(org.apache.ignite.ml.selection.paramgrid.RandomStrategy) Test(org.junit.Test)

Aggregations

ParamGrid (org.apache.ignite.ml.selection.paramgrid.ParamGrid): 10 usages
FileNotFoundException (java.io.FileNotFoundException): 7 usages
Ignite (org.apache.ignite.Ignite): 7 usages
Vector (org.apache.ignite.ml.math.primitives.vector.Vector): 7 usages
CrossValidation (org.apache.ignite.ml.selection.cv.CrossValidation): 7 usages
CrossValidationResult (org.apache.ignite.ml.selection.cv.CrossValidationResult): 7 usages
DecisionTreeClassificationTrainer (org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer): 7 usages
DecisionTreeModel (org.apache.ignite.ml.tree.DecisionTreeModel): 7 usages
EncoderTrainer (org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer): 6 usages
RandomStrategy (org.apache.ignite.ml.selection.paramgrid.RandomStrategy): 4 usages
HashMap (java.util.HashMap): 3 usages
SimpleGDUpdateCalculator (org.apache.ignite.ml.optimization.updatecalculators.SimpleGDUpdateCalculator): 3 usages
LogisticRegressionModel (org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel): 3 usages
LogisticRegressionSGDTrainer (org.apache.ignite.ml.regressions.logistic.LogisticRegressionSGDTrainer): 3 usages
Test (org.junit.Test): 3 usages
Pipeline (org.apache.ignite.ml.pipeline.Pipeline): 2 usages
EvolutionOptimizationStrategy (org.apache.ignite.ml.selection.paramgrid.EvolutionOptimizationStrategy): 2 usages
MinMaxScalerTrainer (org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainer): 1 usage
NormalizationTrainer (org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer): 1 usage
BruteForceStrategy (org.apache.ignite.ml.selection.paramgrid.BruteForceStrategy): 1 usage