
Example 26 with Vector

Use of org.apache.ignite.ml.math.primitives.vector.Vector in project ignite by apache.

The class TitanicUtils, method readPassengersWithoutNulls.

/**
 * Reads passenger data from a CSV file.
 *
 * @param ignite The Ignite instance.
 * @return The filled cache.
 * @throws FileNotFoundException If data file is not found.
 */
public static IgniteCache<Integer, Vector> readPassengersWithoutNulls(Ignite ignite) throws FileNotFoundException {
    IgniteCache<Integer, Vector> cache = getCache(ignite);
    Scanner scanner = new Scanner(IgniteUtils.resolveIgnitePath("examples/src/main/resources/datasets/titanic_without_nulls.csv"));
    int cnt = 0;
    while (scanner.hasNextLine()) {
        String row = scanner.nextLine();
        // Skip the CSV header row.
        if (cnt == 0) {
            cnt++;
            continue;
        }
        String[] cells = row.split(";");
        Serializable[] data = new Serializable[cells.length];
        NumberFormat format = NumberFormat.getInstance(Locale.FRANCE);
        for (int i = 0; i < cells.length; i++) {
            try {
                // Empty cells become NaN; dot-decimal numbers parse directly.
                data[i] = "".equals(cells[i]) ? Double.NaN : Double.valueOf(cells[i]);
            } catch (NumberFormatException e) {
                try {
                    // Fall back to the French locale to handle comma decimal separators (e.g. "36,5").
                    data[i] = format.parse(cells[i]).doubleValue();
                } catch (ParseException e1) {
                    // Not numeric at all: keep the raw string value.
                    data[i] = cells[i];
                }
            }
        }
        cache.put(cnt++, new DenseVector(data));
    }
    return cache;
}
Also used : Scanner(java.util.Scanner) Serializable(java.io.Serializable) ParseException(java.text.ParseException) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) DenseVector(org.apache.ignite.ml.math.primitives.vector.impl.DenseVector) NumberFormat(java.text.NumberFormat)
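
A note on the double try/catch above: some numeric cells in the Titanic CSV use a comma as the decimal separator, which Double.valueOf rejects, so the reader falls back to a French-locale NumberFormat. A minimal, self-contained sketch of that fallback (the sample value "36,5" is made up for illustration):

import java.text.NumberFormat;
import java.text.ParseException;
import java.util.Locale;

public class CommaDecimalSketch {
    public static void main(String[] args) throws ParseException {
        NumberFormat frenchFormat = NumberFormat.getInstance(Locale.FRANCE);

        // Plain dot-decimal cells parse directly.
        System.out.println(Double.valueOf("36.5"));                    // 36.5

        // Comma-decimal cells throw NumberFormatException in Double.valueOf,
        // so the reader retries with the French-locale parser.
        System.out.println(frenchFormat.parse("36,5").doubleValue());  // 36.5
    }
}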

Example 27 with Vector

Use of org.apache.ignite.ml.math.primitives.vector.Vector in project ignite by apache.

The class Step_14_Parallel_Brute_Force_Search, method main.

/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 14 (Brute Force) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).withEncodedFeature(6).fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            NormalizationTrainer<Integer, Vector> normalizationTrainer = new NormalizationTrainer<Integer, Vector>().withP(1);
            Preprocessor<Integer, Vector> normalizationPreprocessor = normalizationTrainer.fit(ignite, dataCache, minMaxScalerPreprocessor);
            // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
            DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
            CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator = new CrossValidation<>();
            ParamGrid paramGrid = new ParamGrid()
                .withParameterSearchStrategy(new BruteForceStrategy())
                .addHyperParam("p", normalizationTrainer::withP,
                    new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 })
                .addHyperParam("maxDeep", trainerCV::withMaxDeep,
                    new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 })
                .addHyperParam("minImpurityDecrease", trainerCV::withMinImpurityDecrease,
                    new Double[] { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 });
            scoreCalculator
                .withIgnite(ignite)
                .withUpstreamCache(dataCache)
                .withEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder()
                    .withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
                    .withLoggingFactoryDependency(ConsoleLogger.Factory.LOW))
                .withTrainer(trainerCV)
                .isRunningOnPipeline(false)
                .withMetric(MetricName.ACCURACY)
                .withFilter(split.getTrainFilter())
                .withPreprocessor(normalizationPreprocessor)
                .withAmountOfFolds(3)
                .withParamGrid(paramGrid);
            CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();
            System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep") + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer().withMaxDeep(crossValidationRes.getBest("maxDeep")).withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));
            System.out.println(crossValidationRes);
            System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
            System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
            System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());
            crossValidationRes.getScoringBoard().forEach((hyperParams, score) -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
            // Train decision tree model.
            DecisionTreeModel bestMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + bestMdl);
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), bestMdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 14 (Brute Force) example completed.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) FileNotFoundException(java.io.FileNotFoundException) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) BruteForceStrategy(org.apache.ignite.ml.selection.paramgrid.BruteForceStrategy) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation)
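
Because BruteForceStrategy evaluates every point of the grid, the example wires in a parallelism strategy and three folds; the workload grows multiplicatively with each hyper-parameter. A back-of-the-envelope sketch of the resulting size (plain Java, counts taken from the grid above):

public class BruteForceGridSizeSketch {
    public static void main(String[] args) {
        int pValues = 10;              // "p": 1.0 .. 10.0
        int maxDeepValues = 10;        // "maxDeep": 1.0 .. 10.0
        int minImpurityValues = 11;    // "minImpurityDecrease": 0.0 .. 1.0 in steps of 0.1
        int folds = 3;                 // withAmountOfFolds(3)

        int gridPoints = pValues * maxDeepValues * minImpurityValues;
        System.out.println("Hyper-parameter combinations: " + gridPoints);                // 1100
        System.out.println("Model fits (combinations x folds): " + (gridPoints * folds)); // 3300
    }
}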

Example 28 with Vector

Use of org.apache.ignite.ml.math.primitives.vector.Vector in project ignite by apache.

The class Step_16_Genetic_Programming_Search, method main.

/**
 * Run example.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 16 (Genetic Programming) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);
            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);
            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);
            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>().withEncoderType(EncoderType.STRING_ENCODER).withEncodedFeature(1).withEncodedFeature(6).fit(ignite, dataCache, vectorizer);
            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>().fit(ignite, dataCache, strEncoderPreprocessor);
            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, dataCache, imputingPreprocessor);
            NormalizationTrainer<Integer, Vector> normalizationTrainer = new NormalizationTrainer<Integer, Vector>().withP(1);
            Preprocessor<Integer, Vector> normalizationPreprocessor = normalizationTrainer.fit(ignite, dataCache, minMaxScalerPreprocessor);
            // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
            DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();
            CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator = new CrossValidation<>();
            ParamGrid paramGrid = new ParamGrid()
                .withParameterSearchStrategy(new EvolutionOptimizationStrategy())
                .addHyperParam("p", normalizationTrainer::withP,
                    new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 })
                .addHyperParam("maxDeep", trainerCV::withMaxDeep,
                    new Double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 })
                .addHyperParam("minImpurityDecrease", trainerCV::withMinImpurityDecrease,
                    new Double[] { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 });
            scoreCalculator
                .withIgnite(ignite)
                .withUpstreamCache(dataCache)
                .withTrainer(trainerCV)
                .withMetric(MetricName.ACCURACY)
                .withFilter(split.getTrainFilter())
                .isRunningOnPipeline(false)
                .withPreprocessor(normalizationPreprocessor)
                .withAmountOfFolds(3)
                .withParamGrid(paramGrid);
            CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();
            System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep") + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));
            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer().withMaxDeep(crossValidationRes.getBest("maxDeep")).withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));
            System.out.println(crossValidationRes);
            System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
            System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
            System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());
            crossValidationRes.getScoringBoard().forEach((hyperParams, score) -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));
            // Train decision tree model.
            DecisionTreeModel bestMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);
            System.out.println("\n>>> Trained model: " + bestMdl);
            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), bestMdl, normalizationPreprocessor, new Accuracy<>());
            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 16 (Genetic Programming) example completed.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        System.out.flush();
    }
}
Also used : DecisionTreeModel(org.apache.ignite.ml.tree.DecisionTreeModel) FileNotFoundException(java.io.FileNotFoundException) EvolutionOptimizationStrategy(org.apache.ignite.ml.selection.paramgrid.EvolutionOptimizationStrategy) Ignite(org.apache.ignite.Ignite) EncoderTrainer(org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) CrossValidationResult(org.apache.ignite.ml.selection.cv.CrossValidationResult) ParamGrid(org.apache.ignite.ml.selection.paramgrid.ParamGrid) DecisionTreeClassificationTrainer(org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer) CrossValidation(org.apache.ignite.ml.selection.cv.CrossValidation)
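
Structurally, step 16 differs from step 14 only in the search strategy handed to ParamGrid: the evolutionary strategy samples the same grid instead of enumerating it exhaustively, so it typically fits far fewer models. A minimal sketch of the swap, using only the calls that appear in the two examples (the useGeneticSearch flag is hypothetical, purely for illustration):

// Hypothetical flag for illustration; the builder calls come from the examples above.
boolean useGeneticSearch = true;

ParamGrid paramGrid;
if (useGeneticSearch)
    paramGrid = new ParamGrid().withParameterSearchStrategy(new EvolutionOptimizationStrategy()); // step 16
else
    paramGrid = new ParamGrid().withParameterSearchStrategy(new BruteForceStrategy());            // step 14
// ... then add the same hyper-parameters with addHyperParam(...) as shown above.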

Example 29 with Vector

Use of org.apache.ignite.ml.math.primitives.vector.Vector in project ignite by apache.

The class ImputingExample, method createCache.

/**
 * Fills the cache with sample person vectors; Double.NaN marks missing values.
 */
private static IgniteCache<Integer, Vector> createCache(Ignite ignite) {
    CacheConfiguration<Integer, Vector> cacheConfiguration = new CacheConfiguration<>();
    cacheConfiguration.setName("PERSONS");
    cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, 2));
    IgniteCache<Integer, Vector> persons = ignite.createCache(cacheConfiguration);
    // Distinct keys keep all seven rows in the cache; Double.NaN marks missing values.
    persons.put(1, new DenseVector(new Serializable[] { "Mike", 10, 1 }));
    persons.put(2, new DenseVector(new Serializable[] { "John", 20, 2 }));
    persons.put(3, new DenseVector(new Serializable[] { "George", 15, 1 }));
    persons.put(4, new DenseVector(new Serializable[] { "Piter", 25, Double.NaN }));
    persons.put(5, new DenseVector(new Serializable[] { "Karl", Double.NaN, 1 }));
    persons.put(6, new DenseVector(new Serializable[] { "Gustaw", 20, 2 }));
    persons.put(7, new DenseVector(new Serializable[] { "Alex", 20, 2 }));
    return persons;
}
Also used : Serializable(java.io.Serializable) RendezvousAffinityFunction(org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) DenseVector(org.apache.ignite.ml.math.primitives.vector.impl.DenseVector) CacheConfiguration(org.apache.ignite.configuration.CacheConfiguration)
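
The Double.NaN cells above are exactly what an imputer is meant to fill. A minimal sketch of fitting one on this cache, following the fit(...) pattern used in the other examples (treating columns 1 and 2 as the numeric features is an assumption of this sketch):

IgniteCache<Integer, Vector> persons = createCache(ignite);

// Column 0 holds the name string; columns 1 and 2 are the numeric features.
Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<>(1, 2);

// Fit the imputer; the resulting preprocessor replaces the Double.NaN cells
// with values learned from the non-missing entries of each column.
Preprocessor<Integer, Vector> imputer = new ImputerTrainer<Integer, Vector>()
    .fit(ignite, persons, vectorizer);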

Example 30 with Vector

Use of org.apache.ignite.ml.math.primitives.vector.Vector in project ignite by apache.

The class MinMaxScalerExample, method main.

/**
 * Run example.
 */
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> MinMax preprocessing example started.");
        IgniteCache<Integer, Vector> data = null;
        try {
            data = createCache(ignite);
            Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<>(1, 2);
            // Defines a preprocessor that rescales each feature to the [0, 1] range (min-max scaling).
            Preprocessor<Integer, Vector> preprocessor = new MinMaxScalerTrainer<Integer, Vector>().fit(ignite, data, vectorizer);
            // Creates a cache-based SimpleDataset that contains the features and provides the standard dataset API.
            try (SimpleDataset<?> dataset = DatasetFactory.createSimpleDataset(ignite, data, preprocessor)) {
                new DatasetHelper(dataset).describe();
            }
            System.out.println(">>> MinMax preprocessing example completed.");
        } finally {
            if (data != null)
                data.destroy();
        }
    } finally {
        System.out.flush();
    }
}
Also used : DummyVectorizer(org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer) Ignite(org.apache.ignite.Ignite) Vector(org.apache.ignite.ml.math.primitives.vector.Vector) DenseVector(org.apache.ignite.ml.math.primitives.vector.impl.DenseVector) DatasetHelper(org.apache.ignite.examples.ml.util.DatasetHelper)
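
For reference, the fitted MinMaxScaler preprocessor rescales each feature with (x - min) / (max - min), mapping the column minimum to 0 and the maximum to 1. A tiny self-contained sketch of that arithmetic on made-up values:

public class MinMaxSketch {
    public static void main(String[] args) {
        double[] feature = { 10, 20, 15, 25 };   // made-up raw values of one feature
        double min = 10, max = 25;               // column minimum and maximum

        for (double x : feature)
            System.out.println(x + " -> " + (x - min) / (max - min));
        // 10.0 -> 0.0, 20.0 -> 0.666..., 15.0 -> 0.333..., 25.0 -> 1.0
    }
}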

Aggregations

Vector (org.apache.ignite.ml.math.primitives.vector.Vector): 265
DenseVector (org.apache.ignite.ml.math.primitives.vector.impl.DenseVector): 95
Test (org.junit.Test): 94
Ignite (org.apache.ignite.Ignite): 78
LabeledVector (org.apache.ignite.ml.structures.LabeledVector): 49
HashMap (java.util.HashMap): 39
SandboxMLCache (org.apache.ignite.examples.ml.util.SandboxMLCache): 38
DummyVectorizer (org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer): 26
FileNotFoundException (java.io.FileNotFoundException): 22
TrainerTest (org.apache.ignite.ml.common.TrainerTest): 22
DecisionTreeClassificationTrainer (org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer): 21
DecisionTreeModel (org.apache.ignite.ml.tree.DecisionTreeModel): 21
Serializable (java.io.Serializable): 19
IgniteCache (org.apache.ignite.IgniteCache): 18
EncoderTrainer (org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer): 16
Cache (javax.cache.Cache): 15
DoubleArrayVectorizer (org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer): 15
EuclideanDistance (org.apache.ignite.ml.math.distances.EuclideanDistance): 14
ArrayList (java.util.ArrayList): 12
ModelsComposition (org.apache.ignite.ml.composition.ModelsComposition): 12