Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer in the Apache Ignite project.
Class GDBOnTreesRegressionExportImportExample, method main.
/**
 * Run example: trains a GDB regression model, exports it to JSON, imports it
 * back and verifies predictions on generated data.
 *
 * @param args Command line arguments, none required.
 * @throws IOException If the temporary JSON model file cannot be created or deleted.
 */
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> GDB regression trainer example started.");

    // Bring up a local Ignite node for the duration of the example.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");

        CacheConfiguration<Integer, double[]> cacheCfg = createCacheConfiguration();
        IgniteCache<Integer, double[]> trainingSet = null;
        Path jsonMdlPath = null;
        try {
            trainingSet = fillTrainingData(ignite, cacheCfg);

            // GDB over decision trees with a mean-absolute-value convergence check.
            GDBTrainer trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 1, 0.)
                .withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.001));

            // Fit on the cached training set; the label is the last coordinate of each row.
            GDBModel mdl = trainer.fit(
                ignite,
                trainingSet,
                new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));

            System.out.println("\n>>> Exported GDB regression model: " + mdl.toString(true));
            predictOnGeneratedData(mdl);

            // Round-trip the model through a temporary JSON file to demonstrate export/import.
            jsonMdlPath = Files.createTempFile(null, null);
            mdl.toJSON(jsonMdlPath);

            IgniteFunction<Double, Double> lbMapper = lb -> lb;
            GDBModel modelImportedFromJSON = GDBModel.fromJSON(jsonMdlPath).withLblMapping(lbMapper);

            System.out.println("\n>>> Imported GDB regression model: " + modelImportedFromJSON.toString(true));
            predictOnGeneratedData(modelImportedFromJSON);

            System.out.println(">>> GDB regression trainer example completed.");
        }
        finally {
            // Always clean up the cache and the temp file, even on failure.
            if (trainingSet != null)
                trainingSet.destroy();
            if (jsonMdlPath != null)
                Files.deleteIfExists(jsonMdlPath);
        }
    }
    finally {
        System.out.flush();
    }
}
Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer in the Apache Ignite project.
Class RandomForestIntegrationTest, method testFit.
/**
 * Verifies that the random forest regression trainer builds the requested
 * number of trees and aggregates predictions by mean value.
 */
@Test
public void testFit() {
    int sampleSize = 100;

    CacheConfiguration<Integer, double[]> cacheCfg = new CacheConfiguration<>();
    cacheCfg.setAffinity(new RendezvousAffinityFunction(false, 10));
    cacheCfg.setName("TRAINING_SET");

    IgniteCache<Integer, double[]> data = ignite.createCache(cacheCfg);

    // Fill the cache with a step function: label is 1 for positive x, 0 otherwise.
    Random rnd = new Random(0);
    for (int i = 0; i < sampleSize; i++) {
        double x = rnd.nextDouble() - 0.5;
        data.put(i, new double[] {x, x > 0 ? 1 : 0});
    }

    ArrayList<FeatureMeta> meta = new ArrayList<>();
    meta.add(new FeatureMeta("", 0, false));

    RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(meta)
        .withAmountOfTrees(5)
        .withFeaturesCountSelectionStrgy(x -> 2);

    RandomForestModel mdl = trainer.fit(ignite, data, new DoubleArrayVectorizer<Integer>().labeled(1));

    assertTrue(mdl.getPredictionsAggregator() instanceof MeanValuePredictionsAggregator);
    assertEquals(5, mdl.getModels().size());
}
Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer in the Apache Ignite project.
Class StackingTest, method testSimpleVectorStack.
/**
 * Tests simple stack training: an MLP base trainer stacked under a linear
 * regression aggregator should learn XOR scaled by the label factor.
 */
@Test
public void testSimpleVectorStack() {
    StackedVectorDatasetTrainer<Double, LinearRegressionModel, Double> trainer =
        new StackedVectorDatasetTrainer<>();

    UpdatesStrategy<SmoothParametrized, SimpleGDParameterUpdate> updatesStgy = new UpdatesStrategy<>(
        new SimpleGDUpdateCalculator(0.2),
        SimpleGDParameterUpdate.SUM_LOCAL,
        SimpleGDParameterUpdate.AVG);

    // Small MLP: 2 inputs -> 10 ReLU units -> 1 sigmoid output.
    MLPArchitecture architecture = new MLPArchitecture(2)
        .withAddedLayer(10, true, Activators.RELU)
        .withAddedLayer(1, false, Activators.SIGMOID);

    DatasetTrainer<MultilayerPerceptron, Double> mlpTrainer =
        new MLPTrainer<>(architecture, LossFunctions.MSE, updatesStgy, 3000, 10, 50, 123L)
            .withConvertedLabels(VectorUtils::num2Arr);

    final double factor = 3;

    StackedModel<Vector, Vector, Double, LinearRegressionModel> mdl = trainer
        .withAggregatorTrainer(new LinearRegressionLSQRTrainer().withConvertedLabels(x -> x * factor))
        .addMatrix2MatrixTrainer(mlpTrainer)
        .withEnvironmentBuilder(TestUtils.testEnvBuilder())
        .fit(getCacheMock(xor), parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));

    // XOR truth table, scaled by the aggregator's label factor.
    assertEquals(0.0 * factor, mdl.predict(VectorUtils.of(0.0, 0.0)), 0.3);
    assertEquals(1.0 * factor, mdl.predict(VectorUtils.of(0.0, 1.0)), 0.3);
    assertEquals(1.0 * factor, mdl.predict(VectorUtils.of(1.0, 0.0)), 0.3);
    assertEquals(0.0 * factor, mdl.predict(VectorUtils.of(1.0, 1.0)), 0.3);
}
Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer in the Apache Ignite project.
Class GDBTrainerTest, method testFitRegression.
/**
 * Checks that GDB regression fits a linear target almost exactly, exposes the
 * expected ensemble structure, and stops early when a convergence checker is set.
 */
@Test
public void testFitRegression() {
    int size = 100;
    double from = -5.0;
    double to = 5.0;
    double step = Math.abs(from - to) / size;

    double[] xs = new double[size];
    double[] ys = new double[size];
    Map<Integer, double[]> learningSample = new HashMap<>();

    // Sample the line y = 2x on a uniform grid.
    for (int i = 0; i < size; i++) {
        xs[i] = from + step * i;
        ys[i] = 2 * xs[i];
        learningSample.put(i, new double[] {xs[i], ys[i]});
    }

    GDBTrainer trainer = new GDBRegressionOnTreesTrainer(1.0, 2000, 3, 0.0).withUsingIdx(true);

    IgniteModel<Vector, Double> mdl = trainer.fit(
        learningSample, 1, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.LAST));

    // Mean squared error over the training grid should be negligible.
    double mse = 0.0;
    for (int i = 0; i < size; i++)
        mse += Math.pow(ys[i] - mdl.predict(VectorUtils.of(xs[i])), 2);
    mse /= size;

    assertEquals(0.0, mse, 0.0001);

    ModelsComposition composition = (ModelsComposition)mdl;

    // All toString variants produce non-empty output.
    assertTrue(!composition.toString().isEmpty());
    assertTrue(!composition.toString(true).isEmpty());
    assertTrue(!composition.toString(false).isEmpty());

    // Without convergence checking the ensemble uses every iteration.
    composition.getModels().forEach(m -> assertTrue(m instanceof DecisionTreeModel));
    assertEquals(2000, composition.getModels().size());
    assertTrue(composition.getPredictionsAggregator() instanceof WeightedPredictionsAggregator);

    // With a convergence checker the ensemble should stop before 2000 trees.
    trainer = trainer.withCheckConvergenceStgyFactory(new MeanAbsValueConvergenceCheckerFactory(0.1));
    assertTrue(trainer.fit(learningSample, 1, new DoubleArrayVectorizer<Integer>().labeled(1)).getModels().size() < 2000);
}
Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DoubleArrayVectorizer in the Apache Ignite project.
Class BaggingTest, method testNaiveBaggingLogRegression.
/**
 * Test that bagged log regression makes correct predictions.
 */
@Test
public void testNaiveBaggingLogRegression() {
    Map<Integer, double[]> cacheMock = getCacheMock(twoLinearlySeparableClasses);

    DatasetTrainer<LogisticRegressionModel, Double> trainer = new LogisticRegressionSGDTrainer()
        .withUpdatesStgy(new UpdatesStrategy<>(
            new SimpleGDUpdateCalculator(0.2),
            SimpleGDParameterUpdate.SUM_LOCAL,
            SimpleGDParameterUpdate.AVG))
        .withMaxIterations(30000)
        .withLocIterations(100)
        .withBatchSize(10)
        .withSeed(123L);

    BaggedTrainer<Double> baggedTrainer = TrainerTransformers
        .makeBagged(trainer, 7, 0.7, 2, 2, new OnMajorityPredictionsAggregator())
        .withEnvironmentBuilder(TestUtils.testEnvBuilder());

    BaggedModel mdl = baggedTrainer.fit(
        cacheMock, parts, new DoubleArrayVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST));

    // Unwrap the first submodel of the parallel composition to inspect its weights.
    AdaptableDatasetModel outerMdl = (AdaptableDatasetModel)mdl.model();
    ModelsParallelComposition parallelComposition = (ModelsParallelComposition)outerMdl.innerModel();
    AdaptableDatasetModel firstSubMdl = (AdaptableDatasetModel)parallelComposition.submodels().get(0);
    Vector weights = ((LogisticRegressionModel)firstSubMdl.innerModel()).weights();

    TestUtils.assertEquals(firstMdlWeights.get(parts), weights, 0.0);

    TestUtils.assertEquals(0, mdl.predict(VectorUtils.of(100, 10)), PRECISION);
    TestUtils.assertEquals(1, mdl.predict(VectorUtils.of(10, 100)), PRECISION);
}
Aggregations