Use of org.apache.ignite.ml.tree.randomforest.RandomForestRegressionTrainer in project Ignite by Apache:
the class RandomForestRegressionExportImportExample, method main.
/**
 * Run example: trains a Random Forest regression model on the Boston house prices
 * dataset, exports it to a temporary JSON file, imports it back, and checks that
 * the imported model evaluates (via MAE) like the original.
 *
 * @param args Command line arguments (unused).
 * @throws IOException If the temporary JSON model file cannot be created.
 */
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> Random Forest regression algorithm over cached dataset usage example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println("\n>>> Ignite grid started.");

        IgniteCache<Integer, Vector> dataCache = null;
        Path jsonMdlPath = null;
        try {
            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.BOSTON_HOUSE_PRICES);

            // One FeatureMeta per feature column (all vector coordinates except the label).
            AtomicInteger idx = new AtomicInteger(0);
            RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(
                IntStream.range(0, dataCache.get(1).size() - 1)
                    .mapToObj(x -> new FeatureMeta("", idx.getAndIncrement(), false))
                    .collect(Collectors.toList()))
                .withAmountOfTrees(101)
                .withFeaturesCountSelectionStrgy(FeaturesCountSelectionStrategies.ONE_THIRD)
                .withMaxDepth(4)
                .withMinImpurityDelta(0.)
                .withSubSampleSize(0.3)
                .withSeed(0);

            trainer.withEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder()
                .withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
                .withLoggingFactoryDependency(ConsoleLogger.Factory.LOW));

            System.out.println("\n>>> Configured trainer: " + trainer.getClass().getSimpleName());

            // First vector coordinate is the label; the rest are features.
            Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

            RandomForestModel mdl = trainer.fit(ignite, dataCache, vectorizer);

            System.out.println("\n>>> Exported Random Forest regression model: " + mdl.toString(true));

            double mae = evaluateModel(dataCache, mdl);
            System.out.println("\n>>> Mean absolute error (MAE) for exported Random Forest regression model " + mae);

            // Round-trip the model through JSON.
            jsonMdlPath = Files.createTempFile(null, null);
            mdl.toJSON(jsonMdlPath);

            RandomForestModel modelImportedFromJSON = RandomForestModel.fromJSON(jsonMdlPath);

            // Fix: this model was just imported from JSON — the original message said "Exported".
            System.out.println("\n>>> Imported Random Forest regression model: " + modelImportedFromJSON.toString(true));

            mae = evaluateModel(dataCache, modelImportedFromJSON);
            System.out.println("\n>>> Mean absolute error (MAE) for imported Random Forest regression model " + mae);

            System.out.println("\n>>> Random Forest regression algorithm over cached dataset usage example completed.");
        }
        finally {
            if (dataCache != null)
                dataCache.destroy();
            // Clean up the temporary model file.
            if (jsonMdlPath != null)
                Files.deleteIfExists(jsonMdlPath);
        }
    }
    finally {
        System.out.flush();
    }
}
Use of org.apache.ignite.ml.tree.randomforest.RandomForestRegressionTrainer in project Ignite by Apache:
the class LearningEnvironmentTest, method testBasic.
/**
 * Checks that dependencies configured through {@link LearningEnvironmentBuilder}
 * (parallelism strategy and logger factory) are propagated into the trainer's
 * learning environment.
 */
@Test
public void testBasic() {
    // The trainer's ML configuration is irrelevant here; an empty feature list suffices.
    RandomForestRegressionTrainer rfTrainer = new RandomForestRegressionTrainer(
        IntStream.range(0, 0)
            .mapToObj(i -> new FeatureMeta("", 0, false))
            .collect(Collectors.toList()))
        .withAmountOfTrees(101)
        .withFeaturesCountSelectionStrgy(FeaturesCountSelectionStrategies.ONE_THIRD)
        .withMaxDepth(4)
        .withMinImpurityDelta(0.)
        .withSubSampleSize(0.3)
        .withSeed(0);

    LearningEnvironmentBuilder environmentBuilder = LearningEnvironmentBuilder.defaultBuilder()
        .withParallelismStrategyType(ParallelismStrategy.Type.ON_DEFAULT_POOL)
        .withLoggingFactoryDependency(part -> ConsoleLogger.factory(MLLogger.VerboseLevel.LOW));

    rfTrainer.withEnvironmentBuilder(environmentBuilder);

    // Both configured dependencies must surface in the trainer's environment.
    assertEquals(DefaultParallelismStrategy.class, rfTrainer.learningEnvironment().parallelismStrategy().getClass());
    assertEquals(ConsoleLogger.class, rfTrainer.learningEnvironment().logger().getClass());
}
Use of org.apache.ignite.ml.tree.randomforest.RandomForestRegressionTrainer in project Ignite by Apache:
the class RandomForestRegressionExample, method main.
/**
 * Run example: trains a Random Forest regression model over the cached Boston
 * house prices dataset, then scores every cached observation to report the
 * mean squared error (MSE) and mean absolute error (MAE).
 *
 * @param args Command line arguments (unused).
 * @throws IOException If loading the sandbox dataset fails.
 */
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> Random Forest regression algorithm over cached dataset usage example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");

        IgniteCache<Integer, Vector> dataCache = null;
        try {
            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.BOSTON_HOUSE_PRICES);

            // One FeatureMeta per feature column (every vector coordinate except the label).
            AtomicInteger featureIdx = new AtomicInteger(0);
            RandomForestRegressionTrainer trainer = new RandomForestRegressionTrainer(
                IntStream.range(0, dataCache.get(1).size() - 1)
                    .mapToObj(i -> new FeatureMeta("", featureIdx.getAndIncrement(), false))
                    .collect(Collectors.toList()))
                .withAmountOfTrees(101)
                .withFeaturesCountSelectionStrgy(FeaturesCountSelectionStrategies.ONE_THIRD)
                .withMaxDepth(4)
                .withMinImpurityDelta(0.)
                .withSubSampleSize(0.3)
                .withSeed(0);

            trainer.withEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder()
                .withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
                .withLoggingFactoryDependency(ConsoleLogger.Factory.LOW));

            System.out.println(">>> Configured trainer: " + trainer.getClass().getSimpleName());

            // First vector coordinate is the label; the remainder are features.
            Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

            ModelsComposition randomForestMdl = trainer.fit(ignite, dataCache, vectorizer);

            System.out.println(">>> Trained model: " + randomForestMdl.toString(true));

            double mse = 0.0;
            double mae = 0.0;
            int totalAmount = 0;

            // Score every cached observation against the trained model.
            try (QueryCursor<Cache.Entry<Integer, Vector>> cursor = dataCache.query(new ScanQuery<>())) {
                for (Cache.Entry<Integer, Vector> entry : cursor) {
                    Vector row = entry.getValue();

                    Vector features = row.copyOfRange(1, row.size());
                    double groundTruth = row.get(0);

                    double prediction = randomForestMdl.predict(features);

                    mae += Math.abs(prediction - groundTruth);
                    mse += Math.pow(prediction - groundTruth, 2.0);

                    totalAmount++;
                }

                System.out.println("\n>>> Evaluated model on " + totalAmount + " data points.");

                mse /= totalAmount;
                System.out.println("\n>>> Mean squared error (MSE) " + mse);

                mae /= totalAmount;
                System.out.println("\n>>> Mean absolute error (MAE) " + mae);

                System.out.println(">>> Random Forest regression algorithm over cached dataset usage example completed.");
            }
        }
        finally {
            if (dataCache != null)
                dataCache.destroy();
        }
    }
    finally {
        System.out.flush();
    }
}
Aggregations