Use of org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel in project ignite by apache.
The class LogRegFromSparkThroughPMMLExample, method main.
/**
* Run example.
*/
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> Logistic regression model loaded from PMML over partitioned dataset usage example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");
        IgniteCache<Integer, Vector> dataCache = null;
        try {
            Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);

            String path = IgniteUtils.resolveIgnitePath("examples/src/main/resources/models/spark/iris.pmml")
                .toPath().toAbsolutePath().toString();

            LogisticRegressionModel mdl = PMMLParser.load(path);

            System.out.println(">>> Logistic regression model: " + mdl);

            double accuracy = Evaluator.evaluate(dataCache, mdl, vectorizer, new Accuracy<>());

            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
        }
        finally {
            if (dataCache != null)
                dataCache.destroy();
        }
    }
}
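Beyond bulk evaluation, the loaded model can also score individual observations. The sketch below is illustrative and not part of the original example: it assumes the mdl variable from the snippet above is in scope and that VectorUtils is imported; the feature values are made up.

    // Hedged sketch: single-point inference with the model loaded above.
    // The feature values are hypothetical and only show the call shape.
    Vector observation = VectorUtils.of(5.1, 3.5, 1.4, 0.2);

    // LogisticRegressionModel implements IgniteModel<Vector, Double>,
    // so predict(...) returns the predicted class label as a double.
    double prediction = mdl.predict(observation);
    System.out.println(">>> Predicted label: " + prediction);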
Use of org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel in project ignite by apache.
The class Step_9_Scaling_With_Stacking, method main.
/**
* Run example.
*/
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Tutorial step 9 (scaling with stacking) example started.");
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        try {
            IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);

            // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
            final Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);

            TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);

            Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>()
                .withEncoderType(EncoderType.STRING_ENCODER)
                .withEncodedFeature(1)
                .withEncodedFeature(6) // <--- Changed index here.
                .fit(ignite, dataCache, vectorizer);

            Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>()
                .fit(ignite, dataCache, strEncoderPreprocessor);

            Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>()
                .fit(ignite, dataCache, imputingPreprocessor);

            Preprocessor<Integer, Vector> normalizationPreprocessor = new NormalizationTrainer<Integer, Vector>()
                .withP(1)
                .fit(ignite, dataCache, minMaxScalerPreprocessor);

            DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
            DecisionTreeClassificationTrainer trainer1 = new DecisionTreeClassificationTrainer(3, 0);
            DecisionTreeClassificationTrainer trainer2 = new DecisionTreeClassificationTrainer(4, 0);

            LogisticRegressionSGDTrainer aggregator = new LogisticRegressionSGDTrainer()
                .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2),
                    SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG));

            StackedModel<Vector, Vector, Double, LogisticRegressionModel> mdl =
                new StackedVectorDatasetTrainer<>(aggregator)
                    .addTrainerWithDoubleOutput(trainer)
                    .addTrainerWithDoubleOutput(trainer1)
                    .addTrainerWithDoubleOutput(trainer2)
                    .fit(ignite, dataCache, split.getTrainFilter(), normalizationPreprocessor);

            System.out.println("\n>>> Trained model: " + mdl);

            double accuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), mdl,
                normalizationPreprocessor, new Accuracy<>());

            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println("\n>>> Test Error " + (1 - accuracy));
            System.out.println(">>> Tutorial step 9 (scaling with stacking) example completed.");
        }
        catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }
    finally {
        System.out.flush();
    }
}
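The preprocessors in this step form a chain: each trainer's fit(...) call takes the previously fitted preprocessor as its upstream extractor, so the effective order is string encoder, imputer, min-max scaler, normalizer. As a hedged illustration (not part of the original tutorial), a shorter chain is obtained by simply stopping earlier, for example fitting the stacked trainer on the min-max scaled features and skipping normalization:

    // Illustrative variation only: reuses the trainers and fitted preprocessors
    // from the snippet above, but stops the preprocessing chain one stage earlier.
    StackedModel<Vector, Vector, Double, LogisticRegressionModel> mdlWithoutNormalization =
        new StackedVectorDatasetTrainer<>(aggregator)
            .addTrainerWithDoubleOutput(trainer)
            .addTrainerWithDoubleOutput(trainer1)
            .addTrainerWithDoubleOutput(trainer2)
            .fit(ignite, dataCache, split.getTrainFilter(), minMaxScalerPreprocessor);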
Use of org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel in project ignite by apache.
The class LogisticRegressionSGDTrainerExample, method main.
/**
* Run example.
*/
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> Logistic regression model over partitioned dataset usage example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");
        IgniteCache<Integer, Vector> dataCache = null;
        try {
            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);

            System.out.println(">>> Create new logistic regression trainer object.");
            LogisticRegressionSGDTrainer trainer = new LogisticRegressionSGDTrainer()
                .withUpdatesStgy(new UpdatesStrategy<>(new SimpleGDUpdateCalculator(0.2),
                    SimpleGDParameterUpdate.SUM_LOCAL, SimpleGDParameterUpdate.AVG))
                .withMaxIterations(100000)
                .withLocIterations(100)
                .withBatchSize(10)
                .withSeed(123L);

            System.out.println(">>> Perform the training to get the model.");
            Vectorizer<Integer, Vector, Integer, Double> vectorizer =
                new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

            LogisticRegressionModel mdl = trainer.fit(ignite, dataCache, vectorizer);

            System.out.println(">>> Logistic regression model: " + mdl);

            double accuracy = Evaluator.evaluate(dataCache, mdl, vectorizer, MetricName.ACCURACY);

            System.out.println("\n>>> Accuracy " + accuracy);
            System.out.println(">>> Logistic regression model over partitioned dataset usage example completed.");
        }
        finally {
            if (dataCache != null)
                dataCache.destroy();
        }
    }
    finally {
        System.out.flush();
    }
}
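Note that this example evaluates the model on the same cache it was trained on. A hedged refinement (not part of the original example) is to reuse the train/test split shown in the stacking tutorial above, assuming the trainer and evaluator accept the same filter-based overloads used there:

    // Illustrative sketch: hold out 25% of the rows for an out-of-sample estimate.
    TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>().split(0.75);

    LogisticRegressionModel heldOutMdl = trainer.fit(ignite, dataCache, split.getTrainFilter(), vectorizer);

    double testAccuracy = Evaluator.evaluate(dataCache, split.getTestFilter(), heldOutMdl, vectorizer, new Accuracy<>());
    System.out.println("\n>>> Hold-out accuracy " + testAccuracy);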
Use of org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel in project ignite by apache.
The class CollectionsTest, method test.
/**
*/
@Test
@SuppressWarnings("unchecked")
public void test() {
    test(new VectorizedViewMatrix(new DenseMatrix(2, 2), 1, 1, 1, 1),
        new VectorizedViewMatrix(new DenseMatrix(3, 2), 2, 1, 1, 1));

    specialTest(new ManhattanDistance(), new ManhattanDistance());
    specialTest(new HammingDistance(), new HammingDistance());
    specialTest(new EuclideanDistance(), new EuclideanDistance());

    FeatureMetadata data = new FeatureMetadata("name2");
    data.setName("name1");
    test(data, new FeatureMetadata("name2"));

    test(new DatasetRow<>(new DenseVector()), new DatasetRow<>(new DenseVector(1)));
    test(new LabeledVector<>(new DenseVector(), null), new LabeledVector<>(new DenseVector(1), null));
    test(new Dataset<DatasetRow<Vector>>(new DatasetRow[] {}, new FeatureMetadata[] {}),
        new Dataset<DatasetRow<Vector>>(new DatasetRow[] {new DatasetRow()}, new FeatureMetadata[] {new FeatureMetadata()}));

    test(new LogisticRegressionModel(new DenseVector(), 1.0), new LogisticRegressionModel(new DenseVector(), 0.5));

    test(new KMeansModelFormat(new Vector[] {}, new ManhattanDistance()),
        new KMeansModelFormat(new Vector[] {}, new HammingDistance()));
    test(new KMeansModel(new Vector[] {}, new ManhattanDistance()),
        new KMeansModel(new Vector[] {}, new HammingDistance()));

    test(new SVMLinearClassificationModel(null, 1.0), new SVMLinearClassificationModel(null, 0.5));

    test(new ANNClassificationModel(new LabeledVectorSet<>(), new ANNClassificationTrainer.CentroidStat()),
        new ANNClassificationModel(new LabeledVectorSet<>(1, 1), new ANNClassificationTrainer.CentroidStat()));
    test(new ANNModelFormat(1, new ManhattanDistance(), false, new LabeledVectorSet<>(), new ANNClassificationTrainer.CentroidStat()),
        new ANNModelFormat(2, new ManhattanDistance(), false, new LabeledVectorSet<>(), new ANNClassificationTrainer.CentroidStat()));
}
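The test(...) and specialTest(...) helpers are defined elsewhere in CollectionsTest and are not shown here. As a rough, illustrative sketch only (an assumption, not the actual helper), each call pairs an instance with a deliberately different one, so a minimal contract check could look like the following; checkEqualsContract is a hypothetical name and org.junit.Assert plus java.util.HashSet/Arrays are assumed imported.

    // Hypothetical sketch of the kind of contract such a helper could verify.
    private static <T> void checkEqualsContract(T obj, T other) {
        // An object equals itself and its hashCode is stable.
        Assert.assertEquals(obj, obj);
        Assert.assertEquals(obj.hashCode(), obj.hashCode());

        // A differently constructed instance is not equal to it.
        Assert.assertNotEquals(obj, other);

        // Both behave as distinct keys in a hash-based collection.
        Assert.assertEquals(2, new HashSet<>(Arrays.asList(obj, other)).size());
    }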
Use of org.apache.ignite.ml.regressions.logistic.LogisticRegressionModel in project ignite by apache.
The class LocalModelsTest, method importExportLogisticRegressionModelTest.
/**
*/
@Test
public void importExportLogisticRegressionModelTest() throws IOException {
    executeModelTest(mdlFilePath -> {
        LogisticRegressionModel mdl = new LogisticRegressionModel(new DenseVector(new double[] {1, 2}), 3);

        Exporter<LogisticRegressionModel, String> exporter = new FileExporter<>();
        mdl.saveModel(exporter, mdlFilePath);

        LogisticRegressionModel load = exporter.load(mdlFilePath);

        Assert.assertNotNull(load);
        Assert.assertEquals("", mdl, load);

        return null;
    });
}
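Outside of a test, the same export/import pattern can persist a model trained earlier. The sketch below is illustrative only: it reuses the trainer, ignite, dataCache and vectorizer names from the LogisticRegressionSGDTrainerExample above, and the file path is hypothetical.

    // Hedged sketch: persist a freshly trained model and load it back.
    LogisticRegressionModel trainedMdl = trainer.fit(ignite, dataCache, vectorizer);

    Exporter<LogisticRegressionModel, String> exporter = new FileExporter<>();
    trainedMdl.saveModel(exporter, "/tmp/logreg.model");    // hypothetical location

    LogisticRegressionModel restored = exporter.load("/tmp/logreg.model");
    System.out.println(">>> Restored model: " + restored);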