Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer in project ignite by apache.
The class BinarizationExample, method main:
/**
 * Run example.
 */
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Binarization example started.");

        IgniteCache<Integer, Vector> data = null;
        try {
            data = createCache(ignite);

            Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<>(1);

            // Defines a preprocessor that binarizes features against the given threshold.
            Preprocessor<Integer, Vector> preprocessor = new BinarizationTrainer<Integer, Vector>()
                .withThreshold(40)
                .fit(ignite, data, vectorizer);

            // Creates a cache-based simple dataset containing the features and providing the standard dataset API.
            try (SimpleDataset<?> dataset = DatasetFactory.createSimpleDataset(ignite, data, preprocessor)) {
                new DatasetHelper(dataset).describe();
            }

            System.out.println(">>> Binarization example completed.");
        } finally {
            if (data != null)
                data.destroy();
        }
    } finally {
        System.out.flush();
    }
}
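The createCache helper called above (and in the next two snippets) is not shown on this page. A minimal sketch of what it might look like, following the conventions of Ignite's ML examples; the cache name and sample rows here are hypothetical, not from the original source:

private static IgniteCache<Integer, Vector> createCache(Ignite ignite) {
    CacheConfiguration<Integer, Vector> cacheConfiguration = new CacheConfiguration<>();

    // Hypothetical name; the real examples typically generate a unique one per run.
    cacheConfiguration.setName("PREPROCESSING_EXAMPLE_CACHE");
    cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, 10));

    IgniteCache<Integer, Vector> cache = ignite.createCache(cacheConfiguration);

    // Illustrative rows only: each value is a dense vector of raw features;
    // coordinate 1 (and 2, where used) is what the DummyVectorizer instances above select.
    cache.put(1, VectorUtils.of(1, 2, 4));
    cache.put(2, VectorUtils.of(1, 33, 1));
    cache.put(3, VectorUtils.of(1, 13, 22));

    return cache;
}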
Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer in project ignite by apache.
The class MaxAbsScalerExample, method main:
/**
 * Run example.
 */
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Max abs example started.");

        IgniteCache<Integer, Vector> data = null;
        try {
            data = createCache(ignite);

            Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<>(1, 2);

            // Defines a preprocessor that scales each feature by its maximum absolute value.
            Preprocessor<Integer, Vector> preprocessor = new MaxAbsScalerTrainer<Integer, Vector>().fit(ignite, data, vectorizer);

            // Creates a cache-based simple dataset containing the features and providing the standard dataset API.
            try (SimpleDataset<?> dataset = DatasetFactory.createSimpleDataset(ignite, data, preprocessor)) {
                new DatasetHelper(dataset).describe();
            }

            System.out.println(">>> Max abs example completed.");
        } finally {
            if (data != null)
                data.destroy();
        }
    } finally {
        System.out.flush();
    }
}
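A fitted preprocessor is itself a function over cache entries, so it can be applied to a single row as well as passed to a dataset builder. A sketch under the assumption that Preprocessor#apply(key, value) returns a LabeledVector whose features() are the transformed values; the raw row below is hypothetical:

// Hypothetical raw row; coordinates 1 and 2 are the features selected by the vectorizer above.
Vector raw = VectorUtils.of(0, 10, -5);

// The fitted preprocessor first applies the vectorizer, then divides each feature
// by the maximum absolute value observed during fit, so outputs lie in [-1, 1].
Vector scaled = preprocessor.apply(0, raw).features();

System.out.println(">>> Scaled features: " + scaled);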
Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer in project ignite by apache.
The class NormalizationExample, method main:
/**
 * Run example.
 */
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Normalization example started.");

        IgniteCache<Integer, Vector> data = null;
        try {
            data = createCache(ignite);

            Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<>(1, 2);

            // Defines a preprocessor that normalizes each row using the p-norm with p = 1 (the L1 norm).
            Preprocessor<Integer, Vector> preprocessor = new NormalizationTrainer<Integer, Vector>()
                .withP(1)
                .fit(ignite, data, vectorizer);

            // Creates a cache-based simple dataset containing the features and providing the standard dataset API.
            try (SimpleDataset<?> dataset = DatasetFactory.createSimpleDataset(ignite, data, preprocessor)) {
                new DatasetHelper(dataset).describe();
            }

            System.out.println(">>> Normalization example completed.");
        } finally {
            if (data != null)
                data.destroy();
        }
    } finally {
        System.out.flush();
    }
}
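withP(1) selects the L1 norm: each row is divided by the sum of the absolute values of its features, so those absolute values sum to 1 after normalization. A minimal by-hand sketch of the same computation, with hypothetical values:

// Manual L1 normalization of a single two-feature row.
Vector row = VectorUtils.of(2, 6);
double l1 = Math.abs(row.get(0)) + Math.abs(row.get(1));

// Yields [0.25, 0.75]; the normalized absolute values sum to 1.
Vector normalized = row.divide(l1);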
Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer in project ignite by apache.
The class OneVsRestClassificationExample, method main:
/**
 * Run example.
 */
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> One-vs-Rest SVM Multi-class classification model over cached dataset usage example started.");

    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");

        IgniteCache<Integer, Vector> dataCache = null;
        try {
            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.GLASS_IDENTIFICATION);

            OneVsRestTrainer<SVMLinearClassificationModel> trainer = new OneVsRestTrainer<>(
                new SVMLinearClassificationTrainer()
                    .withAmountOfIterations(20)
                    .withAmountOfLocIterations(50)
                    .withLambda(0.2)
                    .withSeed(1234L));

            MultiClassModel<SVMLinearClassificationModel> mdl = trainer.fit(ignite, dataCache, new DummyVectorizer<Integer>().labeled(0));

            System.out.println(">>> One-vs-Rest SVM Multi-class model");
            System.out.println(mdl.toString());

            MinMaxScalerTrainer<Integer, Vector> minMaxScalerTrainer = new MinMaxScalerTrainer<>();

            Preprocessor<Integer, Vector> preprocessor = minMaxScalerTrainer.fit(ignite, dataCache, new DummyVectorizer<Integer>().labeled(0));

            MultiClassModel<SVMLinearClassificationModel> mdlWithScaling = trainer.fit(ignite, dataCache, preprocessor);

            System.out.println(">>> One-vs-Rest SVM Multi-class model with MinMaxScaling");
            System.out.println(mdlWithScaling.toString());

            System.out.println(">>> ----------------------------------------------------------------");
            System.out.println(">>> | Prediction\t| Prediction with MinMaxScaling\t| Ground Truth\t|");
            System.out.println(">>> ----------------------------------------------------------------");

            int amountOfErrors = 0;
            int amountOfErrorsWithMinMaxScaling = 0;
            int totalAmount = 0;

            // Build confusion matrix. See https://en.wikipedia.org/wiki/Confusion_matrix
            int[][] confusionMtx = {{0, 0, 0}, {0, 0, 0}, {0, 0, 0}};
            int[][] confusionMtxWithMinMaxScaling = {{0, 0, 0}, {0, 0, 0}, {0, 0, 0}};

            try (QueryCursor<Cache.Entry<Integer, Vector>> observations = dataCache.query(new ScanQuery<>())) {
                for (Cache.Entry<Integer, Vector> observation : observations) {
                    Vector val = observation.getValue();
                    Vector inputs = val.copyOfRange(1, val.size());
                    double groundTruth = val.get(0);

                    double prediction = mdl.predict(inputs);
                    double predictionWithMinMaxScaling = mdlWithScaling.predict(inputs);

                    totalAmount++;

                    // Collect data for model
                    if (!Precision.equals(groundTruth, prediction, Precision.EPSILON))
                        amountOfErrors++;

                    int idx1 = (int)prediction == 1 ? 0 : ((int)prediction == 3 ? 1 : 2);
                    int idx2 = (int)groundTruth == 1 ? 0 : ((int)groundTruth == 3 ? 1 : 2);

                    confusionMtx[idx1][idx2]++;

                    // Collect data for model with min-max scaling
                    if (!Precision.equals(groundTruth, predictionWithMinMaxScaling, Precision.EPSILON))
                        amountOfErrorsWithMinMaxScaling++;

                    idx1 = (int)predictionWithMinMaxScaling == 1 ? 0 : ((int)predictionWithMinMaxScaling == 3 ? 1 : 2);
                    idx2 = (int)groundTruth == 1 ? 0 : ((int)groundTruth == 3 ? 1 : 2);

                    confusionMtxWithMinMaxScaling[idx1][idx2]++;

                    System.out.printf(">>> | %.4f\t\t| %.4f\t\t\t\t\t\t| %.4f\t\t|\n", prediction, predictionWithMinMaxScaling, groundTruth);
                }
                System.out.println(">>> ----------------------------------------------------------------");

                System.out.println("\n>>> -----------------One-vs-Rest SVM model-------------");
                System.out.println("\n>>> Absolute amount of errors " + amountOfErrors);
                System.out.println("\n>>> Accuracy " + (1 - amountOfErrors / (double)totalAmount));
                System.out.println("\n>>> Confusion matrix is " + Arrays.deepToString(confusionMtx));

                System.out.println("\n>>> -----------------One-vs-Rest SVM model with MinMaxScaling-------------");
                System.out.println("\n>>> Absolute amount of errors " + amountOfErrorsWithMinMaxScaling);
                System.out.println("\n>>> Accuracy " + (1 - amountOfErrorsWithMinMaxScaling / (double)totalAmount));
                System.out.println("\n>>> Confusion matrix is " + Arrays.deepToString(confusionMtxWithMinMaxScaling));

                System.out.println(">>> One-vs-Rest SVM model over cache based dataset usage example completed.");
            }
        } finally {
            if (dataCache != null)
                dataCache.destroy();
        }
    } finally {
        System.out.flush();
    }
}
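Instead of scanning the cache and counting misclassifications by hand, the accuracy could also be obtained from the Evaluator API that the linear regression snippet below uses for RMSE. A sketch, assuming MetricName.ACCURACY is available and that the call sits inside the try block above where dataCache and mdl are in scope:

// Hedged alternative to the manual error count above.
double accuracy = Evaluator.evaluate(dataCache, mdl, new DummyVectorizer<Integer>().labeled(0), MetricName.ACCURACY);

System.out.println("\n>>> Accuracy " + accuracy);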
Use of org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer in project ignite by apache.
The class LinearRegressionLSQRTrainerExample, method main:
/**
 * Run example.
 */
public static void main(String[] args) throws IOException {
    System.out.println();
    System.out.println(">>> Linear regression model over cache based dataset usage example started.");

    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");

        IgniteCache<Integer, Vector> dataCache = null;
        try {
            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.MORTALITY_DATA);

            System.out.println(">>> Create new linear regression trainer object.");
            LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer();

            System.out.println(">>> Perform the training to get the model.");

            // This object is used to extract features and labels from upstream entities, which are
            // essentially tuples of the form (key, value) (in our case (Integer, Vector)).
            // The key part of the tuple is ignored in this example.
            // The label is extracted from the 0th entry of the value (which is a Vector),
            // and the features are the remaining part of the vector. Alternatively, we could use the
            // DatasetTrainer#fit(Ignite, IgniteCache, IgniteBiFunction, IgniteBiFunction) method,
            // which takes a separate lambda for extracting the label from (key, value) and a separate
            // lambda for extracting the features.
            LinearRegressionModel mdl = trainer.fit(ignite, dataCache, new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST));

            double rmse = Evaluator.evaluate(dataCache, mdl, new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST), MetricName.RMSE);

            System.out.println("\n>>> Rmse = " + rmse);

            System.out.println(">>> Linear regression model over cache based dataset usage example completed.");
        } finally {
            if (dataCache != null)
                dataCache.destroy();
        }
    } finally {
        System.out.flush();
    }
}
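Once trained, the model can also score a single observation directly via predict. A brief sketch with a hypothetical feature vector; the real MORTALITY_DATA feature count and value ranges may differ:

// Hypothetical unlabeled observation; feature order must match the training vectors
// (everything after the 0th label coordinate).
Vector observation = VectorUtils.of(66.0, 78.2, 204.0, 8.8);

// LinearRegressionModel applies the learned weights and intercept.
double predicted = mdl.predict(observation);

System.out.println(">>> Predicted value: " + predicted);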