Use of org.apache.ignite.examples.ml.dataset.model.Person in project ignite by apache.
The class AlgorithmSpecificDatasetExample, method createCache.
/**
 * Creates a cache of persons and fills it with example data.
 */
private static IgniteCache<Integer, Person> createCache(Ignite ignite) {
    CacheConfiguration<Integer, Person> cacheConfiguration = new CacheConfiguration<>();

    cacheConfiguration.setName("PERSONS");
    // Two partitions, so the dataset computations run over two partition data objects.
    cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, 2));

    IgniteCache<Integer, Person> persons = ignite.createCache(cacheConfiguration);

    // Data where salary equals age, so the gradient descent below should converge to an intercept near 0 and a slope near 1.
    persons.put(1, new Person("Mike", 1, 1));
    persons.put(2, new Person("John", 2, 2));
    persons.put(3, new Person("George", 3, 3));
    persons.put(4, new Person("Karl", 4, 4));

    return persons;
}
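All of these snippets rely on the Person model class from the examples package, which is not shown in this listing. Below is a minimal sketch of what it might look like, assuming a plain POJO whose fields and getters are inferred from the constructor calls and the getAge()/getSalary() usages in the methods that follow:

/** A minimal sketch of the Person model used by the examples (fields and getters inferred from usage). */
public class Person {
    /** Name. */
    private final String name;

    /** Age. */
    private final double age;

    /** Salary. */
    private final double salary;

    /** Constructs a person with the given name, age and salary. */
    public Person(String name, double age, double salary) {
        this.name = name;
        this.age = age;
        this.salary = salary;
    }

    /** */
    public String getName() {
        return name;
    }

    /** */
    public double getAge() {
        return age;
    }

    /** */
    public double getSalary() {
        return salary;
    }
}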
Use of org.apache.ignite.examples.ml.dataset.model.Person in project ignite by apache.
The class AlgorithmSpecificDatasetExample, method main.
/**
* Run example.
*/
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Algorithm Specific Dataset example started.");

        IgniteCache<Integer, Person> persons = createCache(ignite);

        // Creates an algorithm-specific dataset to perform linear regression: it defines how features and
        // labels are extracted, and how partition data and context are created.
        try (AlgorithmSpecificDataset dataset = DatasetFactory.create(
            ignite,
            persons,
            (upstream, upstreamSize) -> new AlgorithmSpecificPartitionContext(),
            new SimpleLabeledDatasetDataBuilder<Integer, Person, AlgorithmSpecificPartitionContext>(
                (k, v) -> new double[] {v.getAge()},
                (k, v) -> v.getSalary(),
                1
            ).andThen((data, ctx) -> {
                double[] features = data.getFeatures();
                int rows = data.getRows();

                // Makes a copy of the features, supplemented by a leading column of 1.0 values (the intercept term).
                double[] a = new double[features.length + rows];

                for (int i = 0; i < rows; i++)
                    a[i] = 1.0;

                System.arraycopy(features, 0, a, rows, features.length);

                return new SimpleLabeledDatasetData(a, rows, data.getCols() + 1, data.getLabels());
            })
        ).wrap(AlgorithmSpecificDataset::new)) {
            // Trains a linear regression model using gradient descent.
            double[] linearRegressionMdl = new double[2];

            for (int i = 0; i < 1000; i++) {
                double[] gradient = dataset.gradient(linearRegressionMdl);

                // Stop as soon as the gradient norm is small enough.
                if (BLAS.getInstance().dnrm2(gradient.length, gradient, 1) < 1e-4)
                    break;

                for (int j = 0; j < gradient.length; j++)
                    linearRegressionMdl[j] -= 0.1 / persons.size() * gradient[j];
            }

            System.out.println("Linear Regression Model: " + Arrays.toString(linearRegressionMdl));
        }

        System.out.println(">>> Algorithm Specific Dataset example completed.");
    }
}
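The main method above references two algorithm-specific pieces that this listing does not include: the AlgorithmSpecificPartitionContext produced by the partition context builder and the AlgorithmSpecificDataset wrapper passed to wrap(...). Below is a minimal sketch of both, assuming the context carries only an iteration counter and the wrapper computes the least-squares gradient F^T (F * mdl - labels) over each partition via the dataset's computeWithCtx map-reduce call using plain loops (a BLAS-based implementation would be equally valid). Imports are omitted to match the listing style; the classes are assumed to be nested inside the example class.

/** A sketch of the partition context referenced above: assumed to carry only an iteration counter. */
private static class AlgorithmSpecificPartitionContext implements Serializable {
    /** */
    private static final long serialVersionUID = 1L;

    /** Number of gradient computations performed against this partition so far. */
    private int iteration;

    /** */
    int getIteration() {
        return iteration;
    }

    /** */
    void setIteration(int iteration) {
        this.iteration = iteration;
    }
}

/** A sketch of the wrapper passed to wrap(...): a DatasetWrapper that adds a gradient() method. */
private static class AlgorithmSpecificDataset
    extends DatasetWrapper<AlgorithmSpecificPartitionContext, SimpleLabeledDatasetData> {
    /** Wraps the delegate dataset produced by DatasetFactory.create(...). */
    AlgorithmSpecificDataset(Dataset<AlgorithmSpecificPartitionContext, SimpleLabeledDatasetData> delegate) {
        super(delegate);
    }

    /** Computes the least-squares gradient F^T (F * mdl - labels) in a map-reduce manner. */
    double[] gradient(double[] mdl) {
        return computeWithCtx((ctx, data, partIdx) -> {
            // Defensive: a partition may have no data.
            if (data == null)
                return null;

            int rows = data.getRows();
            int cols = data.getCols();

            // Features are stored as a flat column-major array: element (row, col) is features[col * rows + row].
            double[] features = data.getFeatures();
            double[] labels = data.getLabels();

            double[] grad = new double[cols];

            for (int row = 0; row < rows; row++) {
                // Residual of the current linear model on this row.
                double residual = -labels[row];

                for (int col = 0; col < cols; col++)
                    residual += mdl[col] * features[col * rows + row];

                for (int col = 0; col < cols; col++)
                    grad[col] += residual * features[col * rows + row];
            }

            // The context is the algorithm-specific state kept per partition between invocations.
            ctx.setIteration(ctx.getIteration() + 1);

            return grad;
        }, (a, b) -> {
            // Sums partial gradients from partitions; partitions without data reduce as null.
            if (a == null)
                return b;

            if (b == null)
                return a;

            double[] sum = new double[a.length];

            for (int i = 0; i < a.length; i++)
                sum[i] = a[i] + b[i];

            return sum;
        });
    }
}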
Use of org.apache.ignite.examples.ml.dataset.model.Person in project ignite by apache.
The class LocalDatasetExample, method createCache.
/**
 * Fills a map with example persons; the map serves as the upstream data for a local dataset.
 */
private static Map<Integer, Person> createCache(Ignite ignite) {
    Map<Integer, Person> persons = new HashMap<>();

    persons.put(1, new Person("Mike", 42, 10000));
    persons.put(2, new Person("John", 32, 64000));
    persons.put(3, new Person("George", 53, 120000));
    persons.put(4, new Person("Karl", 24, 70000));

    return persons;
}
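The LocalDatasetExample main method is not part of this listing. Below is a hedged sketch of how the map above could be consumed, assuming DatasetFactory also offers a local, map-based createSimpleDataset overload taking the upstream map, a partition count, a feature extractor and the number of columns (mirroring the cache-based overload used in CacheBasedDatasetExample below):

// A sketch only: the local, map-based DatasetFactory.createSimpleDataset overload and its parameter
// order (upstream map, partition count, feature extractor, column count) are assumptions here.
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Local Dataset example started.");

        Map<Integer, Person> persons = createCache(ignite);

        try (SimpleDataset<?> dataset = DatasetFactory.createSimpleDataset(
            persons,
            2, // Number of partitions the local map is split into.
            (k, v) -> new double[] {v.getAge(), v.getSalary()},
            2  // Number of feature columns.
        )) {
            // The same statistics API as in the cache-based example below.
            System.out.println("Mean \n\t" + Arrays.toString(dataset.mean()));
            System.out.println("Standard deviation \n\t" + Arrays.toString(dataset.std()));
        }

        System.out.println(">>> Local Dataset example completed.");
    }
}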
Use of org.apache.ignite.examples.ml.dataset.model.Person in project ignite by apache.
The class CacheBasedDatasetExample, method main.
/**
* Run example.
*/
public static void main(String[] args) throws Exception {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Cache Based Dataset example started.");

        IgniteCache<Integer, Person> persons = createCache(ignite);

        // Creates a cache-based simple dataset containing features and providing the standard dataset API.
        try (SimpleDataset<?> dataset = DatasetFactory.createSimpleDataset(
            ignite,
            persons,
            (k, v) -> new double[] {v.getAge(), v.getSalary()},
            2
        )) {
            // Calculation of the mean value. This calculation is performed in a map-reduce manner.
            double[] mean = dataset.mean();
            System.out.println("Mean \n\t" + Arrays.toString(mean));

            // Calculation of the standard deviation. This calculation is performed in a map-reduce manner.
            double[] std = dataset.std();
            System.out.println("Standard deviation \n\t" + Arrays.toString(std));

            // Calculation of the covariance matrix. This calculation is performed in a map-reduce manner.
            double[][] cov = dataset.cov();
            System.out.println("Covariance matrix ");
            for (double[] row : cov)
                System.out.println("\t" + Arrays.toString(row));

            // Calculation of the correlation matrix. This calculation is performed in a map-reduce manner.
            double[][] corr = dataset.corr();
            System.out.println("Correlation matrix ");
            for (double[] row : corr)
                System.out.println("\t" + Arrays.toString(row));
        }

        System.out.println(">>> Cache Based Dataset example completed.");
    }
}
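As a follow-up note: the standard deviation, covariance and correlation computed above are related in the usual way, so a quick consistency check could be added at the end of the same try block. The check below assumes all three statistics use the same (population) normalization, which may differ between Ignite versions:

// Sketch of a consistency check between the statistics above (assumes a common normalization).
for (int i = 0; i < std.length; i++) {
    // The standard deviation should match the square root of the covariance matrix diagonal.
    assert Math.abs(std[i] - Math.sqrt(cov[i][i])) < 1e-9;

    for (int j = 0; j < std.length; j++)
        // Correlation is the covariance rescaled by the two standard deviations.
        assert Math.abs(corr[i][j] - cov[i][j] / (std[i] * std[j])) < 1e-9;
}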
Use of org.apache.ignite.examples.ml.dataset.model.Person in project ignite by apache.
The class CacheBasedDatasetExample, method createCache.
/**
 * Creates a cache of persons and fills it with example data.
 */
private static IgniteCache<Integer, Person> createCache(Ignite ignite) {
    CacheConfiguration<Integer, Person> cacheConfiguration = new CacheConfiguration<>();

    cacheConfiguration.setName("PERSONS");
    // Two partitions, so the dataset computations run over two partition data objects.
    cacheConfiguration.setAffinity(new RendezvousAffinityFunction(false, 2));

    IgniteCache<Integer, Person> persons = ignite.createCache(cacheConfiguration);

    persons.put(1, new Person("Mike", 42, 10000));
    persons.put(2, new Person("John", 32, 64000));
    persons.put(3, new Person("George", 53, 120000));
    persons.put(4, new Person("Karl", 24, 70000));

    return persons;
}