Use of org.apache.ignite.ml.math.Matrix in the Apache Ignite project.
From class VectorToMatrixTest, method testLikeMatrix.
/**
 * Verifies that {@link Vector#likeMatrix} yields a non-null matrix of the type
 * registered for the vector class and with exactly the requested dimensions.
 */
@Test
public void testLikeMatrix() {
    consumeSampleVectors((v, desc) -> {
        if (!availableForTesting(v))
            return;

        // Smallest possible companion matrix is enough for the type check.
        Matrix smallest = v.likeMatrix(1, 1);

        Class<? extends Vector> vecCls = v.getClass();
        Class<? extends Matrix> expMatrixType = typesMap.get(vecCls);

        assertNotNull("Expect non-null matrix for " + vecCls.getSimpleName() + " in " + desc, smallest);

        Class<? extends Matrix> actualMatrixType = smallest.getClass();

        assertTrue("Expected matrix type " + expMatrixType.getSimpleName()
            + " should be assignable from actual type " + actualMatrixType.getSimpleName() + " in " + desc,
            expMatrixType.isAssignableFrom(actualMatrixType));

        // Requested dimensions must be honored for every rows/cols combination.
        int[] dims = { 1, 2 };

        for (int rows : dims) {
            for (int cols : dims) {
                Matrix created = v.likeMatrix(rows, cols);

                String details = "rows " + rows + " cols " + cols;

                assertNotNull("Expect non-null matrix for " + details + " in " + desc, created);
                assertEquals("Unexpected number of rows in " + desc, rows, created.rowSize());
                assertEquals("Unexpected number of cols in " + desc, cols, created.columnSize());
            }
        }
    });
}
Use of org.apache.ignite.ml.math.Matrix in the Apache Ignite project.
From class LUDecompositionExample, method main.
/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    System.out.println(">>> LU decomposition example started.");

    // Decompose an (n x n) matrix as m = p l u, where p is an (n x n)
    // row-permutation matrix and l / u are (n x n) lower / upper
    // triangular matrices.
    DenseLocalOnHeapMatrix mtx = new DenseLocalOnHeapMatrix(new double[][] {
        { 1.0d, 1.0d, -1.0d },
        { 1.0d, -2.0d, 3.0d },
        { 2.0d, 3.0d, 1.0d }
    });

    System.out.println("\n>>> Matrix m for decomposition: ");
    Tracer.showAscii(mtx);

    // LU decomposition is useful for systems of linear equations
    // (see https://en.wikipedia.org/wiki/LU_decomposition): to solve
    // m x = b for several right-hand sides, compute the decomposition once,
    // then feed the sides packed as a matrix (b1, b2, ..., bm) to
    // LUDecomposition::solve, which returns (sol1, sol2, ..., solm).
    LUDecomposition dec = new LUDecomposition(mtx);

    System.out.println("\n>>> Made decomposition.");

    System.out.println(">>> Matrix getL is ");
    Tracer.showAscii(dec.getL());

    System.out.println(">>> Matrix getU is ");
    Tracer.showAscii(dec.getU());

    System.out.println(">>> Matrix getP is ");
    Tracer.showAscii(dec.getP());

    Matrix rhs = new DenseLocalOnHeapMatrix(new double[][] { { 4.0, -6.0, 7.0 }, { 1.0, 1.0, 1.0 } });

    System.out.println("\n>>> Matrix to solve: ");
    Tracer.showAscii(rhs);

    Matrix sol = dec.solve(rhs.transpose());

    System.out.println("\n>>> List of solutions: ");

    for (int col = 0; col < sol.columnSize(); col++)
        Tracer.showAscii(sol.viewColumn(col));

    System.out.println("\n>>> LU decomposition example completed.");
}
Use of org.apache.ignite.ml.math.Matrix in the Apache Ignite project.
From class OffHeapMatrixExample, method main.
/**
 * Executes example: transposition, multiplication and determinants of
 * off-heap matrices. Off-heap matrices keep their data in native memory,
 * so they are explicitly destroyed at the end of the example.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Off-heap matrix API usage example started.");
    System.out.println("\n>>> Creating a matrix to be transposed.");
    double[][] data = new double[][] { { 1, 2, 3 }, { 4, 5, 6 } };
    Matrix m = new DenseLocalOffHeapMatrix(data.length, data[0].length);
    m.assign(data);
    Matrix transposed = m.transpose();
    System.out.println(">>> Matrix: ");
    MatrixExampleUtil.print(m);
    System.out.println(">>> Transposed matrix: ");
    MatrixExampleUtil.print(transposed);
    MatrixExampleUtil.verifyTransposition(m, transposed);
    System.out.println("\n>>> Creating matrices to be multiplied.");
    double[][] data1 = new double[][] { { 1, 2 }, { 3, 4 } };
    double[][] data2 = new double[][] { { 5, 6 }, { 7, 8 } };
    Matrix m1 = new DenseLocalOffHeapMatrix(data1.length, data1[0].length);
    Matrix m2 = new DenseLocalOffHeapMatrix(data2.length, data2[0].length);
    m1.assign(data1);
    m2.assign(data2);
    Matrix mult = m1.times(m2);
    System.out.println(">>> First matrix: ");
    MatrixExampleUtil.print(m1);
    System.out.println(">>> Second matrix: ");
    MatrixExampleUtil.print(m2);
    System.out.println(">>> Matrix product: ");
    MatrixExampleUtil.print(mult);
    System.out.println("\n>>> Calculating matrices determinants.");
    double det1 = m1.determinant();
    double det2 = m2.determinant();
    double detMult = mult.determinant();
    // det(A * B) == det(A) * det(B); compare with a tolerance because of
    // floating-point rounding.
    boolean detMultIsAsExp = Math.abs(detMult - det1 * det2) < 0.0001d;
    System.out.println(">>> First matrix determinant: [" + det1 + "].");
    System.out.println(">>> Second matrix determinant: [" + det2 + "].");
    System.out.println(">>> Matrix product determinant: [" + detMult + "], equals product of two other matrices determinants: [" + detMultIsAsExp + "].");
    System.out.println("Determinant of product matrix [" + detMult + "] should be equal to product of determinants [" + (det1 * det2) + "].");
    // Off-heap matrices hold native memory that the GC does not reclaim:
    // release it explicitly once the example no longer needs them.
    m.destroy();
    transposed.destroy();
    m1.destroy();
    m2.destroy();
    mult.destroy();
    System.out.println("\n>>> Off-heap matrix API usage example completed.");
}
Use of org.apache.ignite.ml.math.Matrix in the Apache Ignite project.
From class SparseMatrixExample, method main.
/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Sparse matrix API usage example started.");

    // Part 1: transpose a sparse matrix and verify the result.
    System.out.println("\n>>> Creating a matrix to be transposed.");

    double[][] src = new double[][] { { 1, 2, 3 }, { 4, 5, 6 } };

    Matrix orig = new SparseLocalOnHeapMatrix(src.length, src[0].length);
    orig.assign(src);

    Matrix transposed = orig.transpose();

    System.out.println(">>> Matrix: ");
    MatrixExampleUtil.print(orig);

    System.out.println(">>> Transposed matrix: ");
    MatrixExampleUtil.print(transposed);

    MatrixExampleUtil.verifyTransposition(orig, transposed);

    // Part 2: multiply two sparse matrices.
    System.out.println("\n>>> Creating matrices to be multiplied.");

    double[][] srcA = new double[][] { { 1, 2 }, { 3, 4 } };
    double[][] srcB = new double[][] { { 5, 6 }, { 7, 8 } };

    Matrix a = new SparseLocalOnHeapMatrix(srcA.length, srcA[0].length);
    Matrix b = new SparseLocalOnHeapMatrix(srcB.length, srcB[0].length);

    a.assign(srcA);
    b.assign(srcB);

    Matrix product = a.times(b);

    System.out.println(">>> First matrix: ");
    MatrixExampleUtil.print(a);

    System.out.println(">>> Second matrix: ");
    MatrixExampleUtil.print(b);

    System.out.println(">>> Matrix product: ");
    MatrixExampleUtil.print(product);

    // Part 3: check det(A * B) == det(A) * det(B) up to rounding tolerance.
    System.out.println("\n>>> Calculating matrices determinants.");

    double detA = a.determinant();
    double detB = b.determinant();
    double detProduct = product.determinant();

    boolean detMatches = Math.abs(detProduct - detA * detB) < 0.0001d;

    System.out.println(">>> First matrix determinant: [" + detA + "].");
    System.out.println(">>> Second matrix determinant: [" + detB + "].");
    System.out.println(">>> Matrix product determinant: [" + detProduct + "], equals product of two other matrices determinants: [" + detMatches + "].");
    System.out.println("Determinant of product matrix [" + detProduct + "] should be equal to product of determinants [" + (detA * detB) + "].");

    System.out.println("\n>>> Sparse matrix API usage example completed.");
}
Use of org.apache.ignite.ml.math.Matrix in the Apache Ignite project.
From class MLPGroupTrainerExample, method main.
/**
 * Executes example: trains a distributed multilayer perceptron on the XOR
 * function and reports how often the trained model fails to reproduce it.
 *
 * @param args Command line arguments, none required.
 * @throws InterruptedException If waiting for the worker thread is interrupted.
 */
public static void main(String[] args) throws InterruptedException {
// IMPL NOTE based on MLPGroupTrainerTest#testXOR
System.out.println(">>> Distributed multilayer perceptron example started.");
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
// Create IgniteThread, we must work with SparseDistributedMatrix inside IgniteThread
// because we create ignite cache internally.
igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(), MLPGroupTrainerExample.class.getSimpleName(), () -> {
// Number of labeled XOR samples streamed into the training cache.
int samplesCnt = 10000;
// Four XOR input points / labels; transposed so that each sample is a column.
Matrix xorInputs = new DenseLocalOnHeapMatrix(new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } }, StorageConstants.ROW_STORAGE_MODE).transpose();
Matrix xorOutputs = new DenseLocalOnHeapMatrix(new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } }, StorageConstants.ROW_STORAGE_MODE).transpose();
// Network shape: 2 inputs -> 10 RELU units (with biases) -> 1 sigmoid output (no bias).
MLPArchitecture conf = new MLPArchitecture(2).withAddedLayer(10, true, Activators.RELU).withAddedLayer(1, false, Activators.SIGMOID);
IgniteCache<Integer, LabeledVector<Vector, Vector>> cache = LabeledVectorsCache.createNew(ignite);
String cacheName = cache.getName();
// Fixed seed so the sampling and weight initialization are reproducible.
Random rnd = new Random(12345L);
// Stream samplesCnt randomly chosen XOR samples into the cache.
try (IgniteDataStreamer<Integer, LabeledVector<Vector, Vector>> streamer = ignite.dataStreamer(cacheName)) {
streamer.perNodeBufferSize(100);
for (int i = 0; i < samplesCnt; i++) {
// Pick one of the 4 XOR points uniformly at random.
int col = Math.abs(rnd.nextInt()) % 4;
streamer.addData(i, new LabeledVector<>(xorInputs.getCol(col), xorOutputs.getCol(col)));
}
}
// Train totalCnt independent models and count how many miss the XOR target.
int totalCnt = 100;
int failCnt = 0;
MLPGroupUpdateTrainer<RPropParameterUpdate> trainer = MLPGroupUpdateTrainer.getDefault(ignite).withSyncPeriod(3).withTolerance(0.001).withMaxGlobalSteps(20);
for (int i = 0; i < totalCnt; i++) {
MLPGroupUpdateTrainerCacheInput trainerInput = new MLPGroupUpdateTrainerCacheInput(conf, new RandomInitializer(rnd), 6, cache, 10);
MultilayerPerceptron mlp = trainer.train(trainerInput);
// Predict on the 4 canonical XOR inputs and compare against the expected labels.
Matrix predict = mlp.apply(xorInputs);
System.out.println(">>> Prediction data at step " + i + " of total " + totalCnt + ":");
Tracer.showAscii(predict);
System.out.println("Difference estimate: " + xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2));
// closeEnough presumably applies a tolerance-based comparison — see its definition.
failCnt += closeEnough(xorOutputs.getRow(0), predict.getRow(0)) ? 0 : 1;
}
double failRatio = (double) failCnt / totalCnt;
System.out.println("\n>>> Fail percentage: " + (failRatio * 100) + "%.");
System.out.println("\n>>> Distributed multilayer perceptron example completed.");
});
// Run the training on the dedicated Ignite thread and wait for it to finish.
igniteThread.start();
igniteThread.join();
}
}
Aggregations