Usage of org.apache.ignite.ml.math.Matrix in the Apache Ignite project:
class AbstractMultipleLinearRegressionTest, method testNewSample.
/**
 * Checks that the newSampleData overloads agree on how the unitary (intercept)
 * column is inserted into the design matrix. Confirms the fix for MATH-411.
 */
@Test
public void testNewSample() {
    // Flattened [y | x] sample: 4 observations with 3 regressors each.
    double[] design = new double[] { 1, 19, 22, 33, 2, 20, 30, 40, 3, 25, 35, 45, 4, 27, 37, 47 };
    double[] y = new double[] { 1, 2, 3, 4 };
    double[][] x = new double[][] { { 19, 22, 33 }, { 20, 30, 40 }, { 25, 35, 45 }, { 27, 37, 47 } };

    AbstractMultipleLinearRegression regression = createRegression();

    // Load the combined sample and snapshot the resulting design matrix and Y.
    regression.newSampleData(design, 4, 3, new DenseLocalOnHeapMatrix());
    Matrix expX = regression.getX().copy();
    Vector expY = regression.getY().copy();

    // Loading X and Y separately must yield the same matrices.
    regression.newXSampleData(new DenseLocalOnHeapMatrix(x));
    regression.newYSampleData(new DenseLocalOnHeapVector(y));
    Assert.assertEquals(expX, regression.getX());
    Assert.assertEquals(expY, regression.getY());

    // Repeat the comparison with the intercept column disabled.
    regression.setNoIntercept(true);
    regression.newSampleData(design, 4, 3, new DenseLocalOnHeapMatrix());
    expX = regression.getX().copy();
    expY = regression.getY().copy();
    regression.newXSampleData(new DenseLocalOnHeapMatrix(x));
    regression.newYSampleData(new DenseLocalOnHeapVector(y));
    Assert.assertEquals(expX, regression.getX());
    Assert.assertEquals(expY, regression.getY());
}
Usage of org.apache.ignite.ml.math.Matrix in the Apache Ignite project:
class OLSMultipleLinearRegressionTest, method testPerfectFit.
/**
 * Checks OLS estimates on a perfectly fitting sample: exact coefficients,
 * zero residuals, a known parameter-variance matrix and R-squared of 1.
 */
@Test
public void testPerfectFit() {
    double[] betaHat = regression.estimateRegressionParameters();
    TestUtils.assertEquals(new double[] { 11.0, 1.0 / 2.0, 2.0 / 3.0, 3.0 / 4.0, 4.0 / 5.0, 5.0 / 6.0 }, betaHat, 1e-13);

    double[] residuals = regression.estimateResiduals();
    TestUtils.assertEquals(new double[] { 0d, 0d, 0d, 0d, 0d, 0d }, residuals, 1e-13);

    Matrix errors = regression.estimateRegressionParametersVariance();

    // Reference variance matrix: first row is s itself; elsewhere s[i]*s[j],
    // doubled on the diagonal.
    final double[] s = { 1.0, -1.0 / 2.0, -1.0 / 3.0, -1.0 / 4.0, -1.0 / 5.0, -1.0 / 6.0 };
    Matrix refVar = new DenseLocalOnHeapMatrix(s.length, s.length);

    for (int row = 0; row < refVar.rowSize(); row++) {
        for (int col = 0; col < refVar.columnSize(); col++) {
            if (row == 0)
                refVar.setX(row, col, s[col]);
            else {
                double prod = s[row] * s[col];
                refVar.setX(row, col, row == col ? 2 * prod : prod);
            }
        }
    }

    Assert.assertEquals(0.0, TestUtils.maximumAbsoluteRowSum(errors.minus(refVar)),
        5.0e-16 * TestUtils.maximumAbsoluteRowSum(refVar));
    Assert.assertEquals(1, ((OLSMultipleLinearRegression) regression).calculateRSquared(), 1E-12);
}
Usage of org.apache.ignite.ml.math.Matrix in the Apache Ignite project:
class OLSMultipleLinearRegressionTest, method testNewSample2.
/**
 * Verifies that setting X and Y separately has the same effect as newSample(X,Y).
 */
@Test
public void testNewSample2() {
    double[] y = new double[] { 1, 2, 3, 4 };
    double[][] x = new double[][] { { 19, 22, 33 }, { 20, 30, 40 }, { 25, 35, 45 }, { 27, 37, 47 } };

    OLSMultipleLinearRegression regression = new OLSMultipleLinearRegression();

    // Load X and Y together and snapshot the resulting matrices.
    regression.newSampleData(new DenseLocalOnHeapVector(y), new DenseLocalOnHeapMatrix(x));
    Matrix expX = regression.getX().copy();
    Vector expY = regression.getY().copy();

    // Setting X and Y separately must reproduce the combined result.
    regression.newXSampleData(new DenseLocalOnHeapMatrix(x));
    regression.newYSampleData(new DenseLocalOnHeapVector(y));
    Assert.assertEquals(expX, regression.getX());
    Assert.assertEquals(expY, regression.getY());

    // Same check without the intercept column.
    regression.setNoIntercept(true);
    regression.newSampleData(new DenseLocalOnHeapVector(y), new DenseLocalOnHeapMatrix(x));
    expX = regression.getX().copy();
    expY = regression.getY().copy();
    regression.newXSampleData(new DenseLocalOnHeapMatrix(x));
    regression.newYSampleData(new DenseLocalOnHeapVector(y));
    Assert.assertEquals(expX, regression.getX());
    Assert.assertEquals(expY, regression.getY());
}
Usage of org.apache.ignite.ml.math.Matrix in the Apache Ignite project:
class MatrixCustomStorageExample, method main.
/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    System.out.println();
    System.out.println(">>> Matrix API usage example started.");

    // Part 1: transposition.
    System.out.println("\n>>> Creating a matrix to be transposed.");
    double[][] src = new double[][] { { 1, 2, 3 }, { 4, 5, 6 } };
    Matrix original = new MatrixCustomStorage(src);
    Matrix transposed = original.transpose();

    System.out.println(">>> Matrix: ");
    MatrixExampleUtil.print(original);
    System.out.println(">>> Transposed matrix: ");
    MatrixExampleUtil.print(transposed);
    MatrixExampleUtil.verifyTransposition(original, transposed);

    // Part 2: multiplication.
    System.out.println("\n>>> Creating matrices to be multiplied.");
    Matrix left = new MatrixCustomStorage(new double[][] { { 1, 2 }, { 3, 4 } });
    Matrix right = new MatrixCustomStorage(new double[][] { { 5, 6 }, { 7, 8 } });
    Matrix product = left.times(right);

    System.out.println(">>> First matrix: ");
    MatrixExampleUtil.print(left);
    System.out.println(">>> Second matrix: ");
    MatrixExampleUtil.print(right);
    System.out.println(">>> Matrix product: ");
    MatrixExampleUtil.print(product);

    // Part 3: determinants — det(A*B) should equal det(A)*det(B) up to rounding.
    System.out.println("\n>>> Calculating matrices determinants.");
    double detLeft = left.determinant();
    double detRight = right.determinant();
    double detProduct = product.determinant();
    boolean detMultIsAsExp = Math.abs(detProduct - detLeft * detRight) < 0.0001d;

    System.out.println(">>> First matrix determinant: [" + detLeft + "].");
    System.out.println(">>> Second matrix determinant: [" + detRight + "].");
    System.out.println(">>> Matrix product determinant: [" + detProduct + "], equals product of two other matrices determinants: [" + detMultIsAsExp + "].");
    System.out.println("Determinant of product matrix [" + detProduct + "] should be equal to product of determinants [" + (detLeft * detRight) + "].");
    System.out.println("\n>>> Matrix API usage example completed.");
}
Usage of org.apache.ignite.ml.math.Matrix in the Apache Ignite project:
class MLPLocalTrainerExample, method main.
/**
 * Executes example: trains a small multilayer perceptron locally on the XOR
 * truth table, then applies the trained model to the same inputs and prints
 * the prediction error.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
// IMPL NOTE based on MLPLocalTrainerTest#testXORRProp
System.out.println(">>> Local multilayer perceptron example started.");
// XOR input pairs; transposed so that samples become columns — presumably the
// layout SimpleMLPLocalBatchTrainerInput expects. TODO confirm against that class.
Matrix xorInputs = new DenseLocalOnHeapMatrix(new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } }, StorageConstants.ROW_STORAGE_MODE).transpose();
System.out.println("\n>>> Input data:");
Tracer.showAscii(xorInputs);
// Expected XOR outputs for the four inputs above, transposed the same way.
Matrix xorOutputs = new DenseLocalOnHeapMatrix(new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } }, StorageConstants.ROW_STORAGE_MODE).transpose();
// Network topology: 2 inputs -> 10-neuron RELU hidden layer (with bias) -> 1 SIGMOID output (no bias).
MLPArchitecture conf = new MLPArchitecture(2).withAddedLayer(10, true, Activators.RELU).withAddedLayer(1, false, Activators.SIGMOID);
// Trailing 4 is presumably the batch size (all four XOR samples per batch) — verify against SimpleMLPLocalBatchTrainerInput.
SimpleMLPLocalBatchTrainerInput trainerInput = new SimpleMLPLocalBatchTrainerInput(conf, new Random(1234L), xorInputs, xorOutputs, 4);
System.out.println("\n>>> Perform training.");
// MSE loss with RProp weight updates; 0.0001 and 16000 are presumably the
// convergence threshold and iteration cap — confirm against MLPLocalBatchTrainer.
MultilayerPerceptron mlp = new MLPLocalBatchTrainer<>(LossFunctions.MSE, RPropUpdateCalculator::new, 0.0001, 16000).train(trainerInput);
System.out.println("\n>>> Apply model.");
Matrix predict = mlp.apply(xorInputs);
System.out.println("\n>>> Predicted data:");
Tracer.showAscii(predict);
System.out.println("\n>>> Reference expected data:");
Tracer.showAscii(xorOutputs);
// Euclidean (2-)norm of the difference between expected and predicted output rows.
System.out.println("\n>>> Difference estimate: " + xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2));
System.out.println("\n>>> Local multilayer perceptron example completed.");
}
Aggregations