Use of org.apache.ignite.ml.math.Matrix in project Ignite by Apache:
the class DistributedLinearRegressionWithSGDTrainerExample, method main.
/**
 * Run example: trains a linear regression model with SGD over a sparse
 * distributed matrix, then prints each prediction next to its ground truth.
 *
 * @param args Command line arguments (unused).
 * @throws InterruptedException If interrupted while waiting for the worker thread to finish.
 */
public static void main(String[] args) throws InterruptedException {
    System.out.println();
    System.out.println(">>> Linear regression model over sparse distributed matrix API usage example started.");

    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");

        // Create IgniteThread, we must work with SparseDistributedMatrix inside IgniteThread
        // because we create ignite cache internally.
        // Fixed: the thread is now named after this example class; it previously used
        // SparseDistributedMatrixExample.class — a copy-paste leftover from another example.
        IgniteThread igniteThread = new IgniteThread(
            ignite.configuration().getIgniteInstanceName(),
            DistributedLinearRegressionWithSGDTrainerExample.class.getSimpleName(),
            () -> {
                // Create SparseDistributedMatrix, new cache will be created automagically.
                System.out.println(">>> Create new SparseDistributedMatrix inside IgniteThread.");
                SparseDistributedMatrix distributedMatrix = new SparseDistributedMatrix(data);

                System.out.println(">>> Create new linear regression trainer object.");
                Trainer<LinearRegressionModel, Matrix> trainer = new LinearRegressionSGDTrainer(100_000, 1e-12);

                System.out.println(">>> Perform the training to get the model.");
                LinearRegressionModel model = trainer.train(distributedMatrix);

                System.out.println(">>> Linear regression model: " + model);
                System.out.println(">>> ---------------------------------");
                System.out.println(">>> | Prediction\t| Ground Truth\t|");
                System.out.println(">>> ---------------------------------");

                // Column 0 of each observation is the target; the remaining
                // columns are the features fed to the model.
                for (double[] observation : data) {
                    Vector inputs = new SparseDistributedVector(Arrays.copyOfRange(observation, 1, observation.length));
                    double prediction = model.apply(inputs);
                    double groundTruth = observation[0];

                    System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", prediction, groundTruth);
                }

                System.out.println(">>> ---------------------------------");
            });

        igniteThread.start();
        // Block until the example's worker thread completes.
        igniteThread.join();
    }
}
Use of org.apache.ignite.ml.math.Matrix in project Ignite by Apache:
the class OLSMultipleLinearRegression, method calculateHat.
/**
 * <p>Compute the "hat" matrix.
 * </p>
 * <p>The hat matrix is defined in terms of the design matrix X
 * by X(X<sup>T</sup>X)<sup>-1</sup>X<sup>T</sup>
 * </p>
 * <p>The implementation here uses the QR decomposition to compute the
 * hat matrix as Q I<sub>p</sub>Q<sup>T</sup> where I<sub>p</sub> is the
 * p-dimensional identity matrix augmented by 0's. This computational
 * formula is from "The Hat Matrix in Regression and ANOVA",
 * David C. Hoaglin and Roy E. Welsch,
 * <i>The American Statistician</i>, Vol. 32, No. 1 (Feb., 1978), pp. 17-22.
 * </p>
 * <p>Data for the model must have been successfully loaded using one of
 * the {@code newSampleData} methods before invoking this method; otherwise
 * a {@code NullPointerException} will be thrown.</p>
 *
 * @return the hat matrix
 * @throws NullPointerException unless method {@code newSampleData} has been called beforehand.
 */
public Matrix calculateHat() {
    // Q factor of the QR decomposition of the design matrix.
    Matrix q = qr.getQ();

    // Build the p-rank identity zero-padded out to the n x n size of Q.
    // No try-catch or advertised NotStrictlyPositiveException - NPE above if n < 3
    Matrix augI = MatrixUtil.like(q, q.columnSize(), q.columnSize());

    int n = augI.columnSize();
    int p = qr.getR().columnSize();

    for (int row = 0; row < n; row++) {
        for (int col = 0; col < n; col++) {
            // 1 on the first p diagonal entries, 0 everywhere else.
            double val = (row == col && row < p) ? 1d : 0d;

            augI.setX(row, col, val);
        }
    }

    // No DME advertised - args valid if we get here
    return q.times(augI).times(q.transpose());
}
Use of org.apache.ignite.ml.math.Matrix in project Ignite by Apache:
the class OLSMultipleLinearRegression, method calculateBetaVariance.
/**
 * <p>Calculates the variance-covariance matrix of the regression parameters.
 * </p>
 * <p>Var(b) = (X<sup>T</sup>X)<sup>-1</sup>
 * </p>
 * <p>Uses QR decomposition to reduce (X<sup>T</sup>X)<sup>-1</sup>
 * to (R<sup>T</sup>R)<sup>-1</sup>, with only the top p rows of
 * R included, where p = the length of the beta vector.</p>
 *
 * <p>Data for the model must have been successfully loaded using one of
 * the {@code newSampleData} methods before invoking this method; otherwise
 * a {@code NullPointerException} will be thrown.</p>
 *
 * @return The beta variance-covariance matrix
 * @throws SingularMatrixException if the design matrix is singular
 * @throws NullPointerException if the data for the model have not been loaded
 */
@Override
protected Matrix calculateBetaVariance() {
    // Number of regression parameters = columns of the design matrix.
    int p = getX().columnSize();

    // Copy the top p x p block of R so inverting it cannot disturb the underlying view.
    Matrix topR = MatrixUtil.copy(qr.getR().viewPart(0, p, 0, p));
    Matrix inv = topR.inverse();

    // (R^T R)^-1 = R^-1 (R^-1)^T.
    return inv.times(inv.transpose());
}
Use of org.apache.ignite.ml.math.Matrix in project Ignite by Apache:
the class QRDecomposition, method solve.
/**
 * Least squares solution of {@code A*X = B}; {@code returns X}.
 *
 * <p>Solves via back substitution on {@code R*X = Q^T * B}, processing rows
 * of {@code X} from the bottom up and eliminating each solved row from the
 * remaining right-hand side in place.</p>
 *
 * @param mtx A matrix with as many rows as {@code A} and any number of cols.
 * @return {@code X} that minimizes the two norm of {@code Q*R*X - B}.
 * @throws IllegalArgumentException if {@code B.rows() != A.rows()}.
 */
public Matrix solve(Matrix mtx) {
// Dimension precondition: B must have as many rows as A.
if (mtx.rowSize() != rows)
throw new IllegalArgumentException("Matrix row dimensions must agree.");
int cols = mtx.columnSize();
Matrix r = getR();
// Fail fast (third arg = true) if R is singular w.r.t. the configured threshold.
checkSingular(r, threshold, true);
// Solution X has one row per column of A, one column per column of B.
Matrix x = like(mType, this.cols, cols);
// Y = Q^T * B reduces the problem to the triangular system R*X = Y.
Matrix qt = getQ().transpose();
Matrix y = qt.times(mtx);
// Back substitution: walk rows bottom-up; each iteration finalizes X[k,]
// and then subtracts its contribution from the rows of Y above it.
for (int k = Math.min(this.cols, rows) - 1; k >= 0; k--) {
// X[k,] = Y[k,] / R[k,k], note that X[k,] starts with 0 so += is same as =
x.viewRow(k).map(y.viewRow(k), Functions.plusMult(1 / r.get(k, k)));
if (k == 0)
continue;
// Y[0:(k-1),] -= R[0:(k-1),k] * X[k,]
// rCol is a live view into R; the maps below mutate Y in place column by column.
Vector rCol = r.viewColumn(k).viewPart(0, k);
for (int c = 0; c < cols; c++) y.viewColumn(c).viewPart(0, k).map(rCol, Functions.plusMult(-x.get(k, c)));
}
return x;
}
Use of org.apache.ignite.ml.math.Matrix in project Ignite by Apache:
the class AbstractMultipleLinearRegressionTest, method canEstimateRegressionParametersVariance.
/** Checks that the parameter variance-covariance matrix has one row per regressor. */
@Test
public void canEstimateRegressionParametersVariance() {
    Matrix betaVariance = regression.estimateRegressionParametersVariance();

    Assert.assertEquals(getNumberOfRegressors(), betaVariance.rowSize());
}
Aggregations