Use of org.apache.commons.math3.linear.QRDecomposition in project gatk by broadinstitute.
The class SingularValueDecomposerUnitTest, method isUnitaryMatrix.
/**
 * Check that the given matrix is unitary.
 */
public static boolean isUnitaryMatrix(final RealMatrix m) {
    // Note: can't use MatrixUtils.inverse because m may not be square
    final RealMatrix mInv = new QRDecomposition(m).getSolver().getInverse();
    final RealMatrix mT = m.transpose();
    for (int i = 0; i < mInv.getRowDimension(); i++) {
        for (int j = 0; j < mInv.getColumnDimension(); j++) {
            if (Math.abs(mInv.getEntry(i, j) - mT.getEntry(i, j)) > 1.0e-7) {
                return false;
            }
        }
    }
    return true;
}
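A minimal usage sketch of the same check (an assumed example, not taken from the GATK test itself): run Commons Math's SingularValueDecomposition on a small non-square matrix and verify that the left factor U passes the unitarity test above, expressed here inline so the snippet is self-contained. The class name and matrix values are illustrative only.

// Illustrative sketch (hypothetical class): the QR-based pseudo-inverse of U should equal U^T.
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.QRDecomposition;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.SingularValueDecomposition;

public class UnitaryCheckDemo {
    public static void main(final String[] args) {
        final RealMatrix m = MatrixUtils.createRealMatrix(new double[][] {
            {1.0, 2.0},
            {3.0, 4.0},
            {5.0, 6.0}
        });
        final SingularValueDecomposition svd = new SingularValueDecomposition(m);
        final RealMatrix u = svd.getU(); // 3x2 in the thin SVD, so not square
        // Same comparison as isUnitaryMatrix: pseudo-inverse vs. transpose.
        final RealMatrix uPinv = new QRDecomposition(u).getSolver().getInverse();
        System.out.println(uPinv.subtract(u.transpose()).getNorm()); // close to 0
    }
}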
Use of org.apache.commons.math3.linear.QRDecomposition in project gatk-protected by broadinstitute.
The class DecomposeSingularValuesIntegrationTest, method assertUnitaryMatrix.
/**
 * Assert that the given matrix is unitary.
 * @param m the matrix to check
 */
public static void assertUnitaryMatrix(final RealMatrix m) {
    // m may not be square, so use the QR-based pseudo-inverse rather than MatrixUtils.inverse
    final RealMatrix mInv = new QRDecomposition(m).getSolver().getInverse();
    final RealMatrix mT = m.transpose();
    for (int i = 0; i < mInv.getRowDimension(); i++) {
        for (int j = 0; j < mInv.getColumnDimension(); j++) {
            Assert.assertEquals(mInv.getEntry(i, j), mT.getEntry(i, j), 1e-7);
        }
    }
}
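A short sketch of why both tests reach for QRDecomposition here (assumed example, not from either test): MatrixUtils.inverse rejects non-square input, whereas the QR solver returns the least-squares pseudo-inverse, which coincides with the transpose exactly when the columns are orthonormal.

// Illustrative sketch (hypothetical class and values).
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.NonSquareMatrixException;
import org.apache.commons.math3.linear.QRDecomposition;
import org.apache.commons.math3.linear.RealMatrix;

public class PseudoInverseDemo {
    public static void main(final String[] args) {
        // 3x2 matrix with orthonormal columns (the first two columns of the 3x3 identity).
        final RealMatrix m = MatrixUtils.createRealMatrix(new double[][] {
            {1, 0},
            {0, 1},
            {0, 0}
        });
        try {
            MatrixUtils.inverse(m); // throws: m is not square
        } catch (NonSquareMatrixException e) {
            System.out.println("MatrixUtils.inverse requires a square matrix");
        }
        final RealMatrix pinv = new QRDecomposition(m).getSolver().getInverse(); // 2x3 pseudo-inverse
        System.out.println(pinv.subtract(m.transpose()).getNorm()); // ~0 for orthonormal columns
    }
}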
Use of org.apache.commons.math3.linear.QRDecomposition in project knime-core by knime.
The class IrlsLearner, method learn.
/**
* {@inheritDoc}
*/
@Override
public LogRegLearnerResult learn(final TrainingData<ClassificationTrainingRow> trainingData,
    final ExecutionMonitor exec) throws CanceledExecutionException, InvalidSettingsException {
    exec.checkCanceled();
    int iter = 0;
    boolean converged = false;
    final int tcC = trainingData.getTargetDimension() + 1;
    final int rC = trainingData.getFeatureCount() - 1;
    final RealMatrix beta = MatrixUtils.createRealMatrix(1, (tcC - 1) * (rC + 1));
    Double loglike = 0.0;
    Double loglikeOld = 0.0;
    exec.setMessage("Iterative optimization. Processing iteration 1.");
    // main loop
    while (iter < m_maxIter && !converged) {
        RealMatrix betaOld = beta.copy();
        loglikeOld = loglike;
        // Do the heavy work in a separate thread so that it can be interrupted.
        // Note: the queue may block if no more threads are available (e.g. thread count = 1);
        // as soon as this thread stalls in 'get' it reduces the number of running threads.
        Future<Double> future = ThreadPool.currentPool().enqueue(new Callable<Double>() {
            @Override
            public Double call() throws Exception {
                final ExecutionMonitor progMon = exec.createSubProgress(1.0 / m_maxIter);
                irlsRls(trainingData, beta, rC, tcC, progMon);
                progMon.setProgress(1.0);
                return likelihood(trainingData.iterator(), beta, rC, tcC, exec);
            }
        });
        try {
            loglike = future.get();
        } catch (InterruptedException e) {
            future.cancel(true);
            exec.checkCanceled();
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            if (e.getCause() instanceof RuntimeException) {
                throw (RuntimeException) e.getCause();
            } else {
                throw new RuntimeException(e.getCause());
            }
        }
        if (Double.isInfinite(loglike) || Double.isNaN(loglike)) {
            throw new RuntimeException(FAILING_MSG);
        }
        exec.checkCanceled();
        // test for decreasing likelihood
        while ((Double.isInfinite(loglike) || Double.isNaN(loglike) || loglike < loglikeOld) && iter > 0) {
            converged = true;
            for (int k = 0; k < beta.getColumnDimension(); k++) {
                if (abs(beta.getEntry(0, k) - betaOld.getEntry(0, k)) > m_eps * abs(betaOld.getEntry(0, k))) {
                    converged = false;
                    break;
                }
            }
            if (converged) {
                break;
            }
            // halve the step size of beta (move halfway back towards betaOld)
            beta.setSubMatrix((beta.add(betaOld)).scalarMultiply(0.5).getData(), 0, 0);
            exec.checkCanceled();
            loglike = likelihood(trainingData.iterator(), beta, rC, tcC, exec);
            exec.checkCanceled();
        }
        // test for convergence
        converged = true;
        for (int k = 0; k < beta.getColumnDimension(); k++) {
            if (abs(beta.getEntry(0, k) - betaOld.getEntry(0, k)) > m_eps * abs(betaOld.getEntry(0, k))) {
                converged = false;
                break;
            }
        }
        iter++;
        LOGGER.debug("#Iterations: " + iter);
        LOGGER.debug("Log Likelihood: " + loglike);
        StringBuilder betaBuilder = new StringBuilder();
        for (int i = 0; i < beta.getColumnDimension() - 1; i++) {
            betaBuilder.append(Double.toString(beta.getEntry(0, i)));
            betaBuilder.append(", ");
        }
        if (beta.getColumnDimension() > 0) {
            betaBuilder.append(Double.toString(beta.getEntry(0, beta.getColumnDimension() - 1)));
        }
        LOGGER.debug("beta: " + betaBuilder.toString());
        exec.checkCanceled();
        exec.setMessage("Iterative optimization. #Iterations: " + iter + " | Log-likelihood: "
            + DoubleFormat.formatDouble(loglike) + ". Processing iteration " + (iter + 1) + ".");
    }
    StringBuilder warnBuilder = new StringBuilder();
    if (iter >= m_maxIter) {
        warnBuilder.append("The algorithm did not reach convergence after the specified number of epochs. "
            + "Setting the epoch limit higher might result in a better model.");
    }
    // The covariance matrix (A is assembled during the IRLS iterations; its declaration is not shown in this snippet)
    RealMatrix covMat = null;
    if (m_calcCovMatrix) {
        try {
            covMat = new QRDecomposition(A).getSolver().getInverse().scalarMultiply(-1);
        } catch (SingularMatrixException sme) {
            if (warnBuilder.length() > 0) {
                warnBuilder.append("\n");
            }
            warnBuilder.append("The covariance matrix could not be calculated because the"
                + " observed fisher information matrix was singular.");
        }
    }
    RealMatrix betaMat = MatrixUtils.createRealMatrix(tcC - 1, rC + 1);
    for (int i = 0; i < beta.getColumnDimension(); i++) {
        int r = i / (rC + 1);
        int c = i % (rC + 1);
        betaMat.setEntry(r, c, beta.getEntry(0, i));
    }
    m_warning = warnBuilder.length() > 0 ? warnBuilder.toString() : null;
    return new LogRegLearnerResult(betaMat, covMat, iter, loglike);
}
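The same element-wise convergence test appears twice in the method above: the loop stops once |beta_k - betaOld_k| <= m_eps * |betaOld_k| for every coefficient k. A small sketch of that criterion factored into its own helper (a hypothetical refactoring for illustration, not part of IrlsLearner):

// Hypothetical helper illustrating the relative-change convergence test used in learn().
import static java.lang.Math.abs;

import org.apache.commons.math3.linear.RealMatrix;

final class ConvergenceCheck {
    private ConvergenceCheck() {
    }

    /** Returns true if every coefficient moved by at most eps relative to its previous value. */
    static boolean hasConverged(final RealMatrix beta, final RealMatrix betaOld, final double eps) {
        for (int k = 0; k < beta.getColumnDimension(); k++) {
            if (abs(beta.getEntry(0, k) - betaOld.getEntry(0, k)) > eps * abs(betaOld.getEntry(0, k))) {
                return false;
            }
        }
        return true;
    }
}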
Use of org.apache.commons.math3.linear.QRDecomposition in project knime-core by knime.
The class IrlsLearner, method likelihood.
// private RealMatrix getStdErrorMatrix(final RealMatrix xTwx) {
//     RealMatrix covMat = new QRDecomposition(xTwx).getSolver().getInverse().scalarMultiply(-1);
//     // the standard error estimate
//     RealMatrix stdErr = MatrixUtils.createRealMatrix(covMat.getColumnDimension(),
//         covMat.getRowDimension());
//     for (int i = 0; i < covMat.getRowDimension(); i++) {
//         stdErr.setEntry(i, i, sqrt(abs(covMat.getEntry(i, i))));
//     }
//     return stdErr;
// }
/**
 * Compute the log-likelihood at the given beta.
 *
 * @param iter iterator over the training data
 * @param beta parameter vector
 * @param rC regressors count
 * @param tcC target category count
 * @param exec execution monitor used to check for cancellation
 * @return the log-likelihood of the training data at beta
 * @throws CanceledExecutionException when the method is cancelled
 */
private double likelihood(final Iterator<ClassificationTrainingRow> iter, final RealMatrix beta,
    final int rC, final int tcC, final ExecutionMonitor exec) throws CanceledExecutionException {
    double loglike = 0;
    RealMatrix x = MatrixUtils.createRealMatrix(1, rC + 1);
    while (iter.hasNext()) {
        exec.checkCanceled();
        ClassificationTrainingRow row = iter.next();
        fillXFromRow(x, row);
        double sumEBetaTx = 0;
        for (int i = 0; i < tcC - 1; i++) {
            RealMatrix betaITx = x.multiply(
                beta.getSubMatrix(0, 0, i * (rC + 1), (i + 1) * (rC + 1) - 1).transpose());
            sumEBetaTx += Math.exp(betaITx.getEntry(0, 0));
        }
        int y = row.getCategory();
        double yBetaTx = 0;
        if (y < tcC - 1) {
            yBetaTx = x.multiply(
                beta.getSubMatrix(0, 0, y * (rC + 1), (y + 1) * (rC + 1) - 1).transpose()).getEntry(0, 0);
        }
        loglike += yBetaTx - Math.log(1 + sumEBetaTx);
    }
    return loglike;
}
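Written out, the quantity accumulated above is the multinomial-logit log-likelihood with the last category (index tcC - 1) as the reference class whose coefficients are fixed at zero. A sketch of the formula the loop implements, with assumed notation K = tcC categories, x_n the feature row including the intercept, and y_n the observed category:

\ell(\beta) = \sum_{n} \left[ \beta_{y_n}^{\top} x_n \, \mathbf{1}\{y_n < K-1\}
    - \log\!\left(1 + \sum_{i=0}^{K-2} \exp\!\left(\beta_i^{\top} x_n\right)\right) \right]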
Use of org.apache.commons.math3.linear.QRDecomposition in project knime-core by knime.
The class AbstractSGOptimizer, method calculateCovariateMatrix.
private RealMatrix calculateCovariateMatrix(final WeightMatrix<T> beta) {
    final RealMatrix llHessian = MatrixUtils.createRealMatrix(m_loss.hessian(m_data, beta));
    final RealMatrix priorHessian = m_regUpdater.hessian(beta);
    RealMatrix observedInformation = llHessian.add(priorHessian);
    RealMatrix covMat = new QRDecomposition(observedInformation).getSolver().getInverse().scalarMultiply(-1);
    return covMat;
}
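As in the IrlsLearner snippets above, the coefficient covariance estimate is the negatively scaled inverse of the observed information, and its diagonal yields the standard errors. A sketch of that follow-up step along the lines of the commented-out getStdErrorMatrix shown earlier; the class and method names here are illustrative, not part of AbstractSGOptimizer:

// Illustrative sketch: derive per-coefficient standard errors from the covariance matrix.
import static java.lang.Math.abs;
import static java.lang.Math.sqrt;

import org.apache.commons.math3.linear.RealMatrix;

final class StdErrorSketch {
    private StdErrorSketch() {
    }

    /** Standard errors are the square roots of the (absolute) diagonal entries of the covariance matrix. */
    static double[] standardErrors(final RealMatrix covMat) {
        final double[] stdErr = new double[covMat.getRowDimension()];
        for (int i = 0; i < stdErr.length; i++) {
            stdErr[i] = sqrt(abs(covMat.getEntry(i, i)));
        }
        return stdErr;
    }
}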