Use of de.lmu.ifi.dbs.elki.math.linearalgebra.pca.SortedEigenPairs in project elki by elki-project.
Class GlobalPrincipalComponentAnalysisTransform, method prepareComplete:
@Override
protected void prepareComplete() {
  mean = covmat.getMeanVector();
  PCAResult pcares = (new PCARunner(null)).processCovarMatrix(covmat.destroyToPopulationMatrix());
  SortedEigenPairs eps = pcares.getEigenPairs();
  covmat = null;
  if (filter == null) {
    proj = new double[dim][dim];
    for (int d = 0; d < dim; d++) {
      EigenPair ep = eps.getEigenPair(d);
      double[] ev = ep.getEigenvector();
      double mult = 1. / FastMath.sqrt(ep.getEigenvalue());
      // Fill weighted and transposed:
      for (int i = 0; i < dim; i++) {
        proj[d][i] = ev[i] * mult;
      }
    }
  } else {
    final int pdim = filter.filter(eps.eigenValues());
    if (LOG.isVerbose()) {
      LOG.verbose("Reducing dimensionality from " + dim + " to " + pdim + " via PCA.");
    }
    proj = new double[pdim][dim];
    for (int d = 0; d < pdim; d++) {
      EigenPair ep = eps.getEigenPair(d);
      double[] ev = ep.getEigenvector();
      double mult = 1. / FastMath.sqrt(ep.getEigenvalue());
      // Fill weighted and transposed:
      for (int i = 0; i < dim; i++) {
        proj[d][i] = ev[i] * mult;
      }
    }
  }
  buf = new double[dim];
}
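In the loops above, each row of proj holds one eigenvector scaled by 1/sqrt(eigenvalue), so applying proj to a mean-centered vector whitens it. The following is a minimal plain-Java sketch of how such a matrix is applied, not the ELKI implementation; the names WhiteningSketch, whiten, mean and proj are chosen only for illustration.

// Minimal sketch (not ELKI API): y = proj * (x - mean), where each row of
// proj is an eigenvector divided by the square root of its eigenvalue.
public final class WhiteningSketch {
  static double[] whiten(double[] x, double[] mean, double[][] proj) {
    final double[] y = new double[proj.length];
    for (int d = 0; d < proj.length; d++) {
      double s = 0.;
      for (int i = 0; i < x.length; i++) {
        s += proj[d][i] * (x[i] - mean[i]); // center, then project onto scaled eigenvector d
      }
      y[d] = s;
    }
    return y;
  }

  public static void main(String[] args) {
    double[] mean = { 1., 2. };
    double[][] proj = { { 0.5, 0. }, { 0., 2. } }; // toy 2x2 "whitening" matrix
    double[] y = whiten(new double[] { 3., 2.5 }, mean, proj);
    System.out.println(java.util.Arrays.toString(y)); // [1.0, 1.0]
  }
}

In the toy main, the input (3, 2.5) is centered to (2, 0.5) and mapped to (1.0, 1.0), so each retained direction ends up on a comparable scale.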
A second use of SortedEigenPairs: class LinearDiscriminantAnalysisFilter, method computeProjectionMatrix:
@Override
protected double[][] computeProjectionMatrix(List<V> vectorcolumn, List<? extends ClassLabel> classcolumn, int dim) {
  Map<ClassLabel, IntList> classes = partition(classcolumn);
  // Fix indexing of classes:
  List<ClassLabel> keys = new ArrayList<>(classes.keySet());
  // Compute centroids:
  List<Centroid> centroids = computeCentroids(dim, vectorcolumn, keys, classes);
  final double[][] sigmaB, sigmaI;
  // Between classes covariance:
  {
    CovarianceMatrix covmake = new CovarianceMatrix(dim);
    for (Centroid c : centroids) {
      covmake.put(c);
    }
    sigmaB = covmake.destroyToSampleMatrix();
  }
  {
    // (Average) within class variance:
    CovarianceMatrix covmake = new CovarianceMatrix(dim);
    int numc = keys.size();
    for (int i = 0; i < numc; i++) {
      double[] c = centroids.get(i).getArrayRef();
      // TODO: different weighting strategies? Sampling?
      for (IntIterator it = classes.get(keys.get(i)).iterator(); it.hasNext();) {
        covmake.put(minusEquals(vectorcolumn.get(it.nextInt()).toArray(), c));
      }
    }
    sigmaI = covmake.destroyToSampleMatrix();
    if (new LUDecomposition(sigmaI).det() == 0) {
      for (int i = 0; i < dim; i++) {
        sigmaI[i][i] += 1e-10;
      }
    }
  }
  double[][] sol = times(inverse(sigmaI), sigmaB);
  EigenvalueDecomposition decomp = new EigenvalueDecomposition(sol);
  SortedEigenPairs sorted = new SortedEigenPairs(decomp, false);
  return transpose(sorted.eigenVectors(tdim));
}
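In the last three lines, SortedEigenPairs orders the eigenpairs of sigmaI^-1 * sigmaB by decreasing eigenvalue (the second constructor argument, false, selects descending order here), eigenVectors(tdim) collects the first tdim eigenvectors as columns, and the transpose turns them into a tdim-by-dim projection matrix; tdim is the filter's target dimensionality, a field of the surrounding class not shown in this snippet. Below is a plain-Java sketch of that selection step under the assumption that eigenvectors are stored as matrix columns; TopEigenvectorsSketch and its method name are hypothetical and used only for illustration.

import java.util.Arrays;
import java.util.Comparator;

// Plain-Java sketch (not ELKI's SortedEigenPairs): pick the eigenvectors with the
// largest eigenvalues and return them as the rows of a tdim x dim projection matrix.
public final class TopEigenvectorsSketch {
  static double[][] topEigenvectorsAsRows(double[] eigenvalues, double[][] vecs, int tdim) {
    final int dim = eigenvalues.length;
    Integer[] order = new Integer[dim];
    for (int i = 0; i < dim; i++) {
      order[i] = i;
    }
    // Sort column indices by decreasing eigenvalue:
    Arrays.sort(order, Comparator.comparingDouble((Integer i) -> eigenvalues[i]).reversed());
    double[][] proj = new double[tdim][dim];
    for (int r = 0; r < tdim; r++) {
      final int col = order[r];
      for (int i = 0; i < dim; i++) {
        proj[r][i] = vecs[i][col]; // column `col` of vecs becomes row r of proj
      }
    }
    return proj;
  }

  public static void main(String[] args) {
    double[] evals = { 0.5, 3.0, 1.2 };
    double[][] vecs = { { 1, 0, 0 }, { 0, 1, 0 }, { 0, 0, 1 } }; // eigenvectors as columns
    System.out.println(Arrays.deepToString(topEigenvectorsAsRows(evals, vecs, 2)));
    // [[0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]
  }
}

In main, the two largest eigenvalues (3.0 and 1.2) select the second and third eigenvectors as the rows of the reduced projection.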