Use of de.lmu.ifi.dbs.elki.logging.statistics.Duration in project elki by elki-project: class PerplexityAffinityMatrixBuilder, method computePij.
/**
 * Build the affinity matrix pij from a precomputed distance matrix, tuning a
 * per-point bandwidth to match the requested perplexity, then symmetrizing
 * and normalizing the result.
 *
 * @param dist Distance matrix.
 * @param perplexity Desired perplexity
 * @param initialScale Initial scale
 * @return Affinity matrix pij
 */
protected static double[][] computePij(double[][] dist, double perplexity, double initialScale) {
  final int n = dist.length;
  final double logPerplexity = FastMath.log(perplexity);
  double[][] pij = new double[n][n];
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Optimizing perplexities", n, LOG) : null;
  Duration duration = LOG.isStatistics() ? LOG.newDuration(PerplexityAffinityMatrixBuilder.class.getName() + ".runtime.pijmatrix").begin() : null;
  MeanVariance sigmaStats = LOG.isStatistics() ? new MeanVariance() : null;
  for(int row = 0; row < n; row++) {
    final double beta = computePi(row, dist[row], pij[row], perplexity, logPerplexity);
    if(sigmaStats != null) {
      // Track the implied Gaussian bandwidth: sigma = sqrt(1 / (2 beta)).
      sigmaStats.put(beta > 0 ? FastMath.sqrt(.5 / beta) : 0.);
    }
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  if(LOG.isStatistics()) {
    // Both duration and sigmaStats were created above when statistics are on.
    LOG.statistics(duration.end());
    LOG.statistics(new DoubleStatistic(PerplexityAffinityMatrixBuilder.class.getName() + ".sigma.average", sigmaStats.getMean()));
    LOG.statistics(new DoubleStatistic(PerplexityAffinityMatrixBuilder.class.getName() + ".sigma.stddev", sigmaStats.getSampleStddev()));
  }
  // Symmetrize: fold the upper triangle into the lower one, summing as we go.
  double total = 0.;
  for(int row = 1; row < n; row++) {
    final double[] rowvec = pij[row];
    for(int col = 0; col < row; col++) {
      total += (rowvec[col] += pij[col][row]);
    }
  }
  // Rescale so the matrix sums to initialScale (scaling as in the original
  // tSNE code); clamp tiny entries to MIN_PIJ, and mirror into both triangles.
  final double factor = initialScale / (2. * total);
  for(int row = 1; row < n; row++) {
    final double[] rowvec = pij[row];
    for(int col = 0; col < row; col++) {
      rowvec[col] = pij[col][row] = MathUtil.max(rowvec[col] * factor, MIN_PIJ);
    }
  }
  return pij;
}
Use of de.lmu.ifi.dbs.elki.logging.statistics.Duration in project elki by elki-project: class SNE, method optimizeSNE.
/**
 * Perform the actual SNE gradient-descent optimization.
 *
 * @param pij Initial affinity matrix
 * @param sol Solution output array (preinitialized)
 */
protected void optimizeSNE(AffinityMatrix pij, double[][] sol) {
  final int size = pij.size();
  // Guard against exceeding the maximum Java array length.
  if(size * 3L * dim > 0x7FFF_FFFAL) {
    throw new AbortException("Memory exceeds Java array size limit.");
  }
  // Per-point bookkeeping in one flat array for memory locality:
  // [gradient | momentum | learning rate], each of length dim.
  final int stride = 3 * dim;
  double[] meta = new double[size * stride];
  // The learning-rate section (third chunk of each record) starts at 1.
  for(int base = 2 * dim; base < meta.length; base += stride) {
    Arrays.fill(meta, base, base + dim, 1.);
  }
  // Affinities recomputed in the projected space each iteration.
  double[][] qij = new double[size][size];
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Iterative Optimization", iterations, LOG) : null;
  Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".runtime.optimization").begin() : null;
  for(int iter = 0; iter < iterations; iter++) {
    final double qijSum = computeQij(qij, sol);
    computeGradient(pij, qij, qijSum, sol, meta);
    updateSolution(sol, meta, iter);
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  if(duration != null) {
    LOG.statistics(duration.end());
  }
}
Use of de.lmu.ifi.dbs.elki.logging.statistics.Duration in project elki by elki-project: class TSNE, method optimizetSNE.
/**
 * Perform the actual tSNE gradient-descent optimization, removing the early
 * exaggeration factor after the configured number of iterations.
 *
 * @param pij Initial affinity matrix
 * @param sol Solution output array (preinitialized)
 */
protected void optimizetSNE(AffinityMatrix pij, double[][] sol) {
  final int size = pij.size();
  // Guard against exceeding the maximum Java array length.
  if(size * 3L * dim > 0x7FFF_FFFAL) {
    throw new AbortException("Memory exceeds Java array size limit.");
  }
  // Per-point bookkeeping in one flat array for memory locality:
  // [gradient | momentum | learning rate], each of length dim.
  final int stride = 3 * dim;
  double[] meta = new double[size * stride];
  // The learning-rate section (third chunk of each record) starts at 1.
  for(int base = 2 * dim; base < meta.length; base += stride) {
    Arrays.fill(meta, base, base + dim, 1.);
  }
  // Affinities recomputed in the projected space each iteration.
  double[][] qij = new double[size][size];
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Iterative Optimization", iterations, LOG) : null;
  Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".runtime.optimization").begin() : null;
  for(int iter = 0; iter < iterations; iter++) {
    final double qijSum = computeQij(qij, sol);
    computeGradient(pij, qij, qijSum, sol, meta);
    updateSolution(sol, meta, iter);
    if(iter == EARLY_EXAGGERATION_ITERATIONS) {
      // Undo the early exaggeration applied to pij during the first phase.
      pij.scale(1. / EARLY_EXAGGERATION);
    }
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  if(duration != null) {
    LOG.statistics(duration.end());
  }
}
Use of de.lmu.ifi.dbs.elki.logging.statistics.Duration in project elki by elki-project: class BarnesHutTSNE, method optimizetSNE.
/**
 * Perform the actual tSNE gradient-descent optimization using the
 * Barnes-Hut approximation (no dense qij matrix is materialized), removing
 * the early exaggeration factor after the configured number of iterations.
 *
 * @param pij Sparse initial affinity matrix
 * @param sol Solution output array (preinitialized)
 */
protected void optimizetSNE(AffinityMatrix pij, double[][] sol) {
  final int size = pij.size();
  // Guard against exceeding the maximum Java array length.
  if(size * 3L * dim > 0x7FFF_FFFAL) {
    throw new AbortException("Memory exceeds Java array size limit.");
  }
  // Per-point bookkeeping in one flat array for memory locality:
  // [gradient | momentum | learning rate], each of length dim.
  final int stride = 3 * dim;
  double[] meta = new double[size * stride];
  // The learning-rate section (third chunk of each record) starts at 1.
  for(int base = 2 * dim; base < meta.length; base += stride) {
    Arrays.fill(meta, base, base + dim, 1.);
  }
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Iterative Optimization", iterations, LOG) : null;
  Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".runtime.optimization").begin() : null;
  for(int iter = 0; iter < iterations; iter++) {
    computeGradient(pij, sol, meta);
    updateSolution(sol, meta, iter);
    if(iter == EARLY_EXAGGERATION_ITERATIONS) {
      // Undo the early exaggeration applied to pij during the first phase.
      pij.scale(1. / EARLY_EXAGGERATION);
    }
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  if(duration != null) {
    LOG.statistics(duration.end());
  }
}
Use of de.lmu.ifi.dbs.elki.logging.statistics.Duration in project elki by elki-project: class GaussianAffinityMatrixBuilder, method computePij.
/**
 * Build the affinity matrix pij from a precomputed distance matrix, using a
 * fixed Gaussian kernel bandwidth, then symmetrizing and normalizing the
 * result.
 *
 * @param dist Distance matrix.
 * @param sigma Kernel bandwidth sigma
 * @param initialScale Initial scale
 * @return Affinity matrix pij
 */
protected static double[][] computePij(double[][] dist, double sigma, double initialScale) {
  final int n = dist.length;
  // Precompute -1/(2 sigma^2), the exponent factor of the Gaussian kernel.
  final double negHalfInvSigmaSq = -.5 / (sigma * sigma);
  double[][] pij = new double[n][n];
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Computing affinities", n, LOG) : null;
  Duration duration = LOG.isStatistics() ? LOG.newDuration(GaussianAffinityMatrixBuilder.class.getName() + ".runtime.pijmatrix").begin() : null;
  MeanVariance perplexityStats = LOG.isStatistics() ? new MeanVariance() : null;
  for(int row = 0; row < n; row++) {
    final double logP = computeH(row, dist[row], pij[row], negHalfInvSigmaSq);
    if(perplexityStats != null) {
      // Track the resulting per-point perplexity, exp(H).
      perplexityStats.put(FastMath.exp(logP));
    }
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  if(LOG.isStatistics()) {
    // Both duration and perplexityStats were created above when statistics are on.
    LOG.statistics(duration.end());
    LOG.statistics(new DoubleStatistic(GaussianAffinityMatrixBuilder.class.getName() + ".perplexity.average", perplexityStats.getMean()));
    LOG.statistics(new DoubleStatistic(GaussianAffinityMatrixBuilder.class.getName() + ".perplexity.stddev", perplexityStats.getSampleStddev()));
  }
  // Symmetrize: fold the upper triangle into the lower one, summing as we go.
  double total = 0.;
  for(int row = 1; row < n; row++) {
    final double[] rowvec = pij[row];
    for(int col = 0; col < row; col++) {
      total += (rowvec[col] += pij[col][row]);
    }
  }
  // Rescale so the matrix sums to initialScale (scaling as in the original
  // tSNE code); clamp tiny entries to MIN_PIJ, and mirror into both triangles.
  final double factor = initialScale / (2. * total);
  for(int row = 1; row < n; row++) {
    final double[] rowvec = pij[row];
    for(int col = 0; col < row; col++) {
      rowvec[col] = pij[col][row] = MathUtil.max(rowvec[col] * factor, MIN_PIJ);
    }
  }
  return pij;
}
Aggregations