Use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
The class IndexTree, method initialize().
/**
 * Initializes the index.
 *
 * @param exampleLeaf an object that will be stored in the index
 */
protected final void initialize(E exampleLeaf) {
  initializeCapacities(exampleLeaf);
  // create empty root
  createEmptyRoot(exampleLeaf);
  final Logging log = getLogger();
  if (log.isStatistics()) {
    String cls = this.getClass().getName();
    log.statistics(new LongStatistic(cls + ".directory.capacity", dirCapacity));
    log.statistics(new LongStatistic(cls + ".directory.minfill", dirMinimum));
    log.statistics(new LongStatistic(cls + ".leaf.capacity", leafCapacity));
    log.statistics(new LongStatistic(cls + ".leaf.minfill", leafMinimum));
  }
  initialized = true;
}
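This initialize() method shows the ELKI convention for statistics logging: each LongStatistic is keyed by the reporting class name, and the statistics objects are only constructed when isStatistics() is enabled. Below is a minimal standalone sketch of the same pattern; the class name PagedIndexDemo and its capacity field are invented for illustration, and the package path of LongStatistic is assumed from the usual ELKI layout.

import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.logging.statistics.LongStatistic;

public class PagedIndexDemo {
  // Per-class logger, following the ELKI convention of a static final instance.
  private static final Logging LOG = Logging.getLogger(PagedIndexDemo.class);

  // Hypothetical capacity value standing in for dirCapacity / leafCapacity.
  private long capacity = 100;

  protected void reportCapacities() {
    // Only build and emit the statistic objects when statistics logging is enabled.
    if (LOG.isStatistics()) {
      String cls = this.getClass().getName();
      LOG.statistics(new LongStatistic(cls + ".capacity", capacity));
    }
  }
}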
Use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
The class MetricalIndexApproximationMaterializeKNNPreprocessor, method preprocess().
@Override
protected void preprocess() {
  final Logging log = getLogger();
  DistanceQuery<O> distanceQuery = relation.getDistanceQuery(distanceFunction);
  MetricalIndexTree<O, N, E> index = getMetricalIndex(relation);
  createStorage();
  MeanVariance pagesize = new MeanVariance();
  MeanVariance ksize = new MeanVariance();
  if (log.isVerbose()) {
    log.verbose("Approximating nearest neighbor lists to database objects");
  }
  List<E> leaves = index.getLeaves();
  FiniteProgress progress = log.isVerbose() ? new FiniteProgress("Processing leaf nodes", leaves.size(), log) : null;
  for (E leaf : leaves) {
    N node = index.getNode(leaf);
    int size = node.getNumEntries();
    pagesize.put(size);
    if (log.isDebuggingFinest()) {
      log.debugFinest("NumEntries = " + size);
    }
    // Collect the ids in this node.
    ArrayModifiableDBIDs ids = DBIDUtil.newArray(size);
    for (int i = 0; i < size; i++) {
      ids.add(((LeafEntry) node.getEntry(i)).getDBID());
    }
    Object2DoubleOpenHashMap<DBIDPair> cache = new Object2DoubleOpenHashMap<>((size * size * 3) >> 2);
    cache.defaultReturnValue(Double.NaN);
    for (DBIDIter id = ids.iter(); id.valid(); id.advance()) {
      KNNHeap kNN = DBIDUtil.newHeap(k);
      for (DBIDIter id2 = ids.iter(); id2.valid(); id2.advance()) {
        DBIDPair key = DBIDUtil.newPair(id, id2);
        double d = cache.removeDouble(key);
        if (d == d) { // d == d is false only for NaN
          // Cache hit: consume the previously computed distance.
          kNN.insert(d, id2);
        } else {
          // Cache miss: compute the distance and keep it for the reverse pair.
          d = distanceQuery.distance(id, id2);
          kNN.insert(d, id2);
          // put it into the cache, but with the keys reversed
          key = DBIDUtil.newPair(id2, id);
          cache.put(key, d);
        }
      }
      ksize.put(kNN.size());
      storage.put(id, kNN.toKNNList());
    }
    if (log.isDebugging() && cache.size() > 0) {
      log.warning("Cache should be empty after each run, but still has " + cache.size() + " elements.");
    }
    log.incrementProcessed(progress);
  }
  log.ensureCompleted(progress);
  if (log.isVerbose()) {
    log.verbose("Average page size = " + pagesize.getMean() + " +- " + pagesize.getSampleStddev());
    log.verbose("On average, " + ksize.getMean() + " +- " + ksize.getSampleStddev() + " neighbors returned.");
  }
}
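Besides verbose output, this preprocessor uses ELKI's standard progress-reporting idiom: a FiniteProgress is only allocated when verbose logging is active, incrementProcessed() is called once per leaf, and ensureCompleted() closes the progress at the end. The following reduced sketch isolates that idiom; the class name ProgressDemo and the item loop are hypothetical, and the FiniteProgress package path is assumed from the usual ELKI layout.

import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.logging.progress.FiniteProgress;

public class ProgressDemo {
  private static final Logging LOG = Logging.getLogger(ProgressDemo.class);

  public void process(int numItems) {
    // Allocate the progress object only when verbose output is enabled; otherwise stay silent.
    FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Processing items", numItems, LOG) : null;
    for (int i = 0; i < numItems; i++) {
      // ... per-item work would go here ...
      LOG.incrementProcessed(progress); // as in the snippet above, progress may be null here
    }
    // Reports a warning if the progress counter did not reach numItems.
    LOG.ensureCompleted(progress);
  }
}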
Use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
The class AbstractMTree, method insert().
/**
 * Inserts the specified object into this M-Tree.
 *
 * @param entry the entry to be inserted
 * @param withPreInsert if this flag is true, the preInsert method will be
 *        called before inserting the object
 */
// TODO: implement a bulk load for M-Tree and remove this method
public void insert(E entry, boolean withPreInsert) {
  final Logging log = getLogger();
  if (log.isDebugging()) {
    log.debugFine("insert " + entry.getRoutingObjectID());
  }
  if (!initialized) {
    initialize(entry);
  }
  // choose subtree for insertion
  IndexTreePath<E> subtree = settings.insertStrategy.choosePath(this, entry);
  if (log.isDebugging()) {
    log.debugFine("insertion-subtree " + subtree);
  }
  // determine parent distance
  E parentEntry = subtree.getEntry();
  entry.setParentDistance(distance(parentEntry.getRoutingObjectID(), entry.getRoutingObjectID()));
  // create leaf entry and do pre insert
  if (withPreInsert) {
    preInsert(entry);
  }
  // get parent node
  N parent = getNode(parentEntry);
  parent.addLeafEntry(entry);
  writeNode(parent);
  // adjust the tree from subtree to root
  adjustTree(subtree);
  // integrity test
  if (EXTRA_INTEGRITY_CHECKS) {
    getRoot().integrityCheck(this, getRootEntry());
  }
}
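Note how both debugFine() calls are wrapped in an isDebugging() check, so the message strings are only concatenated when debug output is actually enabled. A minimal sketch of this guard, detached from the M-Tree context, is shown below; the class DebugDemo and its insert() stub are illustrative only.

import de.lmu.ifi.dbs.elki.logging.Logging;

public class DebugDemo {
  private static final Logging LOG = Logging.getLogger(DebugDemo.class);

  public void insert(Object entry) {
    // The isDebugging() guard avoids building the message string on the hot path
    // when debug logging is disabled.
    if (LOG.isDebugging()) {
      LOG.debugFine("insert " + entry);
    }
    // ... actual insertion work would follow here ...
  }
}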
Use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
The class ThumbnailVisualization, method doThumbnail().
@Override
public synchronized void doThumbnail() {
  pendingThumbnail = null;
  try {
    VisualizationPlot plot = new VisualizationPlot();
    plot.getRoot().setAttribute(SVGConstants.SVG_VIEW_BOX_ATTRIBUTE, "0 0 " + getWidth() + " " + getHeight());
    // Work on a clone
    Visualization vis = visFactory.makeVisualization(context, task, plot, getWidth(), getHeight(), proj);
    plot.getRoot().appendChild(vis.getLayer());
    plot.updateStyleElement();
    final int tw = (int) (getWidth() * tresolution);
    final int th = (int) (getHeight() * tresolution);
    thumb = plot.makeAWTImage(tw, th);
    thumbid = ThumbnailRegistryEntry.registerImage(thumb);
    // The visualization will not be used anymore.
    vis.destroy();
    svgp.requestRedraw(this.task, this);
  } catch (Exception e) {
    final Logging logger = Logging.getLogger(task.getFactory().getClass());
    if (logger != null && logger.isDebugging()) {
      logger.exception("Thumbnail for " + task.getFactory() + " failed.", e);
    } else {
      LoggingUtil.warning("Thumbnail for " + task.getFactory() + " failed - enable debugging to see details.");
    }
    // TODO: hide the failed image?
  }
}
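The catch block uses a two-tier report: when the factory's logger has debugging enabled, the full exception is logged via logger.exception(), otherwise only a short hint is emitted through the static LoggingUtil.warning() helper. A condensed sketch of that fallback pattern follows; the class RenderDemo and its render() method are hypothetical.

import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.logging.LoggingUtil;

public class RenderDemo {
  public void tryRender(Object producer) {
    try {
      render(producer);
    } catch (Exception e) {
      // With debugging enabled, report the full stack trace on the producer's logger;
      // otherwise fall back to a terse warning via the static LoggingUtil helper.
      final Logging logger = Logging.getLogger(producer.getClass());
      if (logger != null && logger.isDebugging()) {
        logger.exception("Rendering " + producer + " failed.", e);
      } else {
        LoggingUtil.warning("Rendering " + producer + " failed - enable debugging to see details.");
      }
    }
  }

  private void render(Object producer) {
    // Placeholder for work that may throw.
  }
}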
Use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
The class SLINK, method run().
/**
 * Performs the SLINK algorithm on the given database.
 *
 * @param database Database to process
 * @param relation Data relation to use
 * @return Pointer hierarchy representation of the clustering
 */
public PointerHierarchyRepresentationResult run(Database database, Relation<O> relation) {
  DBIDs ids = relation.getDBIDs();
  WritableDBIDDataStore pi = DataStoreUtil.makeDBIDStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC);
  WritableDoubleDataStore lambda = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_STATIC, Double.POSITIVE_INFINITY);
  // Temporary storage for m.
  WritableDoubleDataStore m = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  // Use getLogger() so that the CLINK subclass can override the logger.
  final Logging log = getLogger();
  FiniteProgress progress = log.isVerbose() ? new FiniteProgress("Running SLINK", ids.size(), log) : null;
  ArrayDBIDs aids = DBIDUtil.ensureArray(ids);
  // First element is trivial/special:
  DBIDArrayIter id = aids.iter(), it = aids.iter();
  // Step 1: initialize
  for (; id.valid(); id.advance()) {
    // P(n+1) = n+1:
    pi.put(id, id);
    // L(n+1) = infinity already.
  }
  // First element is finished already (start at seek(1) below!)
  log.incrementProcessed(progress);
  // Optimized branch for primitive distance functions
  if (getDistanceFunction() instanceof PrimitiveDistanceFunction) {
    PrimitiveDistanceFunction<? super O> distf = (PrimitiveDistanceFunction<? super O>) getDistanceFunction();
    for (id.seek(1); id.valid(); id.advance()) {
      step2primitive(id, it, id.getOffset(), relation, distf, m);
      // SLINK or CLINK
      process(id, aids, it, id.getOffset(), pi, lambda, m);
      log.incrementProcessed(progress);
    }
  } else {
    // Fallback branch using a database distance query
    DistanceQuery<O> distQ = database.getDistanceQuery(relation, getDistanceFunction());
    for (id.seek(1); id.valid(); id.advance()) {
      step2(id, it, id.getOffset(), distQ, m);
      // SLINK or CLINK
      process(id, aids, it, id.getOffset(), pi, lambda, m);
      log.incrementProcessed(progress);
    }
  }
  log.ensureCompleted(progress);
  // We don't need m anymore.
  m.destroy();
  m = null;
  return new PointerHierarchyRepresentationResult(ids, pi, lambda, getDistanceFunction().isSquared());
}
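SLINK obtains its logger through getLogger() rather than a static field so that the CLINK subclass can reuse this run() method while progress and verbose output are attributed to the concrete algorithm. The sketch below isolates that override pattern; the class names BaseAlgorithmDemo and SubAlgorithmDemo are invented for illustration.

import de.lmu.ifi.dbs.elki.logging.Logging;

public abstract class BaseAlgorithmDemo {
  // Subclasses supply their own logger, so shared code logs under the concrete class.
  protected abstract Logging getLogger();

  public void run() {
    final Logging log = getLogger();
    if (log.isVerbose()) {
      // Messages are attributed to the subclass that provided the logger.
      log.verbose("Running " + getClass().getSimpleName());
    }
  }
}

class SubAlgorithmDemo extends BaseAlgorithmDemo {
  private static final Logging LOG = Logging.getLogger(SubAlgorithmDemo.class);

  @Override
  protected Logging getLogger() {
    return LOG;
  }
}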