use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
From the class AbstractHDBSCAN, method computeCoreDists.
/**
 * Compute the core distances for all objects.
 *
 * @param ids Objects
 * @param knnQ kNN query
 * @param minPts Minimum neighborhood size
 * @return Data store with core distances
 */
protected WritableDoubleDataStore computeCoreDists(DBIDs ids, KNNQuery<O> knnQ, int minPts) {
  final Logging LOG = getLogger();
  // One double per object, marked as frequently accessed ("hot") and database-associated.
  final WritableDoubleDataStore coredists = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  FiniteProgress cprog = LOG.isVerbose() ? new FiniteProgress("Computing core sizes", ids.size(), LOG) : null;
  DBIDIter iter = ids.iter();
  while (iter.valid()) {
    // Core distance = distance to the minPts-nearest neighbor of this object.
    final double coredist = knnQ.getKNNForDBID(iter, minPts).getKNNDistance();
    coredists.put(iter, coredist);
    LOG.incrementProcessed(cprog);
    iter.advance();
  }
  LOG.ensureCompleted(cprog);
  return coredists;
}
use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
From the class MaterializeKNNPreprocessor, method objectsInserted.
/**
 * Called after new objects have been inserted, updates the materialized
 * neighborhood.
 *
 * @param ids the ids of the newly inserted objects
 */
protected void objectsInserted(DBIDs ids) {
  // Could be subclass
  final Logging log = getLogger();
  StepProgress stepprog = log.isVerbose() ? new StepProgress(3) : null;
  ArrayDBIDs aids = DBIDUtil.ensureArray(ids);
  // Step 1: materialize the kNNs of the newly inserted objects.
  // (Fixed typo: "ocurred" -> "occurred" in the three step messages below.)
  log.beginStep(stepprog, 1, "New insertions occurred, materialize their new kNNs.");
  // Bulk-query kNNs; result list is parallel to aids.
  List<? extends KNNList> kNNList = knnQuery.getKNNForBulkDBIDs(aids, k);
  // Store in storage
  DBIDIter iter = aids.iter();
  for (int i = 0; i < aids.size(); i++, iter.advance()) {
    storage.put(iter, kNNList.get(i));
  }
  // Step 2: update kNN lists of pre-existing objects affected by the insertions.
  log.beginStep(stepprog, 2, "New insertions occurred, update the affected kNNs.");
  ArrayDBIDs rkNN_ids = updateKNNsAfterInsertion(ids);
  // Step 3: notify listeners about both the new and the changed kNN lists.
  log.beginStep(stepprog, 3, "New insertions occurred, inform listeners.");
  fireKNNsInserted(ids, rkNN_ids);
  log.setCompleted(stepprog);
}
use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
From the class MaterializeKNNPreprocessor, method preprocess.
/**
 * The actual preprocessing step: materialize the k nearest neighbors of every
 * object in the relation into {@code storage}.
 */
@Override
protected void preprocess() {
  // Could be subclass
  final Logging log = getLogger();
  createStorage();
  ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs());
  if (log.isStatistics()) {
    log.statistics(new LongStatistic(this.getClass().getName() + ".k", k));
  }
  Duration duration = log.isStatistics() ? log.newDuration(this.getClass().getName() + ".precomputation-time").begin() : null;
  // Consistency fix: reuse the local `log` instead of calling getLogger() twice.
  FiniteProgress progress = log.isVerbose() ? new FiniteProgress("Materializing k nearest neighbors (k=" + k + ")", ids.size(), log) : null;
  // Try bulk
  List<? extends KNNList> kNNList = null;
  if (usebulk) {
    kNNList = knnQuery.getKNNForBulkDBIDs(ids, k);
    if (kNNList != null) {
      // Bulk result list is parallel to ids.
      int i = 0;
      for (DBIDIter id = ids.iter(); id.valid(); id.advance(), i++) {
        storage.put(id, kNNList.get(i));
        log.incrementProcessed(progress);
      }
    }
  } else {
    final boolean ismetric = getDistanceQuery().getDistanceFunction().isMetric();
    for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      if (ismetric && storage.get(iter) != null) {
        log.incrementProcessed(progress);
        // Previously computed (duplicate point?)
        continue;
      }
      KNNList knn = knnQuery.getKNNForDBID(iter, k);
      storage.put(iter, knn);
      if (ismetric) {
        // In a metric space, objects at distance 0 have identical kNN lists:
        // share this result with all exact duplicates.
        for (DoubleDBIDListIter it = knn.iter(); it.valid() && it.doubleValue() == 0.; it.advance()) {
          // Reuse
          storage.put(it, knn);
        }
      }
      log.incrementProcessed(progress);
    }
  }
  log.ensureCompleted(progress);
  if (duration != null) {
    log.statistics(duration.end());
  }
}
use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
From the class AbstractRStarTree, method adjustTree.
/**
 * Adjusts the tree after insertion of some nodes.
 *
 * Walks from the given subtree root towards the tree root, handling node
 * overflows (via reinsert-or-split overflow treatment) and propagating
 * updated entry information (MBRs) upwards.
 *
 * @param subtree the subtree to be adjusted
 */
protected void adjustTree(IndexTreePath<E> subtree) {
  final Logging log = getLogger();
  if (log.isDebugging()) {
    log.debugFine("Adjust tree " + subtree);
  }
  // get the root of the subtree
  N node = getNode(subtree.getEntry());
  // overflow in node
  if (hasOverflow(node)) {
    // treatment of overflow: reinsertion or split
    N split = overflowTreatment(node, subtree);
    // node was split (null means entries were reinserted instead)
    if (split != null) {
      // split nodes
      if (isRoot(node)) {
        // Splitting the root grows the tree by one level.
        IndexTreePath<E> newRootPath = createNewRoot(node, split);
        height++;
        adjustTree(newRootPath);
      } else // node is not root
      {
        // get the parent and add the new split node
        N parent = getNode(subtree.getParentPath().getEntry());
        if (log.isDebugging()) {
          log.debugFine("parent " + parent);
        }
        parent.addDirectoryEntry(createNewDirectoryEntry(split));
        // adjust the entry representing the (old) node, that has
        // been split
        // This does not work in the persistent version
        // node.adjustEntry(subtree.getEntry());
        node.adjustEntry(parent.getEntry(subtree.getIndex()));
        // write changes in parent to file
        writeNode(parent);
        // Recurse upwards: the parent may now overflow as well.
        adjustTree(subtree.getParentPath());
      }
    }
  } else // no overflow, only adjust parameters of the entry representing the
  // node
  {
    if (!isRoot(node)) {
      N parent = getNode(subtree.getParentPath().getEntry());
      E entry = parent.getEntry(subtree.getIndex());
      // Incremental adjustment using only the last inserted entry;
      // propagate upwards only if the parent's entry actually changed.
      boolean changed = node.adjustEntryIncremental(entry, lastInsertedEntry);
      if (changed) {
        // node.adjustEntry(parent.getEntry(index));
        // write changes in parent to file
        writeNode(parent);
        adjustTree(subtree.getParentPath());
      }
    } else // root level is reached
    {
      node.adjustEntry(getRootEntry());
    }
  }
}
use of de.lmu.ifi.dbs.elki.logging.Logging in project elki by elki-project.
From the class AbstractRStarTree, method reInsert.
/**
 * Reinserts the specified node at the specified level.
 *
 * Removes the entries at the given offsets from the node, shrinks the
 * ancestor MBRs accordingly, and then inserts the removed entries again
 * from the top of the tree (R*-tree forced reinsertion).
 *
 * @param node the node to be reinserted
 * @param path the path to the node
 * @param offs the nodes indexes to reinsert
 */
public void reInsert(N node, IndexTreePath<E> path, int[] offs) {
  final int depth = path.getPathCount();
  // Collect the entries to reinsert and mark their slots for removal.
  long[] remove = BitsUtil.zero(node.getCapacity());
  List<E> reInsertEntries = new ArrayList<>(offs.length);
  for (int i = 0; i < offs.length; i++) {
    reInsertEntries.add(node.getEntry(offs[i]));
    BitsUtil.setI(remove, offs[i]);
  }
  // Remove the entries we reinsert
  node.removeMask(remove);
  writeNode(node);
  // and adapt the mbrs: walk up the path while the child's entry in its
  // parent actually changes; stop as soon as an ancestor MBR is unaffected.
  IndexTreePath<E> childPath = path;
  N child = node;
  while (childPath.getParentPath() != null) {
    N parent = getNode(childPath.getParentPath().getEntry());
    int indexOfChild = childPath.getIndex();
    if (child.adjustEntry(parent.getEntry(indexOfChild))) {
      writeNode(parent);
      childPath = childPath.getParentPath();
      child = parent;
    } else {
      // MBR didn't change, so no ancestor above can change either.
      // TODO: stop writing when MBR didn't change!
      break;
    }
  }
  // reinsert the first entries
  final Logging log = getLogger();
  for (E entry : reInsertEntries) {
    if (node.isLeaf()) {
      // Leaf entries go back in via the normal (leaf-level) insertion path.
      if (log.isDebugging()) {
        log.debug("reinsert " + entry);
      }
      insertLeafEntry(entry);
    } else {
      // Directory entries must be reinserted at their original depth.
      if (log.isDebugging()) {
        log.debug("reinsert " + entry + " at " + depth);
      }
      insertDirectoryEntry(entry, depth);
    }
  }
}
Aggregations