Use of de.lmu.ifi.dbs.elki.database.ids.DoubleDBIDList in project elki (elki-project):
the COPACNeighborPredicate.instantiate method.
/**
 * Fully instantiate this predicate for the given database and relation.
 *
 * Precomputes a local COPAC correlation model for every object in the
 * relation, based on its k nearest neighbors under Euclidean distance.
 *
 * @param database Database to query
 * @param relation Vector relation to process
 * @return Instance holding the precomputed per-object models
 */
public COPACNeighborPredicate.Instance instantiate(Database database, Relation<V> relation) {
  DistanceQuery<V> distQuery = database.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnQuery = database.getKNNQuery(distQuery, settings.k);
  // Temporary hot storage: models are only needed while clustering runs.
  WritableDataStore<COPACModel> models = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, COPACModel.class);
  Duration timer = LOG.newDuration(this.getClass().getName() + ".preprocessing-time").begin();
  FiniteProgress prog = !LOG.isVerbose() ? null : new FiniteProgress(this.getClass().getName(), relation.size(), LOG);
  // Derive and store a local model from each object's kNN neighborhood.
  for (DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    DoubleDBIDList neighbors = knnQuery.getKNNForDBID(it, settings.k);
    models.put(it, computeLocalModel(it, neighbors, relation));
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  LOG.statistics(timer.end());
  return new Instance(relation.getDBIDs(), models);
}
Use of de.lmu.ifi.dbs.elki.database.ids.DoubleDBIDList in project elki (elki-project):
the ERiCNeighborPredicate.instantiate method.
/**
 * Fully instantiate this predicate for the given database and relation.
 *
 * For every object, runs a PCA on its k nearest neighbors (Euclidean
 * distance), applies the configured eigenpair filter, and stores the
 * filtered PCA result for later neighborhood tests.
 *
 * @param database Database to query
 * @param relation Relation to process
 * @return Instance holding the precomputed per-object PCA results
 */
public Instance instantiate(Database database, Relation<V> relation) {
  DistanceQuery<V> distQuery = database.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnQuery = database.getKNNQuery(distQuery, settings.k);
  // Temporary hot storage: results are only needed during clustering.
  WritableDataStore<PCAFilteredResult> results = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, PCAFilteredResult.class);
  PCARunner pcaRunner = settings.pca;
  EigenPairFilter eigenFilter = settings.filter;
  Duration timer = LOG.newDuration(this.getClass().getName() + ".preprocessing-time").begin();
  FiniteProgress prog = !LOG.isVerbose() ? null : new FiniteProgress(this.getClass().getName(), relation.size(), LOG);
  for (DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    DoubleDBIDList neighbors = knnQuery.getKNNForDBID(it, settings.k);
    PCAResult pca = pcaRunner.processQueryResult(neighbors, relation);
    // Filter eigenpairs into strong/weak; 1./0. are the default big/small values.
    results.put(it, new PCAFilteredResult(pca.getEigenPairs(), eigenFilter.filter(pca.getEigenvalues()), 1., 0.));
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  LOG.statistics(timer.end());
  return new Instance(relation.getDBIDs(), results, relation);
}
Use of de.lmu.ifi.dbs.elki.database.ids.DoubleDBIDList in project elki (elki-project):
the AbstractFilteredPCAIndex.initialize method.
@Override
public void initialize() {
  // Refuse to build the index over a missing or empty relation.
  if (relation == null || relation.size() <= 0) {
    throw new EmptyDataException();
  }
  // Already built (e.g. recomputed for the partitions) — keep existing storage.
  if (storage != null) {
    return;
  }
  // Temporary hot storage for the per-object filtered PCA results.
  storage = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, PCAFilteredResult.class);
  long startMillis = System.currentTimeMillis();
  FiniteProgress prog = !getLogger().isVerbose() ? null : new FiniteProgress("Performing local PCA", relation.size(), getLogger());
  // TODO: use a bulk operation?
  for (DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    DoubleDBIDList neighborhood = objectsForPCA(it);
    SortedEigenPairs eigenPairs = pca.processIds(neighborhood, relation).getEigenPairs();
    int strongCount = filter.filter(eigenPairs.eigenValues());
    // 1./0. are the default big/small eigenvalue substitutes.
    storage.put(it, new PCAFilteredResult(eigenPairs, strongCount, 1., 0.));
    getLogger().incrementProcessed(prog);
  }
  getLogger().ensureCompleted(prog);
  long endMillis = System.currentTimeMillis();
  if (getLogger().isVerbose()) {
    getLogger().verbose(this.getClass().getName() + " runtime: " + (endMillis - startMillis) + " milliseconds.");
  }
}
Use of de.lmu.ifi.dbs.elki.database.ids.DoubleDBIDList in project elki (elki-project):
the DWOF.clusterData method.
/**
 * This method applies a density based clustering algorithm.
 *
 * It looks for an unclustered object and builds a new cluster for it, then
 * adds all the points within its radius to that cluster.
 *
 * nChain represents the points to be added to the cluster and its
 * radius-neighbors
 *
 * NOTE(review): the inner loop iterates nChain while appending to it; this
 * relies on the array-backed DBID iterator seeing elements added during
 * iteration — do not replace with a snapshot iterator.
 *
 * @param ids Database IDs to process
 * @param rnnQuery Data to process
 * @param radii Radii to cluster accordingly
 * @param labels Label storage.
 */
private void clusterData(DBIDs ids, RangeQuery<O> rnnQuery, WritableDoubleDataStore radii, WritableDataStore<ModifiableDBIDs> labels) {
FiniteProgress clustProg = LOG.isVerbose() ? new FiniteProgress("Density-Based Clustering", ids.size(), LOG) : null;
// Iterate over all objects
for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
// Skip objects that already belong to some cluster.
if (labels.get(iter) != null) {
continue;
}
// Seed a fresh cluster with this unlabeled object.
ModifiableDBIDs newCluster = DBIDUtil.newArray();
newCluster.add(iter);
labels.put(iter, newCluster);
LOG.incrementProcessed(clustProg);
// container of the points to be added and their radii neighbors to the
// cluster
ModifiableDBIDs nChain = DBIDUtil.newArray();
nChain.add(iter);
// iterate over nChain
// (the chain grows as neighbors are appended below — transitive expansion)
for (DBIDIter toGetNeighbors = nChain.iter(); toGetNeighbors.valid(); toGetNeighbors.advance()) {
// Each point uses its own precomputed radius for the range query.
double range = radii.doubleValue(toGetNeighbors);
DoubleDBIDList nNeighbors = rnnQuery.getRangeForDBID(toGetNeighbors, range);
for (DoubleDBIDListIter iter2 = nNeighbors.iter(); iter2.valid(); iter2.advance()) {
// A point is always in its own range query result; skip it.
if (DBIDUtil.equal(toGetNeighbors, iter2)) {
continue;
}
if (labels.get(iter2) == null) {
// Unlabeled neighbor: absorb into the current cluster and queue it
// for its own neighborhood expansion.
newCluster.add(iter2);
labels.put(iter2, newCluster);
nChain.add(iter2);
LOG.incrementProcessed(clustProg);
} else if (labels.get(iter2) != newCluster) {
// Neighbor belongs to a different cluster: merge that whole cluster
// into the current one and relabel all of its members.
ModifiableDBIDs toBeDeleted = labels.get(iter2);
newCluster.addDBIDs(toBeDeleted);
for (DBIDIter iter3 = toBeDeleted.iter(); iter3.valid(); iter3.advance()) {
labels.put(iter3, newCluster);
}
// Empty the merged cluster so it is not processed again.
toBeDeleted.clear();
}
}
}
}
LOG.ensureCompleted(clustProg);
}
Aggregations