Usage example of de.lmu.ifi.dbs.elki.database.ids.KNNList in the ELKI project (elki-project): class AveragePrecisionAtK, method run.
/**
 * Run the algorithm: for a random sample of query objects, compute precision
 * at each rank 1..k (using class labels as ground truth) and aggregate the
 * per-rank statistics.
 *
 * @param database Database to run on (for kNN queries)
 * @param relation Relation for distance computations
 * @param lrelation Relation for class label comparison
 * @return Vectors containing mean and standard deviation.
 */
public CollectionResult<double[]> run(Database database, Relation<O> relation, Relation<?> lrelation) {
  final DistanceQuery<O> distanceQuery = database.getDistanceQuery(relation, getDistanceFunction());
  // Request one extra neighbor when the query point itself will be skipped.
  final int numNeighbors = k + (includeSelf ? 0 : 1);
  final KNNQuery<O> knnQuery = database.getKNNQuery(distanceQuery, numNeighbors);
  // One statistics accumulator per rank.
  MeanVarianceMinMax[] stats = MeanVarianceMinMax.newArray(k);
  final DBIDs sample = DBIDUtil.randomSample(relation.getDBIDs(), sampling, random);
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("Computing nearest neighbors", sample.size(), LOG) : null;
  for (DBIDIter query = sample.iter(); query.valid(); query.advance()) {
    KNNList neighbors = knnQuery.getKNNForDBID(query, numNeighbors);
    Object queryLabel = lrelation.get(query);
    int hits = 0, rank = 0;
    DBIDIter nb = neighbors.iter();
    while (rank < k && nb.valid()) {
      // The query object itself does not occupy a rank when excluded.
      if (includeSelf || !DBIDUtil.equal(query, nb)) {
        hits += match(queryLabel, lrelation.get(nb)) ? 1 : 0;
        // Precision at this rank: fraction of matches among the first (rank+1).
        stats[rank].put(hits / (double) (rank + 1));
        rank++;
      }
      nb.advance();
    }
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  // Emit one row per rank: rank, mean, stddev, min, max, count.
  Collection<double[]> rows = new ArrayList<>(k);
  for (int rank = 0; rank < k; rank++) {
    final MeanVarianceMinMax mv = stats[rank];
    final double stddev = mv.getCount() > 1. ? mv.getSampleStddev() : 0.;
    rows.add(new double[] { rank + 1, mv.getMean(), stddev, mv.getMin(), mv.getMax(), mv.getCount() });
  }
  return new CollectionResult<>("Average Precision", "average-precision", rows);
}
Usage example of de.lmu.ifi.dbs.elki.database.ids.KNNList in the ELKI project (elki-project): class KNNJoin, method run.
/**
 * Joins in the given spatial database to each object its k-nearest neighbors.
 *
 * @param relation Relation to process
 * @return result relation mapping each object to its kNN list
 */
public Relation<KNNList> run(Relation<V> relation) {
  final DBIDs objectIds = relation.getDBIDs();
  // Compute the kNN lists, then expose the data store as a relation.
  WritableDataStore<KNNList> store = run(relation, objectIds);
  return new MaterializedRelation<>("k nearest neighbors", "kNNs", TypeUtil.KNNLIST, store, objectIds);
}
Usage example of de.lmu.ifi.dbs.elki.database.ids.KNNList in the ELKI project (elki-project): class KNNJoin, inner method run.
/**
 * Inner run method: compute for every object its k nearest neighbors via a
 * page-wise spatial join with distance-based pruning. The resulting store is
 * also used by
 * {@link de.lmu.ifi.dbs.elki.index.preprocessed.knn.KNNJoinMaterializeKNNPreprocessor}
 *
 * @param relation Data relation
 * @param ids Object IDs
 * @return Data store mapping each object id to its kNN list
 */
@SuppressWarnings("unchecked")
public WritableDataStore<KNNList> run(Relation<V> relation, DBIDs ids) {
// The join relies on MBR distance lower bounds, so a spatial primitive distance is required.
if (!(getDistanceFunction() instanceof SpatialPrimitiveDistanceFunction)) {
throw new IllegalStateException("Distance Function must be an instance of " + SpatialPrimitiveDistanceFunction.class.getName());
}
// Locate the (single) spatial index attached to this relation.
Collection<SpatialIndexTree<N, E>> indexes = ResultUtil.filterResults(relation.getHierarchy(), relation, SpatialIndexTree.class);
if (indexes.size() != 1) {
throw new MissingPrerequisitesException("KNNJoin found " + indexes.size() + " spatial indexes, expected exactly one.");
}
// FIXME: Ensure were looking at the right relation!
SpatialIndexTree<N, E> index = indexes.iterator().next();
SpatialPrimitiveDistanceFunction<V> distFunction = (SpatialPrimitiveDistanceFunction<V>) getDistanceFunction();
// data pages (leaf entries of the spatial index)
List<E> ps_candidates = new ArrayList<>(index.getLeaves());
// knn heaps: one list of heaps per data page, one heap per object
List<List<KNNHeap>> heaps = new ArrayList<>(ps_candidates.size());
// Initialize with the page self-pairing
for (int i = 0; i < ps_candidates.size(); i++) {
E pr_entry = ps_candidates.get(i);
N pr = index.getNode(pr_entry);
heaps.add(initHeaps(distFunction, pr));
}
// Build priority queue of page pairs, ordered by minimum MBR distance.
// Number of unordered page pairs: n * (n - 1) / 2.
final int sqsize = ps_candidates.size() * (ps_candidates.size() - 1) >>> 1;
ComparableMinHeap<Task> pq = new ComparableMinHeap<>(sqsize);
if (LOG.isDebuggingFine()) {
LOG.debugFine("Number of leaves: " + ps_candidates.size() + " so " + sqsize + " MBR computations.");
}
FiniteProgress mprogress = LOG.isVerbose() ? new FiniteProgress("Comparing leaf MBRs", sqsize, LOG) : null;
for (int i = 0; i < ps_candidates.size(); i++) {
E pr_entry = ps_candidates.get(i);
N pr = index.getNode(pr_entry);
List<KNNHeap> pr_heaps = heaps.get(i);
// Stop distance: beyond this, no object in the page can improve its kNN.
double pr_knn_distance = computeStopDistance(pr_heaps);
for (int j = i + 1; j < ps_candidates.size(); j++) {
E ps_entry = ps_candidates.get(j);
N ps = index.getNode(ps_entry);
List<KNNHeap> ps_heaps = heaps.get(j);
double ps_knn_distance = computeStopDistance(ps_heaps);
// Lower bound on any object-pair distance between the two pages.
double minDist = distFunction.minDist(pr_entry, ps_entry);
// Resolve immediately:
if (minDist <= 0.) {
// Overlapping pages always have to be joined.
processDataPages(distFunction, pr_heaps, ps_heaps, pr, ps);
} else if (minDist <= pr_knn_distance || minDist <= ps_knn_distance) {
// Defer: the pair may still improve at least one side's kNN.
pq.add(new Task(minDist, i, j));
}
LOG.incrementProcessed(mprogress);
}
}
LOG.ensureCompleted(mprogress);
// Process the queue in ascending order of minimum distance. Stop distances
// shrink as heaps fill up, so many deferred pairs can be skipped entirely.
FiniteProgress qprogress = LOG.isVerbose() ? new FiniteProgress("Processing queue", pq.size(), LOG) : null;
IndefiniteProgress fprogress = LOG.isVerbose() ? new IndefiniteProgress("Full comparisons", LOG) : null;
while (!pq.isEmpty()) {
Task task = pq.poll();
List<KNNHeap> pr_heaps = heaps.get(task.i);
List<KNNHeap> ps_heaps = heaps.get(task.j);
// Re-check with the current (possibly tightened) stop distances.
double pr_knn_distance = computeStopDistance(pr_heaps);
double ps_knn_distance = computeStopDistance(ps_heaps);
// dor / dos: the pair can still improve page r's / page s's heaps.
boolean dor = task.mindist <= pr_knn_distance;
boolean dos = task.mindist <= ps_knn_distance;
if (dor || dos) {
N pr = index.getNode(ps_candidates.get(task.i));
N ps = index.getNode(ps_candidates.get(task.j));
if (dor && dos) {
processDataPages(distFunction, pr_heaps, ps_heaps, pr, ps);
} else {
// One-sided join: the null argument presumably means the second
// page's heaps are not updated — confirm in processDataPages.
if (dor) {
processDataPages(distFunction, pr_heaps, null, pr, ps);
} else /* dos */
{
processDataPages(distFunction, ps_heaps, null, ps, pr);
}
}
LOG.incrementProcessed(fprogress);
}
LOG.incrementProcessed(qprogress);
}
LOG.ensureCompleted(qprogress);
LOG.setCompleted(fprogress);
// Convert the per-object heaps into immutable kNN lists, keyed by the
// DBID stored in each leaf entry.
WritableDataStore<KNNList> knnLists = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_STATIC, KNNList.class);
FiniteProgress pageprog = LOG.isVerbose() ? new FiniteProgress("Number of processed data pages", ps_candidates.size(), LOG) : null;
for (int i = 0; i < ps_candidates.size(); i++) {
N pr = index.getNode(ps_candidates.get(i));
List<KNNHeap> pr_heaps = heaps.get(i);
// Finalize lists
for (int j = 0; j < pr.getNumEntries(); j++) {
knnLists.put(((LeafEntry) pr.getEntry(j)).getDBID(), pr_heaps.get(j).toKNNList());
}
// Forget heaps and pq (release memory eagerly)
heaps.set(i, null);
LOG.incrementProcessed(pageprog);
}
LOG.ensureCompleted(pageprog);
return knnLists;
}
Usage example of de.lmu.ifi.dbs.elki.database.ids.KNNList in the ELKI project (elki-project): class KNNBenchmarkAlgorithm, method run.
/**
 * Run the algorithm.
 *
 * Benchmarks kNN queries either on a random sample of the database objects
 * themselves, or on a separate query data set, and logs a result checksum,
 * the mean number of results, and the mean k-distance.
 *
 * @param database Database
 * @param relation Relation
 * @return Null result
 */
public Result run(Database database, Relation<O> relation) {
  // Get a distance and kNN query instance.
  DistanceQuery<O> distQuery = database.getDistanceQuery(relation, getDistanceFunction());
  KNNQuery<O> knnQuery = database.getKNNQuery(distQuery, k);
  if (queries == null) {
    // No query set - use original database.
    benchmarkDatabaseQueries(knnQuery, relation);
  } else {
    // Separate query set.
    benchmarkExternalQueries(knnQuery);
  }
  return null;
}

/**
 * Benchmark using a random sample of the database objects as query points.
 *
 * @param knnQuery kNN query to benchmark
 * @param relation Relation providing the query object ids
 */
private void benchmarkDatabaseQueries(KNNQuery<O> knnQuery, Relation<O> relation) {
  final DBIDs sample = DBIDUtil.randomSample(relation.getDBIDs(), sampling, random);
  FiniteProgress prog = LOG.isVeryVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null;
  int hash = 0;
  MeanVariance mv = new MeanVariance(), mvdist = new MeanVariance();
  for (DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) {
    KNNList knns = knnQuery.getKNNForDBID(iditer, k);
    hash = Util.mixHashCodes(hash, checksum(knns));
    mv.put(knns.size());
    mvdist.put(knns.getKNNDistance());
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  logStatistics(hash, mv, mvdist);
}

/**
 * Benchmark using query points loaded from a separate query data set.
 *
 * @param knnQuery kNN query to benchmark
 */
private void benchmarkExternalQueries(KNNQuery<O> knnQuery) {
  TypeInformation res = getDistanceFunction().getInputTypeRestriction();
  MultipleObjectsBundle bundle = queries.loadData();
  int col = findQueryColumn(bundle, res);
  // Random sampling is a bit of hack, sorry.
  // But currently, we don't (yet) have an "integer random sample" function.
  DBIDRange sids = DBIDUtil.generateStaticDBIDRange(bundle.dataLength());
  final DBIDs sample = DBIDUtil.randomSample(sids, sampling, random);
  FiniteProgress prog = LOG.isVeryVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null;
  int hash = 0;
  MeanVariance mv = new MeanVariance(), mvdist = new MeanVariance();
  for (DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) {
    // Map the sampled synthetic id back to its offset in the bundle.
    int off = sids.binarySearch(iditer);
    assert (off >= 0);
    @SuppressWarnings("unchecked") O o = (O) bundle.data(off, col);
    KNNList knns = knnQuery.getKNNForObject(o, k);
    hash = Util.mixHashCodes(hash, checksum(knns));
    mv.put(knns.size());
    mvdist.put(knns.getKNNDistance());
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  logStatistics(hash, mv, mvdist);
}

/**
 * Find the first bundle column compatible with the required input type.
 *
 * @param bundle Loaded query data
 * @param res Required input type restriction
 * @return Column index
 */
private int findQueryColumn(MultipleObjectsBundle bundle, TypeInformation res) {
  for (int i = 0; i < bundle.metaLength(); i++) {
    if (res.isAssignableFromType(bundle.meta(i))) {
      return i;
    }
  }
  throw new IncompatibleDataException("No compatible data type in query input was found. Expected: " + res.toString());
}

/**
 * Order-independent checksum over the DBIDs of a kNN result.
 *
 * @param knns kNN result list
 * @return Sum of the integer ids
 */
private static int checksum(KNNList knns) {
  int ichecksum = 0;
  for (DBIDIter it = knns.iter(); it.valid(); it.advance()) {
    ichecksum += DBIDUtil.asInteger(it);
  }
  return ichecksum;
}

/**
 * Log the benchmark summary statistics, if statistics logging is enabled.
 *
 * @param hash Combined result hash code
 * @param mv Statistics on result sizes
 * @param mvdist Statistics on k-distances
 */
private void logStatistics(int hash, MeanVariance mv, MeanVariance mvdist) {
  if (LOG.isStatistics()) {
    LOG.statistics("Result hashcode: " + hash);
    LOG.statistics("Mean number of results: " + mv.getMean() + " +- " + mv.getNaiveStddev());
    if (mvdist.getCount() > 0) {
      LOG.statistics("Mean k-distance: " + mvdist.getMean() + " +- " + mvdist.getNaiveStddev());
    }
  }
}
Usage example of de.lmu.ifi.dbs.elki.database.ids.KNNList in the ELKI project (elki-project): class AbstractIndexStructureTest, method testExactCosine.
/**
 * Actual test routine, for cosine distance.
 *
 * @param inputparams database and index parameterization
 * @param expectKNNQuery expected kNN query class, or null to skip the kNN check
 * @param expectRangeQuery expected range query class, or null to skip the range check
 */
protected void testExactCosine(ListParameterization inputparams, Class<?> expectKNNQuery, Class<?> expectRangeQuery) {
  // Use a fixed DBID - historically, we used 1 indexed - to reduce random
  // variation in results due to different hash codes everywhere.
  inputparams.addParameter(AbstractDatabaseConnection.Parameterizer.FILTERS_ID, new FixedDBIDsFilter(1));
  Database database = AbstractSimpleAlgorithmTest.makeSimpleDatabase(dataset, shoulds, inputparams);
  Relation<DoubleVector> relation = database.getRelation(TypeUtil.DOUBLE_VECTOR_FIELD);
  DistanceQuery<DoubleVector> distanceQuery = database.getDistanceQuery(relation, CosineDistanceFunction.STATIC);
  if (expectKNNQuery != null) {
    // get the 10 next neighbors
    DoubleVector query = DoubleVector.wrap(querypoint);
    KNNQuery<DoubleVector> knnq = database.getKNNQuery(distanceQuery, k);
    assertTrue("Returned knn query is not of expected class: expected " + expectKNNQuery + " got " + knnq.getClass(), expectKNNQuery.isAssignableFrom(knnq.getClass()));
    KNNList result = knnq.getKNNForObject(query, k);
    assertEquals("Result size does not match expectation!", cosshouldd.length, result.size());
    // verify that the neighbors match.
    int pos = 0;
    for (DoubleDBIDListIter it = result.iter(); it.valid(); it.advance()) {
      // Verify distance
      assertEquals("Expected distance doesn't match.", cosshouldd[pos], it.doubleValue(), 1e-15);
      // verify vector
      DoubleVector found = relation.get(it);
      DoubleVector wanted = DoubleVector.wrap(cosshouldc[pos]);
      assertEquals("Expected vector doesn't match: " + found.toString(), 0.0, distanceQuery.distance(found, wanted), 1e-15);
      pos++;
    }
  }
  if (expectRangeQuery != null) {
    // Do a range query
    DoubleVector query = DoubleVector.wrap(querypoint);
    RangeQuery<DoubleVector> rangeq = database.getRangeQuery(distanceQuery, coseps);
    assertTrue("Returned range query is not of expected class: expected " + expectRangeQuery + " got " + rangeq.getClass(), expectRangeQuery.isAssignableFrom(rangeq.getClass()));
    DoubleDBIDList result = rangeq.getRangeForObject(query, coseps);
    assertEquals("Result size does not match expectation!", cosshouldd.length, result.size());
    // verify that the neighbors match.
    int pos = 0;
    for (DoubleDBIDListIter it = result.iter(); it.valid(); it.advance()) {
      // Verify distance
      assertEquals("Expected distance doesn't match.", cosshouldd[pos], it.doubleValue(), 1e-15);
      // verify vector
      DoubleVector found = relation.get(it);
      DoubleVector wanted = DoubleVector.wrap(cosshouldc[pos]);
      assertEquals("Expected vector doesn't match: " + found.toString(), 0.0, distanceQuery.distance(found, wanted), 1e-15);
      pos++;
    }
  }
}
Aggregated KNNList usage examples from the ELKI project end here.