use of de.lmu.ifi.dbs.elki.result.outlier.OutlierResult in project elki by elki-project.
the class TrivialNoOutlier method run.
/**
* Run the actual algorithm.
*
* @param relation Relation
* @return Result
*/
public OutlierResult run(Relation<?> relation) {
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT);
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    scores.putDouble(iditer, 0.0);
  }
  DoubleRelation scoreres = new MaterializedDoubleRelation("Trivial no-outlier score", "no-outlier", scores, relation.getDBIDs());
  OutlierScoreMeta meta = new ProbabilisticOutlierScore();
  return new OutlierResult(meta, scoreres);
}
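This trivial baseline assigns every object the constant score 0.0 and marks the scores as probabilities via ProbabilisticOutlierScore. A minimal sketch (not from the ELKI sources, assuming TrivialNoOutlier's default constructor and an existing relation) of reading the scores back out of the returned OutlierResult:

OutlierResult result = new TrivialNoOutlier().run(relation);
DoubleRelation scoreRelation = result.getScores();
for (DBIDIter it = scoreRelation.iterDBIDs(); it.valid(); it.advance()) {
  // Every score is 0.0 here; the meta declares the probabilistic [0, 1] range.
  double score = scoreRelation.doubleValue(it);
}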
use of de.lmu.ifi.dbs.elki.result.outlier.OutlierResult in project elki by elki-project.
the class ComputeOutlierHistogram method processNewResult.
@Override
public void processNewResult(ResultHierarchy hier, Result newResult) {
  final Database db = ResultUtil.findDatabase(hier);
  List<OutlierResult> ors = ResultUtil.filterResults(hier, newResult, OutlierResult.class);
  if (ors == null || ors.isEmpty()) {
    // No outlier results found; nothing to evaluate.
    return;
  }
  for (OutlierResult or : ors) {
    db.getHierarchy().add(or, evaluateOutlierResult(db, or));
  }
}
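The lookup pattern above - find the Database, filter the OutlierResult instances below the new result, and return early if none exist - recurs throughout ELKI's result processors. A hedged sketch of a standalone helper following the same pattern (the method name inspectOutlierResults is illustrative, not part of ELKI):

static void inspectOutlierResults(ResultHierarchy hier, Result newResult) {
  List<OutlierResult> ors = ResultUtil.filterResults(hier, newResult, OutlierResult.class);
  if (ors == null || ors.isEmpty()) {
    return; // nothing to process
  }
  for (OutlierResult or : ors) {
    // The score meta records the observed and theoretical score ranges.
    OutlierScoreMeta meta = or.getOutlierMeta();
    double observedMin = meta.getActualMinimum();
    double observedMax = meta.getActualMaximum();
    // ... e.g. use these to configure histogram bins or a scaling function
  }
}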
use of de.lmu.ifi.dbs.elki.result.outlier.OutlierResult in project elki by elki-project.
the class SLOM method run.
/**
* @param database Database to process
* @param spatial Spatial Relation to use.
* @param relation Relation to use.
* @return Outlier detection result
*/
public OutlierResult run(Database database, Relation<N> spatial, Relation<O> relation) {
  final NeighborSetPredicate npred = getNeighborSetPredicateFactory().instantiate(database, spatial);
  DistanceQuery<O> distFunc = getNonSpatialDistanceFunction().instantiate(relation);
  WritableDoubleDataStore modifiedDistance = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  // calculate D-Tilde
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    double sum = 0;
    double maxDist = 0;
    int cnt = 0;
    final DBIDs neighbors = npred.getNeighborDBIDs(iditer);
    for (DBIDIter iter = neighbors.iter(); iter.valid(); iter.advance()) {
      if (DBIDUtil.equal(iditer, iter)) {
        continue;
      }
      double dist = distFunc.distance(iditer, iter);
      sum += dist;
      cnt++;
      maxDist = Math.max(maxDist, dist);
    }
    if (cnt > 1) {
      modifiedDistance.putDouble(iditer, ((sum - maxDist) / (cnt - 1)));
    } else {
      // Use regular distance when the d-tilde trick is undefined.
      // Note: this can be 0 when there were no neighbors.
      modifiedDistance.putDouble(iditer, maxDist);
    }
  }
  // Second step - compute actual SLOM values
  DoubleMinMax slomminmax = new DoubleMinMax();
  WritableDoubleDataStore sloms = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_STATIC);
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    double sum = 0;
    int cnt = 0;
    final DBIDs neighbors = npred.getNeighborDBIDs(iditer);
    for (DBIDIter iter = neighbors.iter(); iter.valid(); iter.advance()) {
      if (DBIDUtil.equal(iditer, iter)) {
        continue;
      }
      sum += modifiedDistance.doubleValue(iter);
      cnt++;
    }
    double slom;
    if (cnt > 0) {
      // With and without the object itself:
      double avgPlus = (sum + modifiedDistance.doubleValue(iditer)) / (cnt + 1);
      double avg = sum / cnt;
      double beta = 0;
      for (DBIDIter iter = neighbors.iter(); iter.valid(); iter.advance()) {
        final double dist = modifiedDistance.doubleValue(iter);
        if (dist > avgPlus) {
          beta += 1;
        } else if (dist < avgPlus) {
          beta -= 1;
        }
      }
      // Include object itself
      if (!neighbors.contains(iditer)) {
        final double dist = modifiedDistance.doubleValue(iditer);
        if (dist > avgPlus) {
          beta += 1;
        } else if (dist < avgPlus) {
          beta -= 1;
        }
      }
      beta = Math.abs(beta);
      // note: cnt == size of N(x), not N+(x)
      if (cnt > 1) {
        beta = Math.max(beta, 1.0) / (cnt - 1);
      } else {
        // Workaround insufficiency in SLOM paper - div by zero
        beta = 1.0;
      }
      beta = beta / (1 + avg);
      slom = beta * modifiedDistance.doubleValue(iditer);
    } else {
      // No neighbors to compare to - no score.
      slom = 0.0;
    }
    sloms.putDouble(iditer, slom);
    slomminmax.put(slom);
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("SLOM", "slom-outlier", sloms, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(slomminmax.getMin(), slomminmax.getMax(), 0.0, Double.POSITIVE_INFINITY);
  OutlierResult or = new OutlierResult(scoreMeta, scoreResult);
  or.addChildResult(npred);
  return or;
}
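The first loop computes the modified distance (d-tilde): the average distance to the spatial neighbors with the single largest distance dropped. Stripped of ELKI's data stores, that step amounts to the following standalone sketch (for illustration only; not part of the project):

// Trimmed mean of the neighbor distances: discard the largest value once.
static double dTilde(double[] neighborDistances) {
  double sum = 0, maxDist = 0;
  for (double d : neighborDistances) {
    sum += d;
    maxDist = Math.max(maxDist, d);
  }
  final int cnt = neighborDistances.length;
  // With fewer than two neighbors the trimmed mean is undefined;
  // fall back to the (possibly zero) maximum, as run() does above.
  return cnt > 1 ? (sum - maxDist) / (cnt - 1) : maxDist;
}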
use of de.lmu.ifi.dbs.elki.result.outlier.OutlierResult in project elki by elki-project.
the class FastABOD method run.
/**
* Run Fast-ABOD on the data set.
*
* @param relation Relation to process
* @return Outlier detection result
*/
@Override
public OutlierResult run(Database db, Relation<V> relation) {
  DBIDs ids = relation.getDBIDs();
  // Build a kernel matrix, to make O(n^3) slightly less bad.
  SimilarityQuery<V> sq = db.getSimilarityQuery(relation, kernelFunction);
  KernelMatrix kernelMatrix = new KernelMatrix(sq, relation, ids);
  WritableDoubleDataStore abodvalues = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmaxabod = new DoubleMinMax();
  MeanVariance s = new MeanVariance();
  KNNHeap nn = DBIDUtil.newHeap(k);
  for (DBIDIter pA = ids.iter(); pA.valid(); pA.advance()) {
    final double simAA = kernelMatrix.getSimilarity(pA, pA);
    // Choose the k-min nearest
    nn.clear();
    for (DBIDIter nB = relation.iterDBIDs(); nB.valid(); nB.advance()) {
      if (DBIDUtil.equal(nB, pA)) {
        continue;
      }
      double simBB = kernelMatrix.getSimilarity(nB, nB);
      double simAB = kernelMatrix.getSimilarity(pA, nB);
      double sqdAB = simAA + simBB - simAB - simAB;
      if (!(sqdAB > 0.)) {
        continue;
      }
      nn.insert(sqdAB, nB);
    }
    KNNList nl = nn.toKNNList();
    s.reset();
    DoubleDBIDListIter iB = nl.iter(), iC = nl.iter();
    for (; iB.valid(); iB.advance()) {
      double sqdAB = iB.doubleValue();
      double simAB = kernelMatrix.getSimilarity(pA, iB);
      if (!(sqdAB > 0.)) {
        continue;
      }
      for (iC.seek(iB.getOffset() + 1); iC.valid(); iC.advance()) {
        double sqdAC = iC.doubleValue();
        double simAC = kernelMatrix.getSimilarity(pA, iC);
        if (!(sqdAC > 0.)) {
          continue;
        }
        // Exploit bilinearity of scalar product:
        // <B-A, C-A> = <B, C-A> - <A,C-A>
        // = <B,C> - <B,A> - <A,C> + <A,A>
        double simBC = kernelMatrix.getSimilarity(iB, iC);
        double numerator = simBC - simAB - simAC + simAA;
        double div = 1. / (sqdAB * sqdAC);
        s.put(numerator * div, FastMath.sqrt(div));
      }
    }
    // Sample variance would probably be better, but the ABOD
    // publication uses the naive variance.
    final double abof = s.getNaiveVariance();
    minmaxabod.put(abof);
    abodvalues.putDouble(pA, abof);
  }
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Angle-Based Outlier Degree", "abod-outlier", abodvalues, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new InvertedOutlierScoreMeta(minmaxabod.getMin(), minmaxabod.getMax(), 0.0, Double.POSITIVE_INFINITY);
  return new OutlierResult(scoreMeta, scoreResult);
}
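The inner loops rely on the bilinearity identity noted in the comments, <B-A, C-A> = <B,C> - <B,A> - <A,C> + <A,A>, so that angles between difference vectors can be read directly off the kernel matrix. For a plain dot product (linear kernel) the identity is easy to check outside ELKI; a self-contained sketch, not part of the project:

public class BilinearityCheck {
  static double dot(double[] x, double[] y) {
    double s = 0;
    for (int i = 0; i < x.length; i++) {
      s += x[i] * y[i];
    }
    return s;
  }

  public static void main(String[] args) {
    double[] a = { 1, 2 }, b = { 4, 0 }, c = { 5, 3 };
    double[] ba = { b[0] - a[0], b[1] - a[1] }, ca = { c[0] - a[0], c[1] - a[1] };
    double direct = dot(ba, ca);
    double expanded = dot(b, c) - dot(b, a) - dot(a, c) + dot(a, a);
    System.out.println(direct + " == " + expanded); // both evaluate to 10.0
  }
}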
use of de.lmu.ifi.dbs.elki.result.outlier.OutlierResult in project elki by elki-project.
the class ISOS method run.
/**
* Run the algorithm.
*
* @param relation data relation.
* @return outlier detection result
*/
public OutlierResult run(Relation<O> relation) {
  // Query size
  final int k1 = k + 1;
  final double perplexity = k / 3.;
  KNNQuery<O> knnq = relation.getKNNQuery(getDistanceFunction(), k1);
  final double logPerp = FastMath.log(perplexity);
  double[] p = new double[k + 10];
  ModifiableDoubleDBIDList dists = DBIDUtil.newDistanceDBIDList(k + 10);
  DoubleDBIDListIter di = dists.iter();
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("ISOS scores", relation.size(), LOG) : null;
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB, 1.);
  for (DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    KNNList knns = knnq.getKNNForDBID(it, k1);
    if (p.length < knns.size() + 1) {
      p = new double[knns.size() + 10];
    }
    final DoubleDBIDListIter ki = knns.iter();
    try {
      double id = estimateID(it, ki, p);
      adjustDistances(it, ki, knns.getKNNDistance(), id, dists);
      // We now continue with the modified distances:
      // Compute affinities
      SOS.computePi(it, di, p, perplexity, logPerp);
      // Normalization factor:
      double s = SOS.sumOfProbabilities(it, di, p);
      if (s > 0) {
        nominateNeighbors(it, di, p, 1. / s, scores);
      }
    } catch (ArithmeticException e) {
      // ID estimation failed, supposedly constant values.
      if (knns.size() > 1) {
        Arrays.fill(p, 1. / (knns.size() - 1));
        nominateNeighbors(it, di, p, 1., scores);
      }
    }
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  DoubleMinMax minmax = transformScores(scores, relation.getDBIDs(), logPerp, phi);
  DoubleRelation scoreres = new MaterializedDoubleRelation("Intrinsic Stochastic Outlier Selection", "isos-outlier", scores, relation.getDBIDs());
  OutlierScoreMeta meta = new ProbabilisticOutlierScore(minmax.getMin(), minmax.getMax(), 0.);
  return new OutlierResult(meta, scoreres);
}
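Since the meta is a ProbabilisticOutlierScore built from the observed minimum and maximum with a baseline of 0, scores can be mapped into [0, 1] through the meta's normalizeScore method. A minimal consumption sketch (assuming an already configured ISOS instance named isos and an existing relation; names are illustrative):

OutlierResult result = isos.run(relation);
OutlierScoreMeta meta = result.getOutlierMeta();
DoubleRelation scoreRelation = result.getScores();
for (DBIDIter it = scoreRelation.iterDBIDs(); it.valid(); it.advance()) {
  // normalizeScore maps a raw score into [0, 1] relative to the declared range.
  double normalized = meta.normalizeScore(scoreRelation.doubleValue(it));
}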