Use of de.lmu.ifi.dbs.elki.result.outlier.BasicOutlierScoreMeta in project elki by elki-project.
The class HiCS, method run:
/**
 * Perform HiCS on a given database.
 *
 * @param relation the database
 * @return The aggregated resulting scores that were assigned by the given
 *         outlier detection algorithm
 */
public OutlierResult run(Relation<V> relation) {
  final DBIDs ids = relation.getDBIDs();
  ArrayList<ArrayDBIDs> subspaceIndex = buildOneDimIndexes(relation);
  Set<HiCSSubspace> subspaces = calculateSubspaces(relation, subspaceIndex, rnd.getSingleThreadedRandom());
  if (LOG.isVerbose()) {
    LOG.verbose("Number of high-contrast subspaces: " + subspaces.size());
  }
  List<DoubleRelation> results = new ArrayList<>();
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Calculating Outlier scores for high Contrast subspaces", subspaces.size(), LOG) : null;
  // Run the configured outlier algorithm in each subspace and collect the results
  // (TODO: use a materialized relation instead of SubspaceEuclideanDistanceFunction?)
  for (HiCSSubspace dimset : subspaces) {
    if (LOG.isVerbose()) {
      LOG.verbose("Performing outlier detection in subspace " + dimset);
    }
    ProxyDatabase pdb = new ProxyDatabase(ids);
    pdb.addRelation(new ProjectedView<>(relation, new NumericalFeatureSelection<V>(dimset)));
    // run LOF and collect the result
    OutlierResult result = outlierAlgorithm.run(pdb);
    results.add(result.getScores());
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmax = new DoubleMinMax();
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    double sum = 0.0;
    for (DoubleRelation r : results) {
      final double s = r.doubleValue(iditer);
      if (!Double.isNaN(s)) {
        sum += s;
      }
    }
    scores.putDouble(iditer, sum);
    minmax.put(sum);
  }
  OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax());
  DoubleRelation scoreres = new MaterializedDoubleRelation("HiCS", "HiCS-outlier", scores, relation.getDBIDs());
  return new OutlierResult(meta, scoreres);
}
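Every usage in this listing ends with the same wrap-up pattern as the last four lines above: per-object scores are accumulated in a WritableDoubleDataStore, their range is tracked with a DoubleMinMax, and both are packaged into an OutlierResult. A minimal sketch of that pattern, assuming the ELKI classes and imports already shown in the snippet (the helper name wrapScores is illustrative, not part of ELKI):

// Hedged sketch: wrap a finished score store into an OutlierResult, as done above.
// Only the observed score range is known here, so the two-argument
// BasicOutlierScoreMeta without theoretical bounds is used.
OutlierResult wrapScores(DBIDs ids, WritableDoubleDataStore scores, DoubleMinMax minmax) {
  OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax());
  DoubleRelation rel = new MaterializedDoubleRelation("HiCS", "HiCS-outlier", scores, ids);
  return new OutlierResult(meta, rel);
}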
Use of de.lmu.ifi.dbs.elki.result.outlier.BasicOutlierScoreMeta in project elki by elki-project.
The class SimpleOutlierEnsemble, method run:
@Override
public OutlierResult run(Database database) throws IllegalStateException {
  int num = algorithms.size();
  // Run inner outlier algorithms
  ModifiableDBIDs ids = DBIDUtil.newHashSet();
  ArrayList<OutlierResult> results = new ArrayList<>(num);
  {
    FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Inner outlier algorithms", num, LOG) : null;
    for (Algorithm alg : algorithms) {
      Result res = alg.run(database);
      List<OutlierResult> ors = OutlierResult.getOutlierResults(res);
      for (OutlierResult or : ors) {
        results.add(or);
        ids.addDBIDs(or.getScores().getDBIDs());
      }
      LOG.incrementProcessed(prog);
    }
    LOG.ensureCompleted(prog);
  }
  // Combine
  WritableDoubleDataStore sumscore = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmax = new DoubleMinMax();
  {
    FiniteProgress cprog = LOG.isVerbose() ? new FiniteProgress("Combining results", ids.size(), LOG) : null;
    for (DBIDIter id = ids.iter(); id.valid(); id.advance()) {
      double[] scores = new double[num];
      int i = 0;
      for (OutlierResult r : results) {
        double score = r.getScores().doubleValue(id);
        if (!Double.isNaN(score)) {
          scores[i] = score;
          i++;
        } else {
          LOG.warning("DBID " + id + " was not given a score by result " + r);
        }
      }
      if (i > 0) {
        // Shrink array if necessary.
        if (i < scores.length) {
          scores = Arrays.copyOf(scores, i);
        }
        double combined = voting.combine(scores);
        sumscore.putDouble(id, combined);
        minmax.put(combined);
      } else {
        LOG.warning("DBID " + id + " was not given any score at all.");
      }
      LOG.incrementProcessed(cprog);
    }
    LOG.ensureCompleted(cprog);
  }
  OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax());
  DoubleRelation scores = new MaterializedDoubleRelation("Simple Outlier Ensemble", "ensemble-outlier", sumscore, ids);
  return new OutlierResult(meta, scores);
}
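The combination step above delegates to voting.combine(scores), which reduces the scores that the inner algorithms assigned to one object into a single value. As a hedged illustration of that contract (not ELKI's actual voting implementation), a mean combiner could look like this:

// Illustrative mean combiner for the combine(double[]) contract used above.
// The array only contains scores that were actually assigned; NaNs were already
// filtered out and the array shrunk by the caller.
static double combineByMean(double[] scores) {
  double sum = 0.0;
  for (double s : scores) {
    sum += s;
  }
  return scores.length > 0 ? sum / scores.length : 0.0;
}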
Use of de.lmu.ifi.dbs.elki.result.outlier.BasicOutlierScoreMeta in project elki by elki-project.
The class CTLuGLSBackwardSearchAlgorithm, method run:
/**
 * Run the algorithm
 *
 * @param database Database to process
 * @param relationx Spatial relation
 * @param relationy Attribute relation
 * @return Algorithm result
 */
public OutlierResult run(Database database, Relation<V> relationx, Relation<? extends NumberVector> relationy) {
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relationx.getDBIDs(), DataStoreFactory.HINT_STATIC);
  DoubleMinMax mm = new DoubleMinMax(0.0, 0.0);
  // Outlier detection loop
  {
    ModifiableDBIDs idview = DBIDUtil.newHashSet(relationx.getDBIDs());
    ProxyView<V> proxy = new ProxyView<>(idview, relationx);
    double phialpha = NormalDistribution.standardNormalQuantile(1.0 - alpha * .5);
    // Detect outliers while significant.
    while (true) {
      Pair<DBIDVar, Double> candidate = singleIteration(proxy, relationy);
      if (candidate.second < phialpha) {
        break;
      }
      scores.putDouble(candidate.first, candidate.second);
      if (!Double.isNaN(candidate.second)) {
        mm.put(candidate.second);
      }
      idview.remove(candidate.first);
    }
    // Remaining objects are inliers
    for (DBIDIter iter = idview.iter(); iter.valid(); iter.advance()) {
      scores.putDouble(iter, 0.0);
    }
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("GLSSODBackward", "GLSSODbackward-outlier", scores, relationx.getDBIDs());
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(mm.getMin(), mm.getMax(), 0, Double.POSITIVE_INFINITY, 0);
  return new OutlierResult(scoreMeta, scoreResult);
}
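The loop above flags one candidate per iteration and stops as soon as its test statistic falls below phialpha, the two-sided standard normal quantile for significance level alpha. A minimal sketch of that stopping rule, reusing the NormalDistribution call from the snippet (the method name isSignificant is illustrative):

// A candidate is still considered an outlier while its test statistic reaches the
// two-sided normal quantile; for alpha = 0.05 the threshold is roughly 1.96.
static boolean isSignificant(double statistic, double alpha) {
  double phialpha = NormalDistribution.standardNormalQuantile(1.0 - alpha * .5);
  return statistic >= phialpha;
}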
Use of de.lmu.ifi.dbs.elki.result.outlier.BasicOutlierScoreMeta in project elki by elki-project.
The class CTLuMoranScatterplotOutlier, method run:
/**
 * Main method.
 *
 * @param database Database
 * @param nrel Neighborhood relation
 * @param relation Data relation (1d!)
 * @return Outlier detection result
 */
public OutlierResult run(Database database, Relation<N> nrel, Relation<? extends NumberVector> relation) {
  final NeighborSetPredicate npred = getNeighborSetPredicateFactory().instantiate(database, nrel);
  // Compute the global mean and variance
  MeanVariance globalmv = new MeanVariance();
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    globalmv.put(relation.get(iditer).doubleValue(0));
  }
  DoubleMinMax minmax = new DoubleMinMax();
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_STATIC);
  // calculate neighborhood average of normalized attribute values.
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    // Compute global z score
    final double globalZ = (relation.get(iditer).doubleValue(0) - globalmv.getMean()) / globalmv.getNaiveStddev();
    // Compute local average z score
    Mean localm = new Mean();
    for (DBIDIter iter = npred.getNeighborDBIDs(iditer).iter(); iter.valid(); iter.advance()) {
      if (DBIDUtil.equal(iditer, iter)) {
        continue;
      }
      localm.put((relation.get(iter).doubleValue(0) - globalmv.getMean()) / globalmv.getNaiveStddev());
    }
    // if neighbors.size == 0
    final double localZ;
    if (localm.getCount() > 0) {
      localZ = localm.getMean();
    } else {
      // if s has no neighbors => Wzi = zi
      localZ = globalZ;
    }
    // compute score
    // Note: in the original moran scatterplot, any object with a score < 0 would be an outlier.
    final double score = Math.max(-globalZ * localZ, 0);
    minmax.put(score);
    scores.putDouble(iditer, score);
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("MoranOutlier", "Moran Scatterplot Outlier", scores, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax(), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, 0);
  OutlierResult or = new OutlierResult(scoreMeta, scoreResult);
  or.addChildResult(npred);
  return or;
}
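The score above is max(-z_i * Wz_i, 0), where z_i is the object's z-score under the global mean and standard deviation and Wz_i is the mean z-score of its neighbors, falling back to z_i when there are no neighbors. A self-contained sketch of that computation for a single object (names are illustrative):

// Moran-scatterplot style score for one object: objects whose own z-score and
// neighborhood mean z-score have opposite signs (product < 0) get positive scores.
static double moranScore(double globalZ, double[] neighborZ) {
  double localZ = globalZ; // no neighbors: Wzi = zi, as in the else branch above
  if (neighborZ.length > 0) {
    double sum = 0.0;
    for (double z : neighborZ) {
      sum += z;
    }
    localZ = sum / neighborZ.length;
  }
  return Math.max(-globalZ * localZ, 0);
}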
Use of de.lmu.ifi.dbs.elki.result.outlier.BasicOutlierScoreMeta in project elki by elki-project.
The class ParallelSimplifiedLOF, method run:
public OutlierResult run(Database database, Relation<O> relation) {
  DBIDs ids = relation.getDBIDs();
  DistanceQuery<O> distq = database.getDistanceQuery(relation, getDistanceFunction());
  KNNQuery<O> knnq = database.getKNNQuery(distq, k + 1);
  // Phase one: KNN and k-dist
  WritableDataStore<KNNList> knns = DataStoreUtil.makeStorage(ids, DataStoreFactory.HINT_DB, KNNList.class);
  {
    // Compute kNN
    KNNProcessor<O> knnm = new KNNProcessor<>(k + 1, knnq);
    SharedObject<KNNList> knnv = new SharedObject<>();
    WriteDataStoreProcessor<KNNList> storek = new WriteDataStoreProcessor<>(knns);
    knnm.connectKNNOutput(knnv);
    storek.connectInput(knnv);
    ParallelExecutor.run(ids, knnm, storek);
  }
  // Phase two: simplified-lrd
  WritableDoubleDataStore lrds = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_DB);
  {
    SimplifiedLRDProcessor lrdm = new SimplifiedLRDProcessor(knns);
    SharedDouble lrdv = new SharedDouble();
    WriteDoubleDataStoreProcessor storelrd = new WriteDoubleDataStoreProcessor(lrds);
    lrdm.connectOutput(lrdv);
    storelrd.connectInput(lrdv);
    ParallelExecutor.run(ids, lrdm, storelrd);
  }
  // Phase three: Simplified-LOF
  WritableDoubleDataStore lofs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_DB);
  DoubleMinMax minmax;
  {
    LOFProcessor lofm = new LOFProcessor(knns, lrds, true);
    SharedDouble lofv = new SharedDouble();
    DoubleMinMaxProcessor mmm = new DoubleMinMaxProcessor();
    WriteDoubleDataStoreProcessor storelof = new WriteDoubleDataStoreProcessor(lofs);
    lofm.connectOutput(lofv);
    mmm.connectInput(lofv);
    storelof.connectInput(lofv);
    ParallelExecutor.run(ids, lofm, storelof, mmm);
    minmax = mmm.getMinMax();
  }
  DoubleRelation scoreres = new MaterializedDoubleRelation("Simplified Local Outlier Factor", "simplified-lof-outlier", lofs, ids);
  OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 1.0);
  return new OutlierResult(meta, scoreres);
}
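The three phases wire up ELKI's parallel processors, but the quantities they compute are the usual simplified-LOF ones: the simplified local reachability density is the inverse of the mean kNN distance, and the LOF score is the average neighbor density divided by the object's own density. A sequential, array-based sketch of the same computation (data layout and names are illustrative; duplicate points with zero mean distance are not handled):

// knnDists[i]: distances from object i to its k nearest neighbors (excluding itself);
// knnIndex[i]: the indices of those neighbors.
static double[] simplifiedLOF(double[][] knnDists, int[][] knnIndex) {
  int n = knnDists.length;
  double[] lrd = new double[n];
  for (int i = 0; i < n; i++) {
    double sum = 0.0;
    for (double d : knnDists[i]) {
      sum += d;
    }
    // Simplified local reachability density: inverse of the mean kNN distance.
    lrd[i] = knnDists[i].length / sum;
  }
  double[] lof = new double[n];
  for (int i = 0; i < n; i++) {
    double sum = 0.0;
    for (int j : knnIndex[i]) {
      sum += lrd[j];
    }
    // Simplified LOF: mean neighbor density relative to the object's own density.
    lof[i] = sum / (knnIndex[i].length * lrd[i]);
  }
  return lof;
}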