Use of de.lmu.ifi.dbs.elki.database.datastore.WritableDoubleDataStore in project elki by elki-project.
The class ISOS, method run.
/**
 * Run the algorithm.
 *
 * @param relation data relation.
 * @return outlier detection result
 */
public OutlierResult run(Relation<O> relation) {
  // Query size
  final int k1 = k + 1;
  final double perplexity = k / 3.;
  KNNQuery<O> knnq = relation.getKNNQuery(getDistanceFunction(), k1);
  final double logPerp = FastMath.log(perplexity);
  double[] p = new double[k + 10];
  ModifiableDoubleDBIDList dists = DBIDUtil.newDistanceDBIDList(k + 10);
  DoubleDBIDListIter di = dists.iter();
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("ISOS scores", relation.size(), LOG) : null;
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB, 1.);
  for (DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    KNNList knns = knnq.getKNNForDBID(it, k1);
    if (p.length < knns.size() + 1) {
      p = new double[knns.size() + 10];
    }
    final DoubleDBIDListIter ki = knns.iter();
    try {
      double id = estimateID(it, ki, p);
      adjustDistances(it, ki, knns.getKNNDistance(), id, dists);
      // We now continue with the modified distances:
      // Compute affinities
      SOS.computePi(it, di, p, perplexity, logPerp);
      // Normalization factor:
      double s = SOS.sumOfProbabilities(it, di, p);
      if (s > 0) {
        nominateNeighbors(it, di, p, 1. / s, scores);
      }
    } catch (ArithmeticException e) {
      // ID estimation failed, supposedly constant values.
      if (knns.size() > 1) {
        Arrays.fill(p, 1. / (knns.size() - 1));
        nominateNeighbors(it, di, p, 1., scores);
      }
    }
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  DoubleMinMax minmax = transformScores(scores, relation.getDBIDs(), logPerp, phi);
  DoubleRelation scoreres = new MaterializedDoubleRelation("Intrinsic Stochastic Outlier Selection", "isos-outlier", scores, relation.getDBIDs());
  OutlierScoreMeta meta = new ProbabilisticOutlierScore(minmax.getMin(), minmax.getMax(), 0.);
  return new OutlierResult(meta, scoreres);
}
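The score store above is allocated with a default value of 1.0 via DataStoreUtil.makeDoubleStorage() and then handed to nominateNeighbors() for in-place updates. A minimal sketch of that allocate-and-update pattern, using only the data store calls shown above; the relation variable and the 0.5 factor are hypothetical placeholders:

// Sketch only: per-object double store with default 1.0, updated in place.
// `relation` and the 0.5 factor are hypothetical stand-ins.
WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB, 1.);
for (DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
  // read-modify-write on one entry of the store
  scores.putDouble(it, scores.doubleValue(it) * 0.5);
}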
Use of de.lmu.ifi.dbs.elki.database.datastore.WritableDoubleDataStore in project elki by elki-project.
The class TrivialAverageCoordinateOutlier, method run.
/**
 * Run the actual algorithm.
 *
 * @param relation Relation
 * @return Result
 */
public OutlierResult run(Relation<? extends NumberVector> relation) {
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT);
  DoubleMinMax minmax = new DoubleMinMax();
  Mean m = new Mean();
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    m.reset();
    NumberVector nv = relation.get(iditer);
    for (int i = 0; i < nv.getDimensionality(); i++) {
      m.put(nv.doubleValue(i));
    }
    final double score = m.getMean();
    scores.putDouble(iditer, score);
    minmax.put(score);
  }
  DoubleRelation scoreres = new MaterializedDoubleRelation("Trivial mean score", "mean-outlier", scores, relation.getDBIDs());
  OutlierScoreMeta meta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax());
  return new OutlierResult(meta, scoreres);
}
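Callers typically consume the scores through the returned OutlierResult. A minimal sketch, assuming ELKI's OutlierResult exposes the score relation via getScores() and that a suitable relation is at hand:

// Sketch only: read the per-object mean scores back out of the result.
OutlierResult result = run(relation); // hypothetical call site
DoubleRelation scoreRel = result.getScores();
for (DBIDIter iter = scoreRel.iterDBIDs(); iter.valid(); iter.advance()) {
  double score = scoreRel.doubleValue(iter); // mean of the object's coordinates
}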
Use of de.lmu.ifi.dbs.elki.database.datastore.WritableDoubleDataStore in project elki by elki-project.
The class CTLuMeanMultipleAttributes, method run.
/**
 * Run the algorithm.
 *
 * @param database Database
 * @param spatial Spatial relation
 * @param attributes Numerical attributes
 * @return Outlier detection result
 */
public OutlierResult run(Database database, Relation<N> spatial, Relation<O> attributes) {
  if (LOG.isDebugging()) {
    LOG.debug("Dimensionality: " + RelationUtil.dimensionality(attributes));
  }
  final NeighborSetPredicate npred = getNeighborSetPredicateFactory().instantiate(database, spatial);
  CovarianceMatrix covmaker = new CovarianceMatrix(RelationUtil.dimensionality(attributes));
  WritableDataStore<double[]> deltas = DataStoreUtil.makeStorage(attributes.getDBIDs(), DataStoreFactory.HINT_TEMP, double[].class);
  for (DBIDIter iditer = attributes.iterDBIDs(); iditer.valid(); iditer.advance()) {
    final O obj = attributes.get(iditer);
    final DBIDs neighbors = npred.getNeighborDBIDs(iditer);
    // TODO: remove object itself from neighbors?
    // Mean vector "g"
    double[] mean = Centroid.make(attributes, neighbors).getArrayRef();
    // Delta vector "h"
    double[] delta = minusEquals(obj.toArray(), mean);
    deltas.put(iditer, delta);
    covmaker.put(delta);
  }
  // Finalize covariance matrix:
  double[] mean = covmaker.getMeanVector();
  double[][] cmati = inverse(covmaker.destroyToSampleMatrix());
  DoubleMinMax minmax = new DoubleMinMax();
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(attributes.getDBIDs(), DataStoreFactory.HINT_STATIC);
  for (DBIDIter iditer = attributes.iterDBIDs(); iditer.valid(); iditer.advance()) {
    final double score = mahalanobisDistance(cmati, deltas.get(iditer), mean);
    minmax.put(score);
    scores.putDouble(iditer, score);
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("mean multiple attributes spatial outlier", "mean-multipleattributes-outlier", scores, attributes.getDBIDs());
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 0);
  OutlierResult or = new OutlierResult(scoreMeta, scoreResult);
  or.addChildResult(npred);
  return or;
}
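The unqualified helpers above (minusEquals, inverse, mahalanobisDistance) appear to be static imports from ELKI's linear-algebra utilities (VMath); that is an assumption about the surrounding class, not something visible in the snippet. A minimal sketch of the covariance-and-score step on three hand-picked 2-d delta vectors:

// Sketch only: accumulate deltas, invert the sample covariance, score one delta.
// mahalanobisDistance(double[][], double[], double[]) is assumed to be the
// VMath-style helper used in run() above.
CovarianceMatrix covmaker = new CovarianceMatrix(2);
covmaker.put(new double[] { 1., 0. });
covmaker.put(new double[] { 0., 1. });
covmaker.put(new double[] { 2., 2. });
double[] mean = covmaker.getMeanVector();
double[][] cmati = inverse(covmaker.destroyToSampleMatrix());
double score = mahalanobisDistance(cmati, new double[] { 1., 0. }, mean);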
Use of de.lmu.ifi.dbs.elki.database.datastore.WritableDoubleDataStore in project elki by elki-project.
The class CTLuRandomWalkEC, method run.
/**
 * Run the algorithm.
 *
 * @param spatial Spatial neighborhood relation
 * @param relation Attribute value relation
 * @return Outlier result
 */
public OutlierResult run(Relation<P> spatial, Relation<? extends NumberVector> relation) {
  DistanceQuery<P> distFunc = getDistanceFunction().instantiate(spatial);
  WritableDataStore<double[]> similarityVectors = DataStoreUtil.makeStorage(spatial.getDBIDs(), DataStoreFactory.HINT_TEMP, double[].class);
  WritableDataStore<DBIDs> neighbors = DataStoreUtil.makeStorage(spatial.getDBIDs(), DataStoreFactory.HINT_TEMP, DBIDs.class);
  // Make a static IDs array for matrix column indexing
  ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs());
  // Construct the relation matrix of the EC graph
  double[][] E = new double[ids.size()][ids.size()];
  KNNHeap heap = DBIDUtil.newHeap(k);
  {
    int i = 0;
    for (DBIDIter id = ids.iter(); id.valid(); id.advance(), i++) {
      final double val = relation.get(id).doubleValue(0);
      assert (heap.size() == 0);
      int j = 0;
      for (DBIDIter n = ids.iter(); n.valid(); n.advance(), j++) {
        if (i == j) {
          continue;
        }
        final double e;
        final double distance = distFunc.distance(id, n);
        heap.insert(distance, n);
        if (distance == 0) {
          LOG.warning("Zero distances are not supported - skipping: " + DBIDUtil.toString(id) + " " + DBIDUtil.toString(n));
          e = 0;
        } else {
          double diff = Math.abs(val - relation.get(n).doubleValue(0));
          double exp = FastMath.exp(FastMath.pow(diff, alpha));
          // Implementation note: not inverting exp worked a lot better.
          // Therefore we diverge from the article here.
          e = exp / distance;
        }
        E[j][i] = e;
      }
      // Convert kNN heap into a DBID array
      ModifiableDBIDs nids = DBIDUtil.newArray(heap.size());
      while (heap.size() > 0) {
        nids.add(heap.poll());
      }
      neighbors.put(id, nids);
    }
  }
  // Normalize each column by its sum, and do the -c multiplication in the same pass.
  for (int i = 0; i < E[0].length; i++) {
    double sum = 0.0;
    for (int j = 0; j < E.length; j++) {
      sum += E[j][i];
    }
    if (sum == 0) {
      sum = 1.0;
    }
    for (int j = 0; j < E.length; j++) {
      E[j][i] = -c * E[j][i] / sum;
    }
  }
  // Add the identity matrix. The diagonal should still be 0s, so this is trivial.
  assert (E.length == E[0].length);
  for (int col = 0; col < E[0].length; col++) {
    assert (E[col][col] == 0.0);
    E[col][col] = 1.0;
  }
  E = timesEquals(inverse(E), 1 - c);
  // Split the matrix into columns
  {
    int i = 0;
    for (DBIDIter id = ids.iter(); id.valid(); id.advance(), i++) {
      // Note: matrix times ith unit vector = ith column
      double[] sim = getCol(E, i);
      similarityVectors.put(id, sim);
    }
  }
  E = null;
  // Compute the relevance scores between each object and its neighbors
  DoubleMinMax minmax = new DoubleMinMax();
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(spatial.getDBIDs(), DataStoreFactory.HINT_STATIC);
  for (DBIDIter id = ids.iter(); id.valid(); id.advance()) {
    double gmean = 1.0;
    int cnt = 0;
    for (DBIDIter iter = neighbors.get(id).iter(); iter.valid(); iter.advance()) {
      if (DBIDUtil.equal(id, iter)) {
        continue;
      }
      double sim = VMath.angle(similarityVectors.get(id), similarityVectors.get(iter));
      gmean *= sim;
      cnt++;
    }
    final double score = FastMath.pow(gmean, 1.0 / cnt);
    minmax.put(score);
    scores.putDouble(id, score);
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("randomwalkec", "RandomWalkEC", scores, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(minmax.getMin(), minmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 0.0);
  return new OutlierResult(scoreMeta, scoreResult);
}
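The normalization, -c multiplication, identity addition, and inversion above amount to computing W = (1 - c) * (I - c * S)^{-1}, where S is the column-normalized affinity matrix with zero diagonal. A minimal sketch of that closed form on a 2x2 example, assuming inverse() and timesEquals() are the same VMath-style static helpers used in run():

// Sketch only: W = (1 - c) * (I - c * S)^{-1} on a tiny example.
double c = 0.5; // hypothetical value of the parameter c
double[][] S = { { 0., 1. }, { 1., 0. } }; // column-normalized, zero diagonal
double[][] IminusCS = { { 1., -c * S[0][1] }, { -c * S[1][0], 1. } };
double[][] W = timesEquals(inverse(IminusCS), 1 - c);
// Each column of W is then stored as the similarity vector of one object.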
Use of de.lmu.ifi.dbs.elki.database.datastore.WritableDoubleDataStore in project elki by elki-project.
The class SOF, method run.
/**
 * The main run method.
 *
 * @param database Database to use
 * @param spatial Relation for neighborhood
 * @param relation Attributes to evaluate
 * @return Outlier result
 */
public OutlierResult run(Database database, Relation<N> spatial, Relation<O> relation) {
  final NeighborSetPredicate npred = getNeighborSetPredicateFactory().instantiate(database, spatial);
  DistanceQuery<O> distFunc = getNonSpatialDistanceFunction().instantiate(relation);
  WritableDoubleDataStore lrds = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_TEMP | DataStoreFactory.HINT_HOT);
  WritableDoubleDataStore lofs = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_STATIC);
  DoubleMinMax lofminmax = new DoubleMinMax();
  // Compute densities
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    DBIDs neighbors = npred.getNeighborDBIDs(iditer);
    double avg = 0;
    for (DBIDIter iter = neighbors.iter(); iter.valid(); iter.advance()) {
      avg += distFunc.distance(iditer, iter);
    }
    double lrd = 1 / (avg / neighbors.size());
    if (Double.isNaN(lrd)) {
      lrd = 0;
    }
    lrds.putDouble(iditer, lrd);
  }
  // Compute density quotients
  for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    DBIDs neighbors = npred.getNeighborDBIDs(iditer);
    double avg = 0;
    for (DBIDIter iter = neighbors.iter(); iter.valid(); iter.advance()) {
      avg += lrds.doubleValue(iter);
    }
    final double lrd = (avg / neighbors.size()) / lrds.doubleValue(iditer);
    if (!Double.isNaN(lrd)) {
      lofs.putDouble(iditer, lrd);
      lofminmax.put(lrd);
    } else {
      lofs.putDouble(iditer, 0.0);
    }
  }
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Spatial Outlier Factor", "sof-outlier", lofs, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta(lofminmax.getMin(), lofminmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 1.0);
  OutlierResult or = new OutlierResult(scoreMeta, scoreResult);
  or.addChildResult(npred);
  return or;
}
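The two passes compute a density lrd(o) = |N(o)| / (sum of attribute distances to the spatial neighbors of o) and then the quotient of the neighbors' average density over the object's own density, mirroring the LOF construction. A minimal sketch of that arithmetic for a single object on plain arrays; all values are hypothetical:

// Sketch only: SOF score of one object from its spatial neighborhood.
double[] neighborDistances = { 1.0, 2.0, 3.0 }; // attribute-space distances
double[] neighborLrds = { 0.8, 1.1, 0.9 };      // densities of the neighbors
double sum = 0., avg = 0.;
for (double d : neighborDistances) {
  sum += d;
}
double lrd = 1 / (sum / neighborDistances.length); // |N(o)| / distance sum
for (double l : neighborLrds) {
  avg += l;
}
double sof = (avg / neighborLrds.length) / lrd; // average neighbor density over own density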