Use of de.lmu.ifi.dbs.elki.utilities.scaling.ScalingFunction in the project elki by elki-project.
The method computeScore of the class JudgeOutlierScores.
/**
* Evaluate a single outlier score result.
*
* @param ids Inlier IDs
* @param outlierIds Outlier IDs
* @param or Outlier Result to evaluate
* @return Outlier score result
* @throws IllegalStateException
*/
protected ScoreResult computeScore(DBIDs ids, DBIDs outlierIds, OutlierResult or) throws IllegalStateException {
  if (scaling instanceof OutlierScalingFunction) {
    OutlierScalingFunction oscaling = (OutlierScalingFunction) scaling;
    oscaling.prepare(or);
  }
  final ScalingFunction innerScaling;
  // If the scaling function reports useful (finite) min/max values, rescale its
  // output linearly to [0, 1]; otherwise pass the values through unchanged.
  double min = scaling.getMin();
  double max = scaling.getMax();
  if (Double.isInfinite(min) || Double.isNaN(min) || Double.isInfinite(max) || Double.isNaN(max)) {
    innerScaling = new IdentityScaling();
    // TODO: does the outlier score give us this guarantee?
    LOG.warning("JudgeOutlierScores expects values between 0.0 and 1.0, but we don't have such a guarantee by the scaling function: min:" + min + " max:" + max);
  } else {
    if (min == 0.0 && max == 1.0) {
      innerScaling = new IdentityScaling();
    } else {
      innerScaling = new LinearScaling(1.0 / (max - min), -min);
    }
  }
  double posscore = 0.0;
  double negscore = 0.0;
  // Accumulate the scaled score of each labeled inlier; inliers should score
  // close to 0, so (1 - result) rewards a correct ranking.
  for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    double result = or.getScores().doubleValue(iter);
    result = innerScaling.getScaled(scaling.getScaled(result));
    posscore += (1.0 - result);
  }
  // Accumulate the scaled score of each labeled outlier; outliers should score
  // close to 1.
  for (DBIDIter iter = outlierIds.iter(); iter.valid(); iter.advance()) {
    double result = or.getScores().doubleValue(iter);
    result = innerScaling.getScaled(scaling.getScaled(result));
    negscore += result;
  }
  // Average over the number of labeled inliers and outliers, respectively.
  posscore /= ids.size();
  negscore /= outlierIds.size();
  LOG.verbose("Scores: " + posscore + " " + negscore);
  ArrayList<double[]> s = new ArrayList<>(1);
  s.add(new double[] { (posscore + negscore) * .5, posscore, negscore });
  return new ScoreResult(s);
}
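At its core, the method min-max rescales the outlier scores to [0, 1], then averages (1 - score) over the labeled inliers and the score itself over the labeled outliers; the ScoreResult stores the mean of the two as a combined quality value. The following standalone sketch reproduces that computation with plain arrays so the scoring logic can be seen in isolation; the class JudgeSketch and its judge method are hypothetical illustrations and not part of the ELKI API.

// Hypothetical standalone sketch (not ELKI code) of the scoring idea used above.
public class JudgeSketch {
  /** Min-max rescale to [0, 1], then average (1 - s) over inliers and s over outliers. */
  static double[] judge(double[] inlierScores, double[] outlierScores, double min, double max) {
    double range = max - min;
    double posscore = 0.0, negscore = 0.0;
    for (double s : inlierScores) {
      posscore += 1.0 - (s - min) / range; // inliers should have low scores
    }
    for (double s : outlierScores) {
      negscore += (s - min) / range; // outliers should have high scores
    }
    posscore /= inlierScores.length;
    negscore /= outlierScores.length;
    // { combined quality, inlier quality, outlier quality }, as stored in the ScoreResult
    return new double[] { (posscore + negscore) * .5, posscore, negscore };
  }

  public static void main(String[] args) {
    double[] inliers = { 0.1, 0.2, 0.15 };
    double[] outliers = { 0.9, 0.8 };
    double[] r = judge(inliers, outliers, 0.0, 1.0);
    System.out.println("combined=" + r[0] + " inliers=" + r[1] + " outliers=" + r[2]);
  }
}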