Use of de.lmu.ifi.dbs.elki.math.MeanVariance in project elki (elki-project):
class SigmoidOutlierScalingFunction, method prepare.
@Override
public void prepare(OutlierResult or) {
  // Fit a sigmoid 1/(1+exp(-(a*score+b))) to the outlier scores using an
  // EM-style alternation: the E-step labels each object outlier/inlier by
  // the sign of a*score+b, the M-step refits (a, b) via Levenberg-Marquardt.
  // Initial parameters - are these defaults sound?
  MeanVariance mv = new MeanVariance();
  DoubleRelation scores = or.getScores();
  for(DBIDIter id = scores.iterDBIDs(); id.valid(); id.advance()) {
    mv.put(scores.doubleValue(id));
  }
  // Initial guess: unit slope, centered on the mean score.
  double a = 1.0;
  double b = -mv.getMean();
  int iter = 0;

  // Reuse the scores relation fetched above instead of calling
  // or.getScores() repeatedly (consistency + avoids redundant lookups).
  ArrayDBIDs ids = DBIDUtil.ensureArray(scores.getDBIDs());
  DBIDArrayIter it = ids.iter();
  // Bit i is set iff object i is currently labeled as an outlier.
  long[] t = BitsUtil.zero(ids.size());
  boolean changing = true;
  while(changing) {
    changing = false;
    // E-Step: re-label every object by the sign of the current linear score.
    it.seek(0);
    for(int i = 0; i < ids.size(); i++, it.advance()) {
      double val = scores.doubleValue(it);
      double targ = a * val + b;
      if(targ > 0) {
        if(!BitsUtil.get(t, i)) {
          BitsUtil.setI(t, i);
          changing = true;
        }
      }
      else {
        if(BitsUtil.get(t, i)) {
          BitsUtil.clearI(t, i);
          changing = true;
        }
      }
    }
    if(!changing) {
      // Labels are stable: the fit has converged.
      break;
    }
    // M-Step: refit (a, b) on the current labeling.
    // Implementation based on:
    // H.-T. Lin, C.-J. Lin, R. C. Weng:
    // A Note on Platt's Probabilistic Outputs for Support Vector Machines
    {
      double[] newab = MStepLevenbergMarquardt(a, b, ids, t, scores);
      a = newab[0];
      b = newab[1];
    }
    iter++;
    if(iter > 100) {
      LOG.warning("Max iterations met in sigmoid fitting.");
      break;
    }
  }
  Afinal = a;
  Bfinal = b;
  LOG.debugFine("A = " + Afinal + " B = " + Bfinal);
}
Use of de.lmu.ifi.dbs.elki.math.MeanVariance in project elki (elki-project):
class MixtureModelOutlierScalingFunction, method prepare (relation variant).
@Override
public void prepare(OutlierResult or) {
  // Fit a two-component mixture to the outlier scores via EM and store the
  // fitted parameters in the mu/sigma/lambda/alpha fields.
  // Initial parameters - are these defaults sound?
  MeanVariance mv = new MeanVariance();
  DoubleRelation scores = or.getScores();
  for(DBIDIter id = scores.iterDBIDs(); id.valid(); id.advance()) {
    double val = scores.doubleValue(id);
    if(!Double.isNaN(val) && !Double.isInfinite(val)) {
      mv.put(val);
    }
  }
  double curMu = mv.getMean() * 2.;
  if(curMu == 0) {
    curMu = Double.MIN_NORMAL; // Avoid a zero mean (division below).
  }
  double curSigma = Math.max(mv.getSampleStddev(), Double.MIN_NORMAL);
  double curLambda = Math.min(1.0 / curMu, Double.MAX_VALUE);
  double curAlpha = 0.05;
  DBIDs ids = scores.getDBIDs();
  // TODO: stop condition!
  int iter = 0;
  // " lambda = " + curLambda + " alpha = " + curAlpha);
  while(true) {
    // E and M-Steps
    // Sum of weights for both distributions
    double otisum = 0.0, itisum = 0.0;
    // Weighted sum for both distributions
    double owsum = 0.0, iwsum = 0.0;
    // Weighted deviation from previous mean (Gaussian only)
    double osqsum = 0.0;
    for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
      double val = scores.doubleValue(it);
      // Skip non-finite scores, consistent with the initialization pass
      // above; a single NaN would otherwise poison every accumulator.
      if(Double.isNaN(val) || Double.isInfinite(val)) {
        continue;
      }
      // E-Step: estimate outlier probability
      double ti = calcPosterior(val, curAlpha, curMu, curSigma, curLambda);
      // M-Step
      otisum += ti;
      itisum += 1 - ti;
      owsum += ti * val;
      iwsum += (1 - ti) * val;
      // (val - curMu) * (val - curMu);
      osqsum += ti * val * val;
    }
    if(otisum <= 0.0 || owsum <= 0.0) {
      LOG.warning("MixtureModel Outlier Scaling converged to extreme.");
      break;
    }
    double newMu = owsum / otisum;
    double newSigma = Math.max(FastMath.sqrt(osqsum / otisum - newMu * newMu), Double.MIN_NORMAL);
    double newLambda = Math.min(itisum / iwsum, Double.MAX_VALUE);
    double newAlpha = otisum / ids.size();
    // converged?
    if(//
    Math.abs(newMu - curMu) < DELTA && //
        Math.abs(newSigma - curSigma) < DELTA && //
        Math.abs(newLambda - curLambda) < DELTA && Math.abs(newAlpha - curAlpha) < DELTA) {
      break;
    }
    if(newSigma <= 0.0 || newAlpha <= 0.0) {
      LOG.warning("MixtureModel Outlier Scaling converged to extreme.");
      break;
    }
    // LOG.debugFine("iter #"+iter+" mu = " + newMu + " sigma = " +
    // newSigma + " lambda = " + newLambda + " alpha = " + newAlpha);
    curMu = newMu;
    curSigma = newSigma;
    curLambda = newLambda;
    curAlpha = newAlpha;
    iter++;
    if(iter > 100) {
      LOG.warning("Max iterations met in mixture model fitting.");
      break;
    }
  }
  mu = curMu;
  sigma = curSigma;
  lambda = curLambda;
  alpha = curAlpha;
  // LOG.debugFine("mu = " + mu + " sigma = " + sigma + " lambda = " +
  // lambda + " alpha = " + alpha);
}
Use of de.lmu.ifi.dbs.elki.math.MeanVariance in project elki (elki-project):
class MixtureModelOutlierScalingFunction, method prepare (array-adapter variant).
@Override
public <A> void prepare(A array, NumberArrayAdapter<?, A> adapter) {
  // Array-adapter variant: fit the two-component mixture via EM and store
  // the fitted parameters in the mu/sigma/lambda/alpha fields.
  // Initial parameters - are these defaults sound?
  MeanVariance mv = new MeanVariance();
  final int size = adapter.size(array);
  for(int i = 0; i < size; i++) {
    double val = adapter.getDouble(array, i);
    if(!Double.isNaN(val) && !Double.isInfinite(val)) {
      mv.put(val);
    }
  }
  double curMu = mv.getMean() * 2.;
  if(curMu == 0) {
    curMu = Double.MIN_NORMAL; // Avoid a zero mean (division below).
  }
  double curSigma = Math.max(mv.getSampleStddev(), Double.MIN_NORMAL);
  double curLambda = Math.min(1.0 / curMu, Double.MAX_VALUE);
  double curAlpha = 0.05;
  // TODO: stop condition!
  int iter = 0;
  // " lambda = " + curLambda + " alpha = " + curAlpha);
  while(true) {
    // E and M-Steps
    // Sum of weights for both distributions
    double otisum = 0.0, itisum = 0.0;
    // Weighted sum for both distributions
    double owsum = 0.0, iwsum = 0.0;
    // Weighted deviation from previous mean (Gaussian only)
    double osqsum = 0.0;
    for(int i = 0; i < size; i++) {
      double val = adapter.getDouble(array, i);
      // Skip non-finite values, consistent with the initialization pass
      // above; a single NaN would otherwise poison every accumulator.
      if(Double.isNaN(val) || Double.isInfinite(val)) {
        continue;
      }
      // E-Step
      double ti = calcPosterior(val, curAlpha, curMu, curSigma, curLambda);
      // M-Step
      otisum += ti;
      itisum += 1 - ti;
      owsum += ti * val;
      iwsum += (1 - ti) * val;
      // (val - curMu) * (val - curMu);
      osqsum += ti * val * val;
    }
    if(otisum <= 0.0 || owsum <= 0.0) {
      LOG.warning("MixtureModel Outlier Scaling converged to extreme.");
      break;
    }
    double newMu = owsum / otisum;
    double newSigma = Math.max(FastMath.sqrt(osqsum / otisum - newMu * newMu), Double.MIN_NORMAL);
    double newLambda = Math.min(itisum / iwsum, Double.MAX_VALUE);
    double newAlpha = otisum / size;
    // converged?
    if(//
    Math.abs(newMu - curMu) < DELTA && //
        Math.abs(newSigma - curSigma) < DELTA && //
        Math.abs(newLambda - curLambda) < DELTA && Math.abs(newAlpha - curAlpha) < DELTA) {
      break;
    }
    if(newSigma <= 0.0 || newAlpha <= 0.0) {
      LOG.warning("MixtureModel Outlier Scaling converged to extreme.");
      break;
    }
    // LOG.debugFine("iter #"+iter+" mu = " + newMu + " sigma = " +
    // newSigma + " lambda = " + newLambda + " alpha = " + newAlpha);
    curMu = newMu;
    curSigma = newSigma;
    curLambda = newLambda;
    curAlpha = newAlpha;
    iter++;
    if(iter > 100) {
      LOG.warning("Max iterations met in mixture model fitting.");
      break;
    }
  }
  mu = curMu;
  sigma = curSigma;
  lambda = curLambda;
  alpha = curAlpha;
  // LOG.debugFine("mu = " + mu + " sigma = " + sigma + " lambda = " +
  // lambda + " alpha = " + alpha);
}
Use of de.lmu.ifi.dbs.elki.math.MeanVariance in project elki (elki-project):
class OutlierGammaScaling, method prepare.
@Override
public void prepare(OutlierResult or) {
  // Fit a Gamma distribution to the (pre-scaled) outlier scores by the
  // method of moments, and cache the regularized-Gamma CDF at the mean.
  meta = or.getOutlierMeta();
  DoubleRelation scores = or.getScores();
  MeanVariance stats = new MeanVariance();
  for(DBIDIter iter = scores.iterDBIDs(); iter.valid(); iter.advance()) {
    final double s = preScale(scores.doubleValue(iter));
    // Only finite values contribute to the moment estimates.
    if(!Double.isNaN(s) && !Double.isInfinite(s)) {
      stats.put(s);
    }
  }
  final double mean = stats.getMean(), var = stats.getSampleVariance();
  // Method of moments: shape k = mean^2 / variance, scale theta = variance / mean.
  k = mean * mean / var;
  theta = var / mean;
  atmean = GammaDistribution.regularizedGammaP(k, mean / theta);
}
Use of de.lmu.ifi.dbs.elki.math.MeanVariance in project elki (elki-project):
interface MeanVarianceDistributionEstimator, method estimate.
@Override
default <A> D estimate(A data, NumberArrayAdapter<?, A> adapter) {
  // Accumulate mean and variance over all finite values in the array,
  // then delegate to the moment-based estimator.
  MeanVariance mv = new MeanVariance();
  for(int i = 0, size = adapter.size(data); i < size; i++) {
    final double v = adapter.getDouble(data, i);
    // Ignore NaN and infinite entries.
    if(!Double.isInfinite(v) && !Double.isNaN(v)) {
      mv.put(v);
    }
  }
  return estimateFromMeanVariance(mv);
}
Aggregations