Use of de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.ListParameterization in the elki project (elki-project):
class LogLogisticDistributionTest, method testParameterizer.
@Test
public void testParameterizer() throws ClassInstantiationException {
  load("loglogistic.ascii.gz");
  // Build the distribution via the parameterization API (shape=2,
  // location=0, scale=0.5) rather than direct instantiation.
  ListParameterization config = new ListParameterization();
  config.addParameter(LogLogisticDistribution.Parameterizer.SHAPE_ID, 2.);
  config.addParameter(LogLogisticDistribution.Parameterizer.LOCATION_ID, 0.);
  config.addParameter(LogLogisticDistribution.Parameterizer.SCALE_ID, .5);
  Distribution d = ClassGenericsUtil.parameterizeOrAbort(LogLogisticDistribution.class, config);
  // Compare the PDF against reference values from scipy.
  checkPDF(d, "pdf_scipy_2_05", 1e-15);
}
Use of de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.ListParameterization in the elki project (elki-project):
class TrimmedEstimatorTest, method testNormalDistribution.
@Test
public void testNormalDistribution() {
  final double trim = .01;
  NormalMOMEstimator mom = NormalMOMEstimator.STATIC;
  // We could instantiate directly, but we also want to cover the
  // parameterizer class.
  ListParameterization par = new ListParameterization();
  par.addParameter(TrimmedEstimator.Parameterizer.INNER_ID, mom);
  par.addParameter(TrimmedEstimator.Parameterizer.TRIM_ID, trim);
  TrimmedEstimator<NormalDistribution> trimmed = ClassGenericsUtil.parameterizeOrAbort(TrimmedEstimator.class, par);
  // Gaussian sample whose first few entries are replaced by extreme
  // outliers, sized to stay within the trimming fraction.
  Random rnd = new Random(0L);
  double[] sample = new double[10000];
  final int numOutliers = (int) Math.floor(sample.length * trim * .5);
  for (int pos = 0; pos < sample.length; pos++) {
    sample[pos] = pos < numOutliers ? 1e10 : rnd.nextGaussian();
  }
  NormalDistribution naive = mom.estimate(sample, DoubleArrayAdapter.STATIC);
  NormalDistribution robust = trimmed.estimate(sample, DoubleArrayAdapter.STATIC);
  // The naive estimate is ruined by the outliers; the trimmed one is not.
  assertEquals("Mean not as expected from naive estimator.", 5e7, naive.getMean(), 1e-2);
  assertEquals("Stddev not as expected from naive estimator.", 7e8, naive.getStddev(), 1e7);
  assertEquals("Mean not as expected from trimmed estimator.", 0, robust.getMean(), 1e-2);
  assertEquals("Stddev not as expected from trimmed estimator.", 1.0, robust.getStddev(), 3e-2);
}
Use of de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.ListParameterization in the elki project (elki-project):
class Log1PlusNormalizationTest, method parameters.
/**
 * Test with non-default parameters to ensure that both branches of the filter
 * are tested.
 */
@Test
public void parameters() {
  String filename = UNITTEST + "normalization-test-1.csv";
  // Use the value of b as the boost value.
  double b = 15.;
  ListParameterization opts = new ListParameterization();
  opts.addParameter(Log1PlusNormalization.Parameterizer.BOOST_ID, b);
  Log1PlusNormalization<DoubleVector> filter = ClassGenericsUtil.parameterizeOrAbort(Log1PlusNormalization.class, opts);
  MultipleObjectsBundle filtered = readBundle(filename, filter);
  // Load the test data again without a filter.
  MultipleObjectsBundle unfiltered = readBundle(filename);
  // The filter must preserve dimensionality and bundle length.
  int dim = getFieldDimensionality(filtered, 0, TypeUtil.NUMBER_VECTOR_FIELD);
  assertEquals("Dimensionality changed", dim, getFieldDimensionality(unfiltered, 0, TypeUtil.NUMBER_VECTOR_FIELD));
  assertEquals("Length changed", filtered.dataLength(), unfiltered.dataLength());
  // Check every value against the expected log1p transformation.
  for (int i = 0; i < filtered.dataLength(); i++) {
    DoubleVector vecFil = get(filtered, i, 0, DoubleVector.class);
    DoubleVector vecRaw = get(unfiltered, i, 0, DoubleVector.class);
    for (int d = 0; d < dim; d++) {
      final double fil = vecFil.doubleValue(d);
      final double raw = vecRaw.doubleValue(d);
      assertEquals("Value not as expected", fil, FastMath.log1p(Math.abs(raw) * b) / FastMath.log1p(b), 1e-15);
    }
  }
}
Use of de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.ListParameterization in the elki project (elki-project):
class CosineHashFunctionFamilyTest, method createCosineHashFunction.
private LocalitySensitiveHashFunction<? super NumberVector> createCosineHashFunction(int numberOfProjections) {
  // Configure the hash family with a fixed random seed and the requested
  // number of projections, then draw the first generated hash function.
  ListParameterization par = new ListParameterization();
  par.addParameter(CosineHashFunctionFamily.Parameterizer.RANDOM_ID, 0L);
  par.addParameter(CosineHashFunctionFamily.Parameterizer.NUMPROJ_ID, numberOfProjections);
  CosineHashFunctionFamily family = ClassGenericsUtil.parameterizeOrAbort(CosineHashFunctionFamily.class, par);
  return family.generateHashFunctions(mockRelation(5), numberOfProjections).get(0);
}
Use of de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.ListParameterization in the elki project (elki-project):
class OnlineLOF, method getKNNAndRkNNQueries.
/**
 * Get the kNN and rkNN queries for the algorithm.
 *
 * @param database Database to obtain the queries from
 * @param relation Data
 * @param stepprog Progress logger
 * @return the kNN and rkNN queries
 */
private Pair<Pair<KNNQuery<O>, KNNQuery<O>>, Pair<RKNNQuery<O>, RKNNQuery<O>>> getKNNAndRkNNQueries(Database database, Relation<O> relation, StepProgress stepprog) {
  DistanceQuery<O> drefQ = database.getDistanceQuery(relation, referenceDistanceFunction);
  // Use "HEAVY" flag, since this is an online algorithm
  KNNQuery<O> kNNRefer = database.getKNNQuery(drefQ, krefer, DatabaseQuery.HINT_HEAVY_USE, DatabaseQuery.HINT_OPTIMIZED_ONLY, DatabaseQuery.HINT_NO_CACHE);
  RKNNQuery<O> rkNNRefer = database.getRKNNQuery(drefQ, DatabaseQuery.HINT_HEAVY_USE, DatabaseQuery.HINT_OPTIMIZED_ONLY, DatabaseQuery.HINT_NO_CACHE);
  // No optimized kNN query or RkNN query - use a preprocessor!
  if (kNNRefer == null || rkNNRefer == null) {
    if (stepprog != null) {
      stepprog.beginStep(1, "Materializing neighborhood w.r.t. reference neighborhood distance function.", LOG);
    }
    MaterializeKNNAndRKNNPreprocessor<O> preproc = new MaterializeKNNAndRKNNPreprocessor<>(relation, referenceDistanceFunction, krefer);
    kNNRefer = preproc.getKNNQuery(drefQ, krefer, DatabaseQuery.HINT_HEAVY_USE);
    rkNNRefer = preproc.getRKNNQuery(drefQ, krefer, DatabaseQuery.HINT_HEAVY_USE);
    // add as index
    database.getHierarchy().add(relation, preproc);
  } else {
    if (stepprog != null) {
      stepprog.beginStep(1, "Optimized neighborhood w.r.t. reference neighborhood distance function provided by database.", LOG);
    }
  }
  DistanceQuery<O> dreachQ = database.getDistanceQuery(relation, reachabilityDistanceFunction);
  KNNQuery<O> kNNReach = database.getKNNQuery(dreachQ, kreach, DatabaseQuery.HINT_HEAVY_USE, DatabaseQuery.HINT_OPTIMIZED_ONLY, DatabaseQuery.HINT_NO_CACHE);
  RKNNQuery<O> rkNNReach = database.getRKNNQuery(dreachQ, DatabaseQuery.HINT_HEAVY_USE, DatabaseQuery.HINT_OPTIMIZED_ONLY, DatabaseQuery.HINT_NO_CACHE);
  if (kNNReach == null || rkNNReach == null) {
    if (stepprog != null) {
      stepprog.beginStep(2, "Materializing neighborhood w.r.t. reachability distance function.", LOG);
    }
    // NOTE(review): a ListParameterization used to be built here but was
    // never passed anywhere; the preprocessor is constructed directly, so
    // the dead configuration has been removed.
    MaterializeKNNAndRKNNPreprocessor<O> preproc = new MaterializeKNNAndRKNNPreprocessor<>(relation, reachabilityDistanceFunction, kreach);
    kNNReach = preproc.getKNNQuery(dreachQ, kreach, DatabaseQuery.HINT_HEAVY_USE);
    rkNNReach = preproc.getRKNNQuery(dreachQ, kreach, DatabaseQuery.HINT_HEAVY_USE);
    // add as index
    database.getHierarchy().add(relation, preproc);
  }
  Pair<KNNQuery<O>, KNNQuery<O>> kNNPair = new Pair<>(kNNRefer, kNNReach);
  Pair<RKNNQuery<O>, RKNNQuery<O>> rkNNPair = new Pair<>(rkNNRefer, rkNNReach);
  return new Pair<>(kNNPair, rkNNPair);
}
Aggregations