Use of de.lmu.ifi.dbs.elki.data.DoubleVector in project elki by elki-project: class Log1PlusNormalizationTest, method parameters().
/**
 * Test with non-default parameters to ensure that both branches of the filter
 * are tested.
 */
@Test
public void parameters() {
  String filename = UNITTEST + "normalization-test-1.csv";
  // Use the value of b as the boost value.
  double b = 15.;
  ListParameterization config = new ListParameterization();
  config.addParameter(Log1PlusNormalization.Parameterizer.BOOST_ID, b);
  Log1PlusNormalization<DoubleVector> filter = ClassGenericsUtil.parameterizeOrAbort(Log1PlusNormalization.class, config);
  MultipleObjectsBundle filteredBundle = readBundle(filename, filter);
  // Load the test data again without a filter.
  MultipleObjectsBundle unfilteredBundle = readBundle(filename);
  // Check dimensionalities.
  int dim = getFieldDimensionality(filteredBundle, 0, TypeUtil.NUMBER_VECTOR_FIELD);
  assertEquals("Dimensionality changed", dim, getFieldDimensionality(unfilteredBundle, 0, TypeUtil.NUMBER_VECTOR_FIELD));
  // Verify that the filtered and unfiltered bundles have the same length.
  assertEquals("Length changed", filteredBundle.dataLength(), unfilteredBundle.dataLength());
  // Compare each filtered value to the Log1Plus formula applied to the
  // corresponding unfiltered value.
  for (int row = 0; row < filteredBundle.dataLength(); row++) {
    DoubleVector dFil = get(filteredBundle, row, 0, DoubleVector.class);
    DoubleVector dUnfil = get(unfilteredBundle, row, 0, DoubleVector.class);
    for (int col = 0; col < dim; col++) {
      final double vFil = dFil.doubleValue(col);
      final double vUnfil = dUnfil.doubleValue(col);
      assertEquals("Value not as expected", vFil, FastMath.log1p(Math.abs(vUnfil) * b) / FastMath.log1p(b), 1e-15);
    }
  }
}
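The assertion above encodes the mapping being verified: each attribute x is replaced by log(1 + b * |x|) / log(1 + b), so 0 stays 0 and |x| = 1 maps to 1 for any boost b. A minimal standalone sketch of that formula (plain Java, no ELKI dependencies; the class and method names are ours, not from the project):

// Illustrative sketch of the Log1Plus mapping checked in the test above.
// Not the ELKI implementation, just the formula from the assertion.
public final class Log1PlusSketch {
  /** Map x to log(1 + b * |x|) / log(1 + b) for a boost factor b > 0. */
  static double log1plus(double x, double b) {
    return Math.log1p(Math.abs(x) * b) / Math.log1p(b);
  }

  public static void main(String[] args) {
    double b = 15.; // same boost value as in the test
    System.out.println(log1plus(0., b)); // 0.0
    System.out.println(log1plus(1., b)); // 1.0
    System.out.println(log1plus(.5, b)); // roughly 0.77
  }
}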
Use of de.lmu.ifi.dbs.elki.data.DoubleVector in project elki by elki-project: class ClassicMultidimensionalScalingTransformTest, method parameters().
/**
 * Test with parameters.
 */
@Test
public void parameters() {
  int pdim = 2;
  String filename = UNITTEST + "transformation-test-1.csv";
  ClassicMultidimensionalScalingTransform<DoubleVector, DoubleVector> filter = //
      new ELKIBuilder<ClassicMultidimensionalScalingTransform<DoubleVector, DoubleVector>>(ClassicMultidimensionalScalingTransform.class) //
          .with(ClassicMultidimensionalScalingTransform.Parameterizer.DIM_ID, pdim) //
          .with(ClassicMultidimensionalScalingTransform.Parameterizer.DISTANCE_ID, EuclideanDistanceFunction.class) //
          .build();
  MultipleObjectsBundle filteredBundle = readBundle(filename, filter);
  // Load the test data again without a filter.
  MultipleObjectsBundle unfilteredBundle = readBundle(filename);
  int dimu = getFieldDimensionality(unfilteredBundle, 0, TypeUtil.NUMBER_VECTOR_FIELD);
  int dimf = getFieldDimensionality(filteredBundle, 0, TypeUtil.NUMBER_VECTOR_FIELD);
  assertEquals("Dimensionality not as requested", pdim, dimf);
  // Verify that the Euclidean distance between any two points is identical
  // before and after the MDS transform is performed - O(n^2)!
  // Along the way, accumulate the covariance matrices of the filtered and
  // unfiltered bundles.
  CovarianceMatrix cmUnfil = new CovarianceMatrix(dimu);
  CovarianceMatrix cmFil = new CovarianceMatrix(dimf);
  for (int outer = 0; outer < filteredBundle.dataLength(); outer++) {
    DoubleVector dFil_1 = get(filteredBundle, outer, 0, DoubleVector.class);
    DoubleVector dUnfil_1 = get(unfilteredBundle, outer, 0, DoubleVector.class);
    cmUnfil.put(dUnfil_1);
    cmFil.put(dFil_1);
    for (int row = outer + 1; row < filteredBundle.dataLength(); row++) {
      DoubleVector dFil_2 = get(filteredBundle, row, 0, DoubleVector.class);
      DoubleVector dUnfil_2 = get(unfilteredBundle, row, 0, DoubleVector.class);
      final double distF = EuclideanDistanceFunction.STATIC.distance(dFil_1, dFil_2);
      final double distU = EuclideanDistanceFunction.STATIC.distance(dUnfil_1, dUnfil_2);
      assertEquals("Expected same distance", distU, distF, 1e-11);
    }
  }
  // Compute the SVD of the covariance matrix of the unfiltered data and verify
  // that its singular values match the diagonal of the covariance matrix of
  // the filtered data.
  double[][] ncmUnfil = cmUnfil.destroyToPopulationMatrix();
  double[][] ncmFil = cmFil.destroyToPopulationMatrix();
  SingularValueDecomposition svd = new SingularValueDecomposition(ncmUnfil);
  double[] dia = svd.getSingularValues();
  for (int ii = 0; ii < dia.length; ii++) {
    assertEquals("Unexpected covariance", dia[ii], ncmFil[ii][ii], 1e-11);
  }
}
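The test above checks two invariants of classical MDS with a Euclidean input distance: all pairwise distances are preserved, and the variance of the embedding along its axes matches the singular values of the input covariance matrix. Both hold for any rotation or reflection of centered data, which is the kind of ambiguity classical MDS leaves open. A small self-contained sketch of the first invariant for a plain 2D rotation (illustrative only; dist and rotate are hypothetical helpers, not ELKI API):

// Illustrative sketch: rigid transforms, the transforms classical MDS recovers
// the data up to, preserve pairwise Euclidean distances.
public final class DistancePreservationSketch {
  static double dist(double[] a, double[] b) {
    double sum = 0.;
    for (int i = 0; i < a.length; i++) {
      double d = a[i] - b[i];
      sum += d * d;
    }
    return Math.sqrt(sum);
  }

  /** Rotate a 2D point by the given angle (radians). */
  static double[] rotate(double[] p, double theta) {
    return new double[] { //
        Math.cos(theta) * p[0] - Math.sin(theta) * p[1], //
        Math.sin(theta) * p[0] + Math.cos(theta) * p[1] };
  }

  public static void main(String[] args) {
    double[][] data = { { 1., 2. }, { 3., -1. }, { -2., .5 } };
    double theta = .7;
    for (int i = 0; i < data.length; i++) {
      for (int j = i + 1; j < data.length; j++) {
        double before = dist(data[i], data[j]);
        double after = dist(rotate(data[i], theta), rotate(data[j], theta));
        // Differences should be at the level of floating-point round-off.
        System.out.println(Math.abs(before - after));
      }
    }
  }
}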
Use of de.lmu.ifi.dbs.elki.data.DoubleVector in project elki by elki-project: class FastMultidimensionalScalingTransformTest, method parameters().
/**
 * Test with parameters.
 */
@Test
public void parameters() {
  int pdim = 2;
  String filename = UNITTEST + "transformation-test-1.csv";
  FastMultidimensionalScalingTransform<DoubleVector, DoubleVector> filter = //
      new ELKIBuilder<FastMultidimensionalScalingTransform<DoubleVector, DoubleVector>>(FastMultidimensionalScalingTransform.class) //
          .with(ClassicMultidimensionalScalingTransform.Parameterizer.DIM_ID, pdim) //
          .with(FastMultidimensionalScalingTransform.Parameterizer.RANDOM_ID, 0L) //
          .with(ClassicMultidimensionalScalingTransform.Parameterizer.DISTANCE_ID, EuclideanDistanceFunction.class) //
          .build();
  MultipleObjectsBundle filteredBundle = readBundle(filename, filter);
  // Load the test data again without a filter.
  MultipleObjectsBundle unfilteredBundle = readBundle(filename);
  int dimu = getFieldDimensionality(unfilteredBundle, 0, TypeUtil.NUMBER_VECTOR_FIELD);
  int dimf = getFieldDimensionality(filteredBundle, 0, TypeUtil.NUMBER_VECTOR_FIELD);
  assertEquals("Dimensionality not as requested", pdim, dimf);
  // Verify that the Euclidean distance between any two points is identical
  // before and after the MDS transform is performed - O(n^2)!
  // Along the way, accumulate the covariance matrices of the filtered and
  // unfiltered bundles.
  CovarianceMatrix cmUnfil = new CovarianceMatrix(dimu);
  CovarianceMatrix cmFil = new CovarianceMatrix(dimf);
  for (int outer = 0; outer < filteredBundle.dataLength(); outer++) {
    DoubleVector dFil_1 = get(filteredBundle, outer, 0, DoubleVector.class);
    DoubleVector dUnfil_1 = get(unfilteredBundle, outer, 0, DoubleVector.class);
    cmUnfil.put(dUnfil_1);
    cmFil.put(dFil_1);
    for (int row = outer + 1; row < filteredBundle.dataLength(); row++) {
      DoubleVector dFil_2 = get(filteredBundle, row, 0, DoubleVector.class);
      DoubleVector dUnfil_2 = get(unfilteredBundle, row, 0, DoubleVector.class);
      final double distF = EuclideanDistanceFunction.STATIC.distance(dFil_1, dFil_2);
      final double distU = EuclideanDistanceFunction.STATIC.distance(dUnfil_1, dUnfil_2);
      assertEquals("Expected same distance", distU, distF, 1e-10);
    }
  }
  // Compute the SVD of the covariance matrix of the unfiltered data and verify
  // that its singular values match the diagonal of the covariance matrix of
  // the filtered data.
  double[][] ncmUnfil = cmUnfil.destroyToPopulationMatrix();
  double[][] ncmFil = cmFil.destroyToPopulationMatrix();
  SingularValueDecomposition svd = new SingularValueDecomposition(ncmUnfil);
  double[] dia = svd.getSingularValues();
  for (int ii = 0; ii < dia.length; ii++) {
    assertEquals("Unexpected covariance", dia[ii], ncmFil[ii][ii], 1e-8);
  }
}
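This variant mirrors the classic MDS test but configures FastMultidimensionalScalingTransform, which takes a random seed (RANDOM_ID, fixed to 0L here so the run is reproducible) and is checked against looser tolerances (1e-10 and 1e-8 instead of 1e-11). Presumably the speed-up comes from an iterative, randomly initialized eigensolver rather than a full decomposition; as a rough illustration of why a fixed seed makes such a transform deterministic, here is a generic power-iteration sketch in plain Java (our own code, not the ELKI implementation):

import java.util.Random;

// Illustrative power-iteration sketch: estimates the dominant eigenvector of a
// symmetric matrix from a random start vector, so a fixed seed fixes the result.
public final class PowerIterationSketch {
  static double[] dominantEigenvector(double[][] m, long seed, int iters) {
    Random rnd = new Random(seed);
    int n = m.length;
    double[] v = new double[n];
    for (int i = 0; i < n; i++) {
      v[i] = rnd.nextDouble() - .5; // random start vector
    }
    for (int it = 0; it < iters; it++) {
      double[] next = new double[n];
      for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
          next[i] += m[i][j] * v[j];
        }
      }
      double norm = 0.;
      for (double x : next) {
        norm += x * x;
      }
      norm = Math.sqrt(norm);
      for (int i = 0; i < n; i++) {
        v[i] = next[i] / norm;
      }
    }
    return v;
  }

  public static void main(String[] args) {
    double[][] m = { { 4., 1. }, { 1., 3. } };
    double[] v = dominantEigenvector(m, 0L, 100);
    System.out.println(v[0] + " " + v[1]); // roughly (0.851, 0.526) up to sign
  }
}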
Use of de.lmu.ifi.dbs.elki.data.DoubleVector in project elki by elki-project: class HistogramJitterFilterTest, method parameters().
/**
 * Test with a seed of 0 and a given jitter amount.
 */
@Test
public void parameters() {
  String filename = UNITTEST + "transformation-test-1.csv";
  // Use the value of s as the seed value and j as the jitter amount.
  final double s = 0.;
  final double j = .01;
  HistogramJitterFilter<DoubleVector> filter = //
      new ELKIBuilder<>(HistogramJitterFilter.class) //
          .with(HistogramJitterFilter.Parameterizer.SEED_ID, s) //
          .with(HistogramJitterFilter.Parameterizer.JITTER_ID, j) //
          .build();
  MultipleObjectsBundle filteredBundle = readBundle(filename, filter);
  // Load the test data again without a filter.
  MultipleObjectsBundle unfilteredBundle = readBundle(filename);
  int dim = getFieldDimensionality(filteredBundle, 0, TypeUtil.NUMBER_VECTOR_FIELD);
  assertEquals("Dimensionality changed", dim, getFieldDimensionality(unfilteredBundle, 0, TypeUtil.NUMBER_VECTOR_FIELD));
  // Verify that the filtered and unfiltered bundles have the same length.
  assertEquals("Test file interpreted incorrectly", filteredBundle.dataLength(), unfilteredBundle.dataLength());
  // Verify that at least a fraction p of the values lie within a relative
  // tolerance a of the unfiltered value.
  final double p = .9, a = .1;
  int withinRange = 0;
  for (int row = 0; row < filteredBundle.dataLength(); row++) {
    DoubleVector dFil = get(filteredBundle, row, 0, DoubleVector.class);
    DoubleVector dUnfil = get(unfilteredBundle, row, 0, DoubleVector.class);
    for (int col = 0; col < dim; col++) {
      final double vFil = dFil.doubleValue(col);
      final double vUnfil = dUnfil.doubleValue(col);
      if (Math.abs((vFil / vUnfil) - 1.) <= a) {
        withinRange++;
      }
    }
  }
  assertEquals("Too many values have moved too much", 1., withinRange / (double) (dim * filteredBundle.dataLength()), 1. - p);
}
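The final assertion is slightly indirect: it compares the fraction of values within tolerance to 1.0 with a delta of 1 - p, which, since the fraction can never exceed 1, is the same as requiring the fraction to be at least p. A tiny standalone sketch of that logic (hypothetical data values, plain Java):

// Illustrative sketch of the final assertion's logic: comparing the fraction of
// values within tolerance to 1.0 with delta (1 - p) is equivalent to requiring
// that fraction to be at least p.
public final class FractionWithinToleranceSketch {
  public static void main(String[] args) {
    double p = .9, a = .1;
    double[] original = { 1., 2., 4., 8. };
    double[] jittered = { 1.05, 1.9, 4.2, 7.5 }; // hypothetical jittered values
    int withinRange = 0;
    for (int i = 0; i < original.length; i++) {
      if (Math.abs(jittered[i] / original[i] - 1.) <= a) {
        withinRange++;
      }
    }
    double fraction = withinRange / (double) original.length;
    // Passes iff fraction >= p, mirroring assertEquals(1., fraction, 1. - p).
    System.out.println(fraction + " >= " + p + ": " + (fraction >= p));
  }
}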
Use of de.lmu.ifi.dbs.elki.data.DoubleVector in project elki by elki-project: class VectorDimensionalityFilterTest, method parameters().
/**
 * Test with parameter dim_keep as the dimensionality of the vectors to keep.
 */
@Test
public void parameters() {
  final int dim_keep = 10;
  String filename = UNITTEST + "dimensionality-test-2.csv";
  VectorDimensionalityFilter<DoubleVector> filter = //
      new ELKIBuilder<VectorDimensionalityFilter<DoubleVector>>(VectorDimensionalityFilter.class) //
          .with(VectorDimensionalityFilter.Parameterizer.DIM_P, dim_keep) //
          .build();
  MultipleObjectsBundle filteredBundle = readBundle(filename, filter);
  // Load the test data again without a filter.
  MultipleObjectsBundle unfilteredBundle = readBundle(filename);
  // Verify that the unfiltered data contains vectors of a different
  // dimensionality, so the filter actually has something to remove.
  boolean foundTooSmall = false;
  for (int row = 0; row < unfilteredBundle.dataLength(); row++) {
    Object obj = unfilteredBundle.data(row, 0);
    assertEquals("Unexpected data type", DoubleVector.class, obj.getClass());
    DoubleVector d = (DoubleVector) obj;
    if (d.getDimensionality() != dim_keep) {
      foundTooSmall = true;
      break;
    }
  }
  assertTrue("Expected a vector with filterable dimensionality", foundTooSmall);
  // The filter must have removed those vectors, so the filtered bundle is shorter.
  assertTrue("Expected smaller data length", filteredBundle.dataLength() < unfilteredBundle.dataLength());
}
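Conceptually, the test relies on a simple predicate: only vectors with exactly the requested dimensionality survive the filter, so a mixed-dimensionality input must shrink. A minimal plain-Java sketch of that predicate (illustrative only, not the ELKI filter):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative sketch of the predicate the test relies on:
// keep only vectors with exactly the requested dimensionality.
public final class DimensionalityFilterSketch {
  public static void main(String[] args) {
    int dimKeep = 10; // mirrors dim_keep in the test
    List<double[]> vectors = Arrays.asList(new double[10], new double[9], new double[10]);
    List<double[]> kept = new ArrayList<>();
    for (double[] v : vectors) {
      if (v.length == dimKeep) {
        kept.add(v);
      }
    }
    // With mixed dimensionalities in the input, the output must be shorter.
    System.out.println(kept.size() + " of " + vectors.size() + " vectors kept"); // 2 of 3
  }
}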