Use of org.opensearch.search.aggregations.metrics.HyperLogLogPlusPlus in the anomaly-detection project by opensearch-project.
From the class NoPowermockSearchFeatureDaoTests, method testParseBuckets:
@SuppressWarnings("rawtypes")
public void testParseBuckets() throws InstantiationException, IllegalAccessException, IllegalArgumentException,
    InvocationTargetException, NoSuchMethodException, SecurityException {
    // cannot mock the final class HyperLogLogPlusPlus, so build a real sketch instead
    HyperLogLogPlusPlus hllpp = new HyperLogLogPlusPlus(
        randomIntBetween(AbstractHyperLogLog.MIN_PRECISION, AbstractHyperLogLog.MAX_PRECISION),
        new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()),
        1
    );
    hllpp.collect(0, BitMixer.mix64(randomIntBetween(1, 100)));
    hllpp.collect(0, BitMixer.mix64(randomIntBetween(1, 100)));

    // wrap the sketch in an InternalCardinality, constructed via reflection
    Constructor ctor = InternalCardinality.class
        .getDeclaredConstructor(String.class, AbstractHyperLogLogPlusPlus.class, Map.class);
    ctor.setAccessible(true);
    InternalCardinality cardinality = (InternalCardinality) ctor.newInstance("impactUniqueAccounts", hllpp, new HashMap<>());

    // have to use reflection here as well, since none of InternalFilter's constructors are public
    ctor = InternalFilter.class.getDeclaredConstructor(String.class, long.class, InternalAggregations.class, Map.class);
    ctor.setAccessible(true);
    String featureId = "deny_max";
    InternalFilter internalFilter = (InternalFilter) ctor
        .newInstance(featureId, 100, InternalAggregations.from(Arrays.asList(cardinality)), new HashMap<>());
    InternalBucket bucket = new InternalFilters.InternalBucket(
        "test",
        randomIntBetween(0, 1000),
        InternalAggregations.from(Arrays.asList(internalFilter)),
        true
    );

    Optional<double[]> parsedResult = searchFeatureDao.parseBucket(bucket, Arrays.asList(featureId));

    assertTrue(parsedResult.isPresent());
    double[] parsedCardinality = parsedResult.get();
    assertEquals(1, parsedCardinality.length);
    assertEquals(2, parsedCardinality[0], 0.001);
    // release the MockBigArrays-backed sketch; otherwise, the test will fail
    Releasables.close(hllpp);
}
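For context, the HyperLogLogPlusPlus flow the test exercises can also be written without the reflection plumbing around InternalCardinality and InternalFilter. The following is a minimal sketch, assuming it lives in the same test class as testParseBuckets (so the Settings, MockBigArrays, BitMixer, and Releasables references above apply) and assuming the sketch exposes cardinality(long bucketOrd) for reading the estimate, which is how InternalCardinality reports its value; the helper name is hypothetical and not part of the project.

// Hypothetical helper: a minimal sketch of direct HyperLogLogPlusPlus usage.
public void hllppUsageSketch() {
    // precision trades accuracy for memory; the last argument is the initial number of bucket ordinals
    HyperLogLogPlusPlus hllpp = new HyperLogLogPlusPlus(
        AbstractHyperLogLog.MAX_PRECISION,
        new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()),
        1
    );
    // values are hashed with BitMixer.mix64 before being collected, as in the test above
    hllpp.collect(0, BitMixer.mix64(1));
    hllpp.collect(0, BitMixer.mix64(2));
    hllpp.collect(0, BitMixer.mix64(2)); // duplicates do not raise the estimate
    // assumed accessor: cardinality(bucketOrd) returns the approximate distinct count for that bucket
    long approxDistinct = hllpp.cardinality(0); // expected to be about 2
    assertEquals(2, approxDistinct);
    // sketches backed by MockBigArrays must be released, or leak checks fail
    Releasables.close(hllpp);
}

This mirrors the assertion in the test: two hashed values were collected into bucket ordinal 0, so the estimate read back through the cardinality aggregation is about 2.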