Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class FilteredAggregatorTest, method testAggregateWithOrFilter.
@Test
public void testAggregateWithOrFilter() {
  final float[] values = {0.15f, 0.27f, 0.14f};
  final TestFloatColumnSelector selector = new TestFloatColumnSelector(values);
  FilteredAggregatorFactory factory = new FilteredAggregatorFactory(
      new DoubleSumAggregatorFactory("billy", "value"),
      new OrDimFilter(
          Lists.newArrayList(
              new SelectorDimFilter("dim", "a", null),
              new SelectorDimFilter("dim", "b", null)
          )
      )
  );
  FilteredAggregator agg = (FilteredAggregator) factory.factorize(makeColumnSelector(selector));
  // Widening casts instead of the deprecated Float constructor.
  double expectedFirst = (double) values[0];
  double expectedSecond = (double) values[1] + expectedFirst;
  double expectedThird = expectedSecond + (double) values[2];
  assertValues(agg, selector, expectedFirst, expectedSecond, expectedThird);
}
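OrDimFilter accepts its children either as a List<DimFilter> (as in the test above) or as varargs; both forms appear on this page. A minimal sketch showing the two constructor shapes (the variable names are illustrative only):

// List form, as in the test above.
DimFilter viaList = new OrDimFilter(
    Lists.newArrayList(
        new SelectorDimFilter("dim", "a", null),
        new SelectorDimFilter("dim", "b", null)
    )
);

// Varargs form, as in the BloomDimFilterSqlTest snippet further down.
DimFilter viaVarargs = new OrDimFilter(
    new SelectorDimFilter("dim", "a", null),
    new SelectorDimFilter("dim", "b", null)
);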
Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class CombineAndSimplifyBounds, method doSimplify.
/**
 * Simplify BoundDimFilters that are children of an OR or an AND.
 *
 * @param children    the filters
 * @param disjunction true for OR, false for AND
 *
 * @return simplified filters
 */
private static DimFilter doSimplify(final List<DimFilter> children, boolean disjunction) {
  // Copy the list of child filters. We'll modify the copy and eventually return it.
  final List<DimFilter> newChildren = Lists.newArrayList(children);

  // Group Bound filters by dimension, extractionFn, and comparator, and compute a RangeSet for each group.
  final Map<BoundRefKey, List<BoundDimFilter>> bounds = new HashMap<>();

  // All AND/OR filters have at least one child, so starting "allFalse" at true is safe.
  boolean allFalse = true;
  for (final DimFilter child : newChildren) {
    if (child instanceof BoundDimFilter) {
      final BoundDimFilter bound = (BoundDimFilter) child;
      final BoundRefKey boundRefKey = BoundRefKey.from(bound);
      final List<BoundDimFilter> filterList = bounds.computeIfAbsent(boundRefKey, k -> new ArrayList<>());
      filterList.add(bound);
      allFalse = false;
    } else {
      allFalse &= child instanceof FalseDimFilter;
    }
  }

  // Short-circuit: if every child is a FALSE filter, the whole expression can never be true.
  if (allFalse) {
    return Filtration.matchNothing();
  }

  // Try to simplify the filters within each group.
  for (Map.Entry<BoundRefKey, List<BoundDimFilter>> entry : bounds.entrySet()) {
    final BoundRefKey boundRefKey = entry.getKey();
    final List<BoundDimFilter> filterList = entry.getValue();

    // Create a RangeSet for this group: union the ranges for OR, intersect them for AND.
    final RangeSet<BoundValue> rangeSet = disjunction
        ? RangeSets.unionRanges(Bounds.toRanges(filterList))
        : RangeSets.intersectRanges(Bounds.toRanges(filterList));

    if (rangeSet.asRanges().size() < filterList.size()) {
      // We found a simplification. Remove the old filters and add new ones.
      for (final BoundDimFilter bound : filterList) {
        if (!newChildren.remove(bound)) {
          // Don't expect this to happen, but include it as a sanity check.
          throw new ISE("Tried to remove bound, but couldn't");
        }
      }
      if (rangeSet.asRanges().isEmpty()) {
        // The range set matches nothing: equivalent to FALSE.
        newChildren.add(Filtration.matchNothing());
      }
      for (final Range<BoundValue> range : rangeSet.asRanges()) {
        if (!range.hasLowerBound() && !range.hasUpperBound()) {
          // The range matches everything: equivalent to TRUE.
          newChildren.add(Filtration.matchEverything());
        } else {
          newChildren.add(Bounds.toFilter(boundRefKey, range));
        }
      }
    }
  }

  // Finally: go through newChildren, removing children or exiting early based on TRUE / FALSE marker filters.
  Preconditions.checkState(newChildren.size() > 0, "newChildren.size > 0");
  final Iterator<DimFilter> iterator = newChildren.iterator();
  while (iterator.hasNext()) {
    final DimFilter newChild = iterator.next();
    if (Filtration.matchNothing().equals(newChild)) {
      // OR with FALSE => ignore the child; AND with FALSE => always false, short circuit.
      if (disjunction) {
        iterator.remove();
      } else {
        return Filtration.matchNothing();
      }
    } else if (Filtration.matchEverything().equals(newChild)) {
      // OR with TRUE => always true, short circuit; AND with TRUE => ignore the child.
      if (disjunction) {
        return Filtration.matchEverything();
      } else {
        iterator.remove();
      }
    }
  }

  if (newChildren.isEmpty()) {
    // "newChildren" must have consisted entirely of TRUE / FALSE marker filters.
    if (disjunction) {
      // Must have been all FALSE filters (the only kind we would have removed above).
      return Filtration.matchNothing();
    } else {
      // Must have been all TRUE filters (the only kind we would have removed above).
      return Filtration.matchEverything();
    }
  } else if (newChildren.size() == 1) {
    return newChildren.get(0);
  } else {
    return disjunction ? new OrDimFilter(newChildren) : new AndDimFilter(newChildren);
  }
}
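The heart of the simplification is ordinary Guava range arithmetic: when the union (for OR) or intersection (for AND) of a group's ranges produces fewer ranges than there were bound filters, the bounds can be rewritten. A minimal sketch of the union step using Guava's TreeRangeSet directly rather than Druid's RangeSets helper, with plain String endpoints standing in for Druid's BoundValue:

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class RangeUnionSketch {
  public static void main(String[] args) {
    // Two overlapping bounds on the same column, OR'd together:
    //   x < "5"  OR  x >= "3"
    final RangeSet<String> union = TreeRangeSet.create();
    union.add(Range.lessThan("5")); // (-inf.."5")
    union.add(Range.atLeast("3"));  // ["3"..+inf)

    // TreeRangeSet coalesces connected ranges, so the union collapses to a
    // single all-encompassing range with no lower or upper bound; doSimplify
    // would replace both bound filters with Filtration.matchEverything() (TRUE).
    System.out.println(union.asRanges()); // one all-encompassing range
  }
}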
Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class BottomUpTransform, method apply0.
// Recurse into AND/OR/NOT children first, then run the transform on the
// (possibly rebuilt) parent: a bottom-up rewrite of the filter tree.
private DimFilter apply0(final DimFilter filter) {
  if (filter instanceof AndDimFilter) {
    final List<DimFilter> oldFilters = ((AndDimFilter) filter).getFields();
    final List<DimFilter> newFilters = new ArrayList<>();
    for (DimFilter oldFilter : oldFilters) {
      final DimFilter newFilter = apply0(oldFilter);
      if (newFilter != null) {
        newFilters.add(newFilter);
      }
    }
    if (!newFilters.equals(oldFilters)) {
      return checkedProcess(new AndDimFilter(newFilters));
    } else {
      return checkedProcess(filter);
    }
  } else if (filter instanceof OrDimFilter) {
    final List<DimFilter> oldFilters = ((OrDimFilter) filter).getFields();
    final List<DimFilter> newFilters = new ArrayList<>();
    for (DimFilter oldFilter : oldFilters) {
      final DimFilter newFilter = apply0(oldFilter);
      if (newFilter != null) {
        newFilters.add(newFilter);
      }
    }
    if (!newFilters.equals(oldFilters)) {
      return checkedProcess(new OrDimFilter(newFilters));
    } else {
      return checkedProcess(filter);
    }
  } else if (filter instanceof NotDimFilter) {
    final DimFilter oldFilter = ((NotDimFilter) filter).getField();
    final DimFilter newFilter = apply0(oldFilter);
    if (!oldFilter.equals(newFilter)) {
      return checkedProcess(new NotDimFilter(newFilter));
    } else {
      return checkedProcess(filter);
    }
  } else {
    return checkedProcess(filter);
  }
}
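Because the recursion rebuilds children before handing the parent to checkedProcess, a concrete transform only has to handle one node at a time. A hypothetical subclass sketch, assuming BottomUpTransform's abstract hook is a process(DimFilter) method wrapped by checkedProcess (the class name and behavior are illustrative, not from the Druid codebase):

import java.util.Locale;

import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

// Hypothetical transform: upper-cases the value of every SelectorDimFilter
// leaf. Nested AND/OR/NOT structure is handled by apply0's recursion above,
// so only individual nodes need to be considered here.
class UppercaseSelectorTransform extends BottomUpTransform {
  @Override
  protected DimFilter process(final DimFilter filter) {
    if (filter instanceof SelectorDimFilter) {
      final SelectorDimFilter selector = (SelectorDimFilter) filter;
      // Ignores null values for brevity; a real transform would guard against them.
      return new SelectorDimFilter(
          selector.getDimension(),
          selector.getValue().toUpperCase(Locale.ROOT),
          selector.getExtractionFn()
      );
    }
    return filter; // non-selector nodes pass through unchanged
  }
}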
Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class BloomDimFilterSqlTest, method testBloomFilters.
@Test
public void testBloomFilters() throws Exception {
  BloomKFilter filter = new BloomKFilter(1500);
  filter.addString("def");
  BloomKFilter filter2 = new BloomKFilter(1500);
  // Note: "abc" is also added to the first filter, so "filter2" stays empty.
  // The expected count of 2 comes from dim1 matching "def" and "abc"; the dim2
  // test against the empty filter matches nothing.
  filter.addString("abc");
  byte[] bytes = BloomFilterSerializersModule.bloomKFilterToBytes(filter);
  byte[] bytes2 = BloomFilterSerializersModule.bloomKFilterToBytes(filter2);
  String base64 = StringUtils.encodeBase64String(bytes);
  String base642 = StringUtils.encodeBase64String(bytes2);
  testQuery(
      StringUtils.format(
          "SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(dim1, '%s') OR bloom_filter_test(dim2, '%s')",
          base64,
          base642
      ),
      ImmutableList.of(
          Druids.newTimeseriesQueryBuilder()
                .dataSource(CalciteTests.DATASOURCE1)
                .intervals(querySegmentSpec(Filtration.eternity()))
                .granularity(Granularities.ALL)
                .filters(
                    new OrDimFilter(
                        new BloomDimFilter("dim1", BloomKFilterHolder.fromBloomKFilter(filter), null),
                        new BloomDimFilter("dim2", BloomKFilterHolder.fromBloomKFilter(filter2), null)
                    )
                )
                .aggregators(aggregators(new CountAggregatorFactory("a0")))
                .context(QUERY_CONTEXT_DEFAULT)
                .build()
      ),
      ImmutableList.of(new Object[]{2L})
  );
}
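For reference, the per-row membership check that bloom_filter_test performs boils down to BloomKFilter's probabilistic lookup. A minimal sketch, assuming the testString method of the Hive-derived BloomKFilter used by the extension:

BloomKFilter sketch = new BloomKFilter(1500);
sketch.addString("def");

// Added values are always found; absent values are rejected with high
// probability (bloom filters can return false positives, never false negatives).
boolean hit = sketch.testString("def");  // true
boolean miss = sketch.testString("xyz"); // almost certainly false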
Use of org.apache.druid.query.filter.OrDimFilter in project druid by druid-io.
From the class FilteredAggregatorBenchmark, method setup.
/**
 * Set up everything common to benchmarking both the incremental index and the queryable index.
 */
@Setup
public void setup() {
  log.info("SETUP CALLED AT " + System.currentTimeMillis());
  ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());
  schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get(schema);
  generator = new DataGenerator(schemaInfo.getColumnSchemas(), RNG_SEED, schemaInfo.getDataInterval(), rowsPerSegment);
  // Each sub-filter appears constructed to match no generated rows: the bound
  // ("-1", "-1") is empty because both endpoints are strict, and "dimSequential"
  // holds sequential numeric strings, so the regex / search / IN checks for "X"
  // never hit. The benchmark thus measures filter-evaluation overhead.
  filter = new OrDimFilter(
      Arrays.asList(
          new BoundDimFilter("dimSequential", "-1", "-1", true, true, null, null, StringComparators.ALPHANUMERIC),
          new RegexDimFilter("dimSequential", "X", null),
          new SearchQueryDimFilter("dimSequential", new ContainsSearchQuerySpec("X", false), null),
          new InDimFilter("dimSequential", Collections.singletonList("X"), null)
      )
  );
  filteredMetric = new FilteredAggregatorFactory(new CountAggregatorFactory("rows"), filter);
  factory = new TimeseriesQueryRunnerFactory(
      new TimeseriesQueryQueryToolChest(),
      new TimeseriesQueryEngine(),
      QueryBenchmarkUtil.NOOP_QUERYWATCHER
  );
  GeneratorSchemaInfo basicSchema = GeneratorBasicSchemas.SCHEMA_MAP.get("basic");
  QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
  List<AggregatorFactory> queryAggs = Collections.singletonList(filteredMetric);
  query = Druids.newTimeseriesQueryBuilder()
                .dataSource("blah")
                .granularity(Granularities.ALL)
                .intervals(intervalSpec)
                .aggregators(queryAggs)
                .descending(descending)
                .build();
}
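A quick way to see what such a composite filter looks like on the wire is to serialize it with Druid's Jackson mapper. A sketch assuming DefaultObjectMapper and the standard polymorphic type registration on DimFilter; the printed JSON shape is abbreviated and illustrative:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

public class FilterJsonSketch {
  public static void main(String[] args) throws Exception {
    final DimFilter filter = new OrDimFilter(
        new SelectorDimFilter("dim", "a", null),
        new SelectorDimFilter("dim", "b", null)
    );
    final ObjectMapper mapper = new DefaultObjectMapper();
    // Prints the native JSON form, e.g.
    // {"type":"or","fields":[{"type":"selector","dimension":"dim","value":"a"},...]}
    System.out.println(mapper.writeValueAsString(filter));
  }
}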