Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
Class CombineAndSimplifyBounds, method doSimplify:
/**
 * Simplify BoundDimFilters that are children of an OR or an AND.
 *
 * @param children    the filters
 * @param disjunction true for OR, false for AND
 *
 * @return simplified filters
 */
private static DimFilter doSimplify(final List<DimFilter> children, boolean disjunction) {
  // Copy the list of child filters. We'll modify the copy and eventually return it.
  final List<DimFilter> newChildren = Lists.newArrayList(children);

  // Group Bound filters by dimension, extractionFn, and comparator and compute a RangeSet for each group.
  final Map<BoundRefKey, List<BoundDimFilter>> bounds = new HashMap<>();

  // All AND/OR filters have at least one child.
  boolean allFalse = true;
  for (final DimFilter child : newChildren) {
    if (child instanceof BoundDimFilter) {
      final BoundDimFilter bound = (BoundDimFilter) child;
      final BoundRefKey boundRefKey = BoundRefKey.from(bound);
      final List<BoundDimFilter> filterList = bounds.computeIfAbsent(boundRefKey, k -> new ArrayList<>());
      filterList.add(bound);
      allFalse = false;
    } else {
      allFalse &= child instanceof FalseDimFilter;
    }
  }

  // Short circuit if the filter can never be true.
  if (allFalse) {
    return Filtration.matchNothing();
  }

  // Try to simplify filters within each group.
  for (Map.Entry<BoundRefKey, List<BoundDimFilter>> entry : bounds.entrySet()) {
    final BoundRefKey boundRefKey = entry.getKey();
    final List<BoundDimFilter> filterList = entry.getValue();

    // Create a RangeSet for this group.
    final RangeSet<BoundValue> rangeSet = disjunction
                                          ? RangeSets.unionRanges(Bounds.toRanges(filterList))
                                          : RangeSets.intersectRanges(Bounds.toRanges(filterList));

    if (rangeSet.asRanges().size() < filterList.size()) {
      // We found a simplification. Remove the old filters and add new ones.
      for (final BoundDimFilter bound : filterList) {
        if (!newChildren.remove(bound)) {
          // Don't expect this to happen, but include it as a sanity check.
          throw new ISE("Tried to remove bound, but couldn't");
        }
      }

      if (rangeSet.asRanges().isEmpty()) {
        // Range set matches nothing, equivalent to FALSE.
        newChildren.add(Filtration.matchNothing());
      }

      for (final Range<BoundValue> range : rangeSet.asRanges()) {
        if (!range.hasLowerBound() && !range.hasUpperBound()) {
          // Range matches everything, equivalent to TRUE.
          newChildren.add(Filtration.matchEverything());
        } else {
          newChildren.add(Bounds.toFilter(boundRefKey, range));
        }
      }
    }
  }

  // Finally: go through newChildren, removing children or exiting early based on TRUE / FALSE marker filters.
  Preconditions.checkState(newChildren.size() > 0, "newChildren.size > 0");

  final Iterator<DimFilter> iterator = newChildren.iterator();
  while (iterator.hasNext()) {
    final DimFilter newChild = iterator.next();

    if (Filtration.matchNothing().equals(newChild)) {
      if (disjunction) {
        // OR with FALSE => drop the FALSE child.
        iterator.remove();
      } else {
        // AND with FALSE => always false, short circuit.
        return Filtration.matchNothing();
      }
    } else if (Filtration.matchEverything().equals(newChild)) {
      if (disjunction) {
        // OR with TRUE => always true, short circuit.
        return Filtration.matchEverything();
      } else {
        // AND with TRUE => drop the TRUE child.
        iterator.remove();
      }
    }
  }

  if (newChildren.isEmpty()) {
    // If "newChildren" is empty at this point, it must have consisted entirely of TRUE / FALSE marker filters.
    if (disjunction) {
      // Must have been all FALSE filters (the only kind we would have removed above).
      return Filtration.matchNothing();
    } else {
      // Must have been all TRUE filters (the only kind we would have removed above).
      return Filtration.matchEverything();
    }
  } else if (newChildren.size() == 1) {
    return newChildren.get(0);
  } else {
    return disjunction ? new OrDimFilter(newChildren) : new AndDimFilter(newChildren);
  }
}
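To make the effect concrete, here is a minimal sketch of the kind of collapse this method performs on an AND of bounds over the same column. It is not taken from the Druid repository: the column name and bound values are invented, and the usual org.apache.druid.query.filter and Guava imports are assumed.

  // Two one-sided bounds on the same column, combined with AND:
  //   dim > "1"  AND  dim < "10"
  final DimFilter lowerOnly = new BoundDimFilter("dim", "1", null, true, false, null, null, null);
  final DimFilter upperOnly = new BoundDimFilter("dim", null, "10", false, true, null, null, null);
  final DimFilter before = new AndDimFilter(ImmutableList.of(lowerOnly, upperOnly));

  // The range-set intersection above would yield a single, equivalent two-sided bound:
  final DimFilter after = new BoundDimFilter("dim", "1", "10", true, true, null, null, null);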
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
Class BottomUpTransform, method apply0:
private DimFilter apply0(final DimFilter filter) {
  if (filter instanceof AndDimFilter) {
    final List<DimFilter> oldFilters = ((AndDimFilter) filter).getFields();
    final List<DimFilter> newFilters = new ArrayList<>();
    for (DimFilter oldFilter : oldFilters) {
      final DimFilter newFilter = apply0(oldFilter);
      if (newFilter != null) {
        newFilters.add(newFilter);
      }
    }
    if (!newFilters.equals(oldFilters)) {
      return checkedProcess(new AndDimFilter(newFilters));
    } else {
      return checkedProcess(filter);
    }
  } else if (filter instanceof OrDimFilter) {
    final List<DimFilter> oldFilters = ((OrDimFilter) filter).getFields();
    final List<DimFilter> newFilters = new ArrayList<>();
    for (DimFilter oldFilter : oldFilters) {
      final DimFilter newFilter = apply0(oldFilter);
      if (newFilter != null) {
        newFilters.add(newFilter);
      }
    }
    if (!newFilters.equals(oldFilters)) {
      return checkedProcess(new OrDimFilter(newFilters));
    } else {
      return checkedProcess(filter);
    }
  } else if (filter instanceof NotDimFilter) {
    final DimFilter oldFilter = ((NotDimFilter) filter).getField();
    final DimFilter newFilter = apply0(oldFilter);
    if (!oldFilter.equals(newFilter)) {
      return checkedProcess(new NotDimFilter(newFilter));
    } else {
      return checkedProcess(filter);
    }
  } else {
    return checkedProcess(filter);
  }
}
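For illustration, a hedged sketch of a concrete transform built on this traversal. It assumes BottomUpTransform exposes a protected process(DimFilter) hook that checkedProcess delegates to; the class name and rewrite rule are invented, not part of the Druid codebase.

  // Hypothetical transform: collapse a single-child AND into its only child.
  public class CollapseSingletonAnd extends BottomUpTransform {
    @Override
    protected DimFilter process(final DimFilter filter) {
      if (filter instanceof AndDimFilter && ((AndDimFilter) filter).getFields().size() == 1) {
        // AND(x) is equivalent to x.
        return ((AndDimFilter) filter).getFields().get(0);
      }
      return filter;
    }
  }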
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
Class ArrayContainsOperatorConversion, method toDruidFilter:
@Nullable
@Override
public DimFilter toDruidFilter(final PlannerContext plannerContext, RowSignature rowSignature, @Nullable VirtualColumnRegistry virtualColumnRegistry, final RexNode rexNode) {
  final List<RexNode> operands = ((RexCall) rexNode).getOperands();
  final List<DruidExpression> druidExpressions = Expressions.toDruidExpressions(plannerContext, rowSignature, operands);
  if (druidExpressions == null) {
    return null;
  }

  // Convert the array_contains() call into an AND of selector filters if possible.
  final DruidExpression leftExpr = druidExpressions.get(0);
  final DruidExpression rightExpr = druidExpressions.get(1);

  if (leftExpr.isSimpleExtraction()) {
    Expr expr = Parser.parse(rightExpr.getExpression(), plannerContext.getExprMacroTable());
    // The conversion is only possible when the right-hand expression is a literal: there is no way to
    // extract the array elements by traversing the Expr, since Expr implementations are package-private
    // classes in a different package.
    if (expr.isLiteral()) {
      // Evaluate the expression to get out the array elements.
      // We can safely pass a noop ObjectBinding if the expression is literal.
      ExprEval<?> exprEval = expr.eval(InputBindings.nilBindings());
      String[] arrayElements = exprEval.asStringArray();
      if (arrayElements == null || arrayElements.length == 0) {
        // An empty literal array should not occur here (SQL cannot create an empty array with no arguments),
        // so just return null.
        return null;
      } else if (arrayElements.length == 1) {
        return newSelectorDimFilter(leftExpr.getSimpleExtraction(), arrayElements[0]);
      } else {
        final List<DimFilter> selectFilters = Arrays
            .stream(arrayElements)
            .map(val -> newSelectorDimFilter(leftExpr.getSimpleExtraction(), val))
            .collect(Collectors.toList());
        return new AndDimFilter(selectFilters);
      }
    }
  }

  return toExpressionFilter(plannerContext, getDruidFunctionName(), druidExpressions);
}
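As an illustration (the column name and array values are invented), a call such as ARRAY_CONTAINS("dim", ARRAY['a', 'b']) ends up in the multi-element branch above and produces a filter roughly equivalent to:

  final DimFilter equivalent = new AndDimFilter(
      ImmutableList.of(
          new SelectorDimFilter("dim", "a", null),
          new SelectorDimFilter("dim", "b", null)
      )
  );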
Use of org.apache.druid.query.filter.AndDimFilter in project hive by apache.
Class DruidStorageHandlerUtils, method addDynamicFilters:
public static org.apache.druid.query.Query addDynamicFilters(org.apache.druid.query.Query query, ExprNodeGenericFuncDesc filterExpr, Configuration conf, boolean resolveDynamicValues) {
  List<VirtualColumn> virtualColumns = Arrays.asList(getVirtualColumns(query).getVirtualColumns());
  org.apache.druid.query.Query rv = query;
  DimFilter joinReductionFilter = toDruidFilter(filterExpr, conf, virtualColumns, resolveDynamicValues);
  if (joinReductionFilter != null) {
    String type = query.getType();
    DimFilter filter = new AndDimFilter(joinReductionFilter, query.getFilter());
    switch (type) {
      case org.apache.druid.query.Query.TIMESERIES:
        rv = Druids.TimeseriesQueryBuilder.copy((TimeseriesQuery) query)
            .filters(filter)
            .virtualColumns(VirtualColumns.create(virtualColumns))
            .build();
        break;
      case org.apache.druid.query.Query.TOPN:
        rv = new TopNQueryBuilder((TopNQuery) query)
            .filters(filter)
            .virtualColumns(VirtualColumns.create(virtualColumns))
            .build();
        break;
      case org.apache.druid.query.Query.GROUP_BY:
        rv = new GroupByQuery.Builder((GroupByQuery) query)
            .setDimFilter(filter)
            .setVirtualColumns(VirtualColumns.create(virtualColumns))
            .build();
        break;
      case org.apache.druid.query.Query.SCAN:
        rv = Druids.ScanQueryBuilder.copy((ScanQuery) query)
            .filters(filter)
            .virtualColumns(VirtualColumns.create(virtualColumns))
            .build();
        break;
      default:
        throw new UnsupportedOperationException("Unsupported Query type " + type);
    }
  }
  return rv;
}
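A hedged usage sketch: scanQuery, filterExpr, and conf are assumed to be defined by the caller and are not part of the source above. The call pushes a join-reduction predicate into the query, combining it with the query's existing filter via the AndDimFilter constructed in the method.

  // Illustrative only: rewrite an existing Druid query with an additional pushed-down predicate.
  org.apache.druid.query.Query rewritten =
      DruidStorageHandlerUtils.addDynamicFilters(scanQuery, filterExpr, conf, true);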
Use of org.apache.druid.query.filter.AndDimFilter in project druid by druid-io.
Class SearchBenchmark, method basicD:
private static SearchQueryBuilder basicD(final GeneratorSchemaInfo basicSchema) {
  final QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
  final List<String> dimUniformFilterVals = new ArrayList<>();
  final int resultNum = (int) (100000 * 0.1);
  final int step = 100000 / resultNum;
  for (int i = 1; i < 100001 && dimUniformFilterVals.size() < resultNum; i += step) {
    dimUniformFilterVals.add(String.valueOf(i));
  }

  final String dimName = "dimUniform";
  final List<DimFilter> dimFilters = new ArrayList<>();
  dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, null));
  dimFilters.add(new SelectorDimFilter(dimName, "3", null));
  dimFilters.add(new BoundDimFilter(dimName, "100", "10000", true, true, true, null, null));

  return Druids.newSearchQueryBuilder()
      .dataSource("blah")
      .granularity(Granularities.ALL)
      .intervals(intervalSpec)
      .query("")
      .dimensions(Collections.singletonList("dimUniform"))
      .filters(new AndDimFilter(dimFilters));
}