Use of org.apache.druid.query.filter.DimFilter in project druid by druid-io.
From the class DruidQuery, method computeGrouping:
@Nonnull
private static Grouping computeGrouping(
    final PartialDruidQuery partialQuery,
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    final VirtualColumnRegistry virtualColumnRegistry,
    final RexBuilder rexBuilder,
    final boolean finalizeAggregations
)
{
  final Aggregate aggregate = Preconditions.checkNotNull(partialQuery.getAggregate(), "aggregate");
  final Project aggregateProject = partialQuery.getAggregateProject();

  final List<DimensionExpression> dimensions =
      computeDimensions(partialQuery, plannerContext, rowSignature, virtualColumnRegistry);
  final Subtotals subtotals = computeSubtotals(partialQuery, rowSignature);
  final List<Aggregation> aggregations =
      computeAggregations(partialQuery, plannerContext, rowSignature, virtualColumnRegistry, rexBuilder, finalizeAggregations);

  final RowSignature aggregateRowSignature = RowSignatures.fromRelDataType(
      ImmutableList.copyOf(
          Iterators.concat(
              dimensions.stream().map(DimensionExpression::getOutputName).iterator(),
              aggregations.stream().map(Aggregation::getOutputName).iterator()
          )
      ),
      aggregate.getRowType()
  );

  final DimFilter havingFilter = computeHavingFilter(partialQuery, plannerContext, aggregateRowSignature);
  final Grouping grouping = Grouping.create(dimensions, subtotals, aggregations, havingFilter, aggregateRowSignature);

  if (aggregateProject == null) {
    return grouping;
  } else {
    return grouping.applyProject(plannerContext, aggregateProject);
  }
}
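Note how aggregateRowSignature lists dimension output names before aggregation output names, matching Calcite's convention that an Aggregate's row type puts group keys ahead of aggregate calls. A minimal, self-contained sketch of that ordering (the column names d0, d1, a0 are only illustrative):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import java.util.List;

public class GroupingColumnOrderSketch
{
  public static void main(String[] args)
  {
    // Group keys (dimensions) first, then aggregators, mirroring the
    // Iterators.concat call in computeGrouping above.
    List<String> dimensionNames = ImmutableList.of("d0", "d1");
    List<String> aggregationNames = ImmutableList.of("a0");

    List<String> columnOrder = ImmutableList.copyOf(
        Iterators.concat(dimensionNames.iterator(), aggregationNames.iterator())
    );

    System.out.println(columnOrder); // prints [d0, d1, a0]
  }
}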
Use of org.apache.druid.query.filter.DimFilter in project druid by druid-io.
From the class DruidQuery, method fromPartialQuery:
public static DruidQuery fromPartialQuery(
    final PartialDruidQuery partialQuery,
    final DataSource dataSource,
    final RowSignature sourceRowSignature,
    final PlannerContext plannerContext,
    final RexBuilder rexBuilder,
    final boolean finalizeAggregations,
    @Nullable VirtualColumnRegistry virtualColumnRegistry
)
{
  final RelDataType outputRowType = partialQuery.leafRel().getRowType();
  if (virtualColumnRegistry == null) {
    virtualColumnRegistry = VirtualColumnRegistry.create(sourceRowSignature, plannerContext.getExprMacroTable());
  }

  // Now the fun begins.
  final DimFilter filter;
  final Projection selectProjection;
  final Grouping grouping;
  final Sorting sorting;

  if (partialQuery.getWhereFilter() != null) {
    filter = Preconditions.checkNotNull(
        computeWhereFilter(partialQuery, plannerContext, sourceRowSignature, virtualColumnRegistry)
    );
  } else {
    filter = null;
  }

  // Only compute "selectProjection" if this is a non-aggregating query. (For aggregating queries, "grouping" will
  // reflect select-project from partialQuery on its own.)
  if (partialQuery.getSelectProject() != null && partialQuery.getAggregate() == null) {
    selectProjection = Preconditions.checkNotNull(
        computeSelectProjection(
            partialQuery,
            plannerContext,
            computeOutputRowSignature(sourceRowSignature, null, null, null),
            virtualColumnRegistry
        )
    );
  } else {
    selectProjection = null;
  }

  if (partialQuery.getAggregate() != null) {
    grouping = Preconditions.checkNotNull(
        computeGrouping(
            partialQuery,
            plannerContext,
            computeOutputRowSignature(sourceRowSignature, null, null, null),
            virtualColumnRegistry,
            rexBuilder,
            finalizeAggregations
        )
    );
  } else {
    grouping = null;
  }

  if (partialQuery.getSort() != null) {
    sorting = Preconditions.checkNotNull(
        computeSorting(
            partialQuery,
            plannerContext,
            computeOutputRowSignature(sourceRowSignature, selectProjection, grouping, null),
            // When sorting follows grouping, virtual columns cannot be used
            partialQuery.getAggregate() != null ? null : virtualColumnRegistry
        )
    );
  } else {
    sorting = null;
  }

  return new DruidQuery(
      dataSource, plannerContext, filter, selectProjection, grouping, sorting,
      sourceRowSignature, outputRowType, virtualColumnRegistry
  );
}
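For orientation, a hedged sketch of a call site (the wrapper class and its choice of arguments are assumptions, not Druid source). Passing null as the final argument exercises the lazy-creation branch at the top of the method:

import org.apache.calcite.rex.RexBuilder;
import org.apache.druid.query.DataSource;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.rel.DruidQuery;
import org.apache.druid.sql.calcite.rel.PartialDruidQuery;

public class FromPartialQuerySketch
{
  // Hypothetical wrapper, for illustration only.
  static DruidQuery translate(
      final PartialDruidQuery partialQuery,
      final DataSource dataSource,
      final RowSignature sourceRowSignature,
      final PlannerContext plannerContext,
      final RexBuilder rexBuilder
  )
  {
    return DruidQuery.fromPartialQuery(
        partialQuery,
        dataSource,
        sourceRowSignature,
        plannerContext,
        rexBuilder,
        true, // finalizeAggregations
        null  // virtualColumnRegistry: created internally when null
    );
  }
}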
Use of org.apache.druid.query.filter.DimFilter in project druid by druid-io.
From the class NativeQueryMaker, method runQuery:
@Override
public Sequence<Object[]> runQuery(final DruidQuery druidQuery)
{
  final Query<?> query = druidQuery.getQuery();

  if (plannerContext.getPlannerConfig().isRequireTimeCondition()
      && !(druidQuery.getDataSource() instanceof InlineDataSource)) {
    if (Intervals.ONLY_ETERNITY.equals(findBaseDataSourceIntervals(query))) {
      throw new CannotBuildQueryException(
          "requireTimeCondition is enabled, all queries must include a filter condition on the __time column"
      );
    }
  }

  int numFilters = plannerContext.getPlannerConfig().getMaxNumericInFilters();

  // Instead of IN(v1,v2,v3) user should specify IN('v1','v2','v3')
  if (numFilters != PlannerConfig.NUM_FILTER_NOT_USED) {
    if (query.getFilter() instanceof OrDimFilter) {
      OrDimFilter orDimFilter = (OrDimFilter) query.getFilter();
      int numBoundFilters = 0;
      for (DimFilter filter : orDimFilter.getFields()) {
        numBoundFilters += filter instanceof BoundDimFilter ? 1 : 0;
      }
      if (numBoundFilters > numFilters) {
        String dimension = ((BoundDimFilter) (orDimFilter.getFields().get(0))).getDimension();
        throw new UOE(StringUtils.format(
            "The number of values in the IN clause for [%s] in query exceeds configured maxNumericFilter limit of [%s] for INs. Cast [%s] values of IN clause to String",
            dimension,
            numFilters,
            orDimFilter.getFields().size()
        ));
      }
    }
  }

  final List<String> rowOrder;
  if (query instanceof TimeseriesQuery && !druidQuery.getGrouping().getDimensions().isEmpty()) {
    // Hack for timeseries queries: when generating them, DruidQuery.toTimeseriesQuery translates a dimension
    // based on a timestamp_floor expression into a 'granularity'. This is not reflected in the druidQuery's
    // output row signature, so we have to account for it here.
    // TODO: We can remove this once https://github.com/apache/druid/issues/9974 is done.
    final String timeDimension = Iterables.getOnlyElement(druidQuery.getGrouping().getDimensions()).getOutputName();
    rowOrder = druidQuery.getOutputRowSignature()
                         .getColumnNames()
                         .stream()
                         .map(f -> timeDimension.equals(f) ? ColumnHolder.TIME_COLUMN_NAME : f)
                         .collect(Collectors.toList());
  } else {
    rowOrder = druidQuery.getOutputRowSignature().getColumnNames();
  }

  final List<SqlTypeName> columnTypes = druidQuery.getOutputRowType()
                                                  .getFieldList()
                                                  .stream()
                                                  .map(f -> f.getType().getSqlTypeName())
                                                  .collect(Collectors.toList());

  return execute(query, mapColumnList(rowOrder, fieldMapping), mapColumnList(columnTypes, fieldMapping));
}
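The guard above only counts BoundDimFilter children of a top-level OrDimFilter, which is how SQL numeric IN lists are planned. A minimal, self-contained sketch of that counting logic (the class name and the limit value are illustrative, not from the Druid source):

import com.google.common.collect.ImmutableList;
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.ordering.StringComparators;

public class MaxNumericInFiltersSketch
{
  // Numeric equality expressed as a bound with lower == upper, both inclusive.
  private static DimFilter numericEquals(String dimension, String value)
  {
    return new BoundDimFilter(dimension, value, value, false, false, null, null, StringComparators.NUMERIC);
  }

  public static void main(String[] args)
  {
    // SQL "WHERE x IN (1, 2, 3)" on a numeric column arrives as an OR of bounds.
    OrDimFilter orDimFilter = new OrDimFilter(ImmutableList.of(
        numericEquals("x", "1"),
        numericEquals("x", "2"),
        numericEquals("x", "3")
    ));

    int numBoundFilters = 0;
    for (DimFilter filter : orDimFilter.getFields()) {
      numBoundFilters += filter instanceof BoundDimFilter ? 1 : 0;
    }

    int maxNumericInFilters = 2; // hypothetical configured limit
    System.out.println(
        numBoundFilters > maxNumericInFilters
        ? "rejected: " + numBoundFilters + " numeric IN values exceed limit of " + maxNumericInFilters
        : "accepted"
    );
  }
}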
Use of org.apache.druid.query.filter.DimFilter in project druid by druid-io.
From the class DruidQueryTest, method test_filtration_intervalsInBothFilters:
@Test
public void test_filtration_intervalsInBothFilters()
{
  DataSource dataSource = join(JoinType.INNER, filterWithInterval);
  DataSource expectedDataSource = join(JoinType.INNER, selectorFilter);
  DimFilter queryFilter = new AndDimFilter(
      otherFilter,
      new BoundDimFilter("__time", "150", "250", false, true, null, null, StringComparators.NUMERIC)
  );
  Pair<DataSource, Filtration> pair = DruidQuery.getFiltration(
      dataSource,
      queryFilter,
      VirtualColumnRegistry.create(RowSignature.empty(), TestExprMacroTable.INSTANCE)
  );
  verify(pair, expectedDataSource, otherFilter, Intervals.utc(150, 200));
}
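The bound on __time, with lowerStrict=false and upperStrict=true, describes the half-open millisecond range [150, 250). The test name indicates a second interval comes from the filterWithInterval join fixture, so the verified result, Intervals.utc(150, 200), is the intersection of the two. A small sketch of the bound-to-interval correspondence (the class name is illustrative):

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.ordering.StringComparators;
import org.joda.time.Interval;

public class TimeBoundIntervalSketch
{
  public static void main(String[] args)
  {
    // lowerStrict=false, upperStrict=true => 150 <= __time < 250, the same
    // half-open convention Druid intervals use.
    BoundDimFilter timeBound =
        new BoundDimFilter("__time", "150", "250", false, true, null, null, StringComparators.NUMERIC);

    Interval extracted = Intervals.utc(150, 250);
    // Intersecting with a fixture interval ending at 200 would leave [150, 200).
    System.out.println(timeBound.getDimension() + " covers " + extracted);
  }
}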
Use of org.apache.druid.query.filter.DimFilter in project druid by druid-io.
From the class ServerManagerTest, method testGetQueryRunnerForSegmentsForUnknownQueryThrowingException:
@Test
public void testGetQueryRunnerForSegmentsForUnknownQueryThrowingException()
{
  final Interval interval = Intervals.of("P1d/2011-04-01");
  final List<SegmentDescriptor> descriptors = Collections.singletonList(new SegmentDescriptor(interval, "1", 0));
  expectedException.expect(QueryUnsupportedException.class);
  expectedException.expectMessage("Unknown query type");
  serverManager.getQueryRunnerForSegments(
      new BaseQuery<Object>(new TableDataSource("test"), new MultipleSpecificSegmentSpec(descriptors), false, new HashMap<>())
      {
        @Override
        public boolean hasFilters()
        {
          return false;
        }

        @Override
        public DimFilter getFilter()
        {
          return null;
        }

        @Override
        public String getType()
        {
          return null;
        }

        @Override
        public Query<Object> withOverriddenContext(Map<String, Object> contextOverride)
        {
          return null;
        }

        @Override
        public Query<Object> withQuerySegmentSpec(QuerySegmentSpec spec)
        {
          return null;
        }

        @Override
        public Query<Object> withDataSource(DataSource dataSource)
        {
          return null;
        }
      },
      descriptors
  );
}
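The anonymous query here returns null from getType() and is not registered with any query runner factory, so the server's factory lookup fails and it rejects the query up front. A hedged sketch of that check (the helper and message format are assumptions; the real ServerManager resolves the factory through its QueryRunnerFactoryConglomerate):

import org.apache.druid.query.Query;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.QueryUnsupportedException;

public class FactoryLookupSketch
{
  // Hypothetical helper mirroring what the test exercises: an unregistered
  // query type resolves to no factory and is rejected instead of being run.
  static <T> QueryRunnerFactory<T, Query<T>> requireFactory(
      final QueryRunnerFactoryConglomerate conglomerate,
      final Query<T> query
  )
  {
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    if (factory == null) {
      throw new QueryUnsupportedException("Unknown query type, [" + query.getClass() + "]");
    }
    return factory;
  }
}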