Use of org.apache.druid.query.groupby.having.DimFilterHavingSpec in project druid by druid-io.
From class GroupByQueryRunnerTest, method testGroupByWithHavingSpecOnLongAndFloat.
@Test
public void testGroupByWithHavingSpecOnLongAndFloat()
{
  if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
  }

  GroupByQuery query = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(
          new DefaultDimensionSpec("market", "alias"),
          new DefaultDimensionSpec("qualityLong", "ql_alias", ColumnType.LONG),
          new DefaultDimensionSpec("__time", "time_alias", ColumnType.LONG),
          new DefaultDimensionSpec("index", "index_alias", ColumnType.FLOAT)
      )
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
      .setHavingSpec(
          new DimFilterHavingSpec(
              new AndDimFilter(
                  Lists.newArrayList(
                      new SelectorDimFilter("ql_alias", "1400", null),
                      new SelectorDimFilter("time_alias", "1301616000000", null),
                      new BoundDimFilter("index_alias", "1310.0", "1320.0", true, true, null, null, StringComparators.NUMERIC)
                  )
              ),
              null
          )
      )
      .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
      .build();

  List<ResultRow> expectedResults = Collections.singletonList(
      makeRow(
          query,
          "2011-04-01",
          "alias", "total_market",
          "time_alias", 1301616000000L,
          "index_alias", 1314.8397,
          "ql_alias", 1400L,
          "rows", 1L
      )
  );

  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "havingspec-long-float");
}
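For reference, the having spec above composes like any other filter. A minimal standalone sketch (it assumes the Druid filter classes already imported by these tests; the second constructor argument is, by assumption, the "finalize" flag controlling whether aggregator values are finalized before the filter runs — the tests pass null to take the default):

// Minimal sketch: a DimFilterHavingSpec matching grouped rows where ql_alias = 1400.
DimFilterHavingSpec having = new DimFilterHavingSpec(
    new SelectorDimFilter("ql_alias", "1400", null),  // filter applies to aggregated output columns
    null                                              // finalize flag (assumption): null = default behavior
);

Any DimFilter nests here, which is how the test combines two selectors and a numeric bound under an AndDimFilter.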
Use of org.apache.druid.query.groupby.having.DimFilterHavingSpec in project druid by druid-io.
From class GroupByQueryQueryToolChestTest, method testResultLevelCacheKeyWithHavingDimFilterHavingSpec.
@Test
public void testResultLevelCacheKeyWithHavingDimFilterHavingSpec()
{
  final DimFilterHavingSpec havingSpec1 = new DimFilterHavingSpec(
      new AndDimFilter(
          ImmutableList.of(
              new OrDimFilter(
                  ImmutableList.of(
                      new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                      new SelectorDimFilter("idx", "217", null)
                  )
              ),
              new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
          )
      ),
      null
  );
  final DimFilterHavingSpec havingSpec2 = new DimFilterHavingSpec(
      new AndDimFilter(
          ImmutableList.of(
              new OrDimFilter(
                  ImmutableList.of(
                      new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                      new SelectorDimFilter("idx", "317", null)
                  )
              ),
              new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
          )
      ),
      null
  );

  final GroupByQuery query1 = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setPostAggregatorSpecs(ImmutableList.of(new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setLimitSpec(
          new DefaultLimitSpec(
              ImmutableList.of(new OrderByColumnSpec("post", OrderByColumnSpec.Direction.DESCENDING)),
              Integer.MAX_VALUE
          )
      )
      .setHavingSpec(havingSpec1)
      .build();
  final GroupByQuery query2 = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setPostAggregatorSpecs(ImmutableList.of(new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .setLimitSpec(
          new DefaultLimitSpec(
              ImmutableList.of(new OrderByColumnSpec("post", OrderByColumnSpec.Direction.DESCENDING)),
              Integer.MAX_VALUE
          )
      )
      .setHavingSpec(havingSpec2)
      .build();

  final CacheStrategy<ResultRow, Object, GroupByQuery> strategy1 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query1);
  final CacheStrategy<ResultRow, Object, GroupByQuery> strategy2 = new GroupByQueryQueryToolChest(null).getCacheStrategy(query2);

  Assert.assertTrue(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
  Assert.assertFalse(Arrays.equals(strategy1.computeResultLevelCacheKey(query1), strategy2.computeResultLevelCacheKey(query2)));
}
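The two queries differ only in their havingSpec, which is exactly what the assertions exercise: the per-segment cache key ignores the HAVING clause, since having filters are applied when partial results are merged rather than when segments are scanned, while the result-level cache key must cover it. A small sanity-check sketch under the same fixtures:

// Sketch: both key types are stable for one and the same query.
final CacheStrategy<ResultRow, Object, GroupByQuery> strategy =
    new GroupByQueryQueryToolChest(null).getCacheStrategy(query1);
Assert.assertArrayEquals(strategy.computeCacheKey(query1), strategy.computeCacheKey(query1));
Assert.assertArrayEquals(strategy.computeResultLevelCacheKey(query1), strategy.computeResultLevelCacheKey(query1));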
Use of org.apache.druid.query.groupby.having.DimFilterHavingSpec in project druid by druid-io.
From class GroupByQueryRunnerTest, method testDimFilterHavingSpecWithExtractionFns.
@Test
public void testDimFilterHavingSpecWithExtractionFns()
{
  String extractionJsFn = "function(str) { return 'super-' + str; }";
  ExtractionFn extractionFn = new JavaScriptExtractionFn(extractionJsFn, false, JavaScriptConfig.getEnabledInstance());

  String extractionJsFn2 = "function(num) { return num + 10; }";
  ExtractionFn extractionFn2 = new JavaScriptExtractionFn(extractionJsFn2, false, JavaScriptConfig.getEnabledInstance());

  final DimFilterHavingSpec havingSpec = new DimFilterHavingSpec(
      new OrDimFilter(
          ImmutableList.of(
              new BoundDimFilter("rows", "12", null, true, false, null, extractionFn2, StringComparators.NUMERIC),
              new SelectorDimFilter("idx", "super-217", extractionFn)
          )
      ),
      null
  );

  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(havingSpec);
  final GroupByQuery fullQuery = builder.build();

  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(fullQuery, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
      makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
      makeRow(fullQuery, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L)
  );

  TestHelper.assertExpectedObjects(
      expectedResults,
      GroupByQueryRunnerTestHelper.runQuery(factory, runner, fullQuery),
      "extractionfn-havingspec"
  );
}
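The extraction functions run against the aggregated values before the having filter compares them: rows of 6 pass the lower bound "12" because the filter sees 6 + 10 = 16, while "business" (rows = 2) passes through the selector branch instead, since its idx of 217 extracts to "super-217". A sketch of the string extraction in isolation (same test dependencies assumed; JavaScript-based extraction must be enabled, which JavaScriptConfig.getEnabledInstance() provides for tests):

ExtractionFn fn = new JavaScriptExtractionFn(
    "function(str) { return 'super-' + str; }",
    false,                                 // not injective
    JavaScriptConfig.getEnabledInstance()  // enables JavaScript for tests
);
// The selector filter compares against the extracted value,
// so idx = 217 matches the target "super-217".
String extracted = fn.apply("217");  // "super-217"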
Use of org.apache.druid.query.groupby.having.DimFilterHavingSpec in project druid by druid-io.
From class GroupByQueryRunnerTest, method testDimFilterHavingSpec.
@Test
public void testDimFilterHavingSpec()
{
  final DimFilterHavingSpec havingSpec = new DimFilterHavingSpec(
      new AndDimFilter(
          ImmutableList.of(
              new OrDimFilter(
                  ImmutableList.of(
                      new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                      new SelectorDimFilter("idx", "217", null)
                  )
              ),
              new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
          )
      ),
      null
  );

  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(
          QueryRunnerTestHelper.ROWS_COUNT,
          new LongSumAggregatorFactory("idx", "index"),
          QueryRunnerTestHelper.INDEX_LONG_MIN,
          QueryRunnerTestHelper.INDEX_LONG_MAX,
          QueryRunnerTestHelper.INDEX_DOUBLE_MIN,
          QueryRunnerTestHelper.INDEX_DOUBLE_MAX,
          QueryRunnerTestHelper.INDEX_FLOAT_MIN,
          QueryRunnerTestHelper.INDEX_FLOAT_MAX
      )
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(havingSpec);
  final GroupByQuery fullQuery = builder.build();

  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(
          fullQuery, "2011-04-01",
          "alias", "business", "rows", 2L, "idx", 217L,
          QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 105L,
          QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 112L,
          QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 105.735462D,
          QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 112.987027D,
          QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 105.73546F,
          QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 112.98703F
      ),
      makeRow(
          fullQuery, "2011-04-01",
          "alias", "mezzanine", "rows", 6L, "idx", 4420L,
          QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 107L,
          QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 1193L,
          QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 107.047773D,
          QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 1193.556278D,
          QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 107.047775F,
          QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 1193.5563F
      ),
      makeRow(
          fullQuery, "2011-04-01",
          "alias", "premium", "rows", 6L, "idx", 4416L,
          QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 122L,
          QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 1321L,
          QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 122.141707D,
          QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 1321.375057D,
          QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 122.14171F,
          QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 1321.375F
      )
  );

  TestHelper.assertExpectedObjects(
      expectedResults,
      GroupByQueryRunnerTestHelper.runQuery(factory, runner, fullQuery),
      "dimfilter-havingspec"
  );
}
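Spelled out argument by argument, the bound filter used in the having spec above reads as follows (the comments name each argument; the names are illustrative, inferred from the constructor order used throughout these tests):

new BoundDimFilter(
    "rows",                    // dimension: here an aggregated output column
    "2",                       // lower bound
    null,                      // upper bound: none
    true,                      // lowerStrict: rows > 2, not >= 2
    false,                     // upperStrict: irrelevant with no upper bound
    null,                      // legacy alphaNumeric flag, superseded by the comparator
    null,                      // extractionFn: none
    StringComparators.NUMERIC  // compare "2" numerically, not lexicographically
);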
Use of org.apache.druid.query.groupby.having.DimFilterHavingSpec in project druid by druid-io.
From class DruidQuery, method toGroupByQuery.
/**
* Return this query as a GroupBy query, or null if this query is not compatible with GroupBy.
*
* @return query or null
*/
@Nullable
private GroupByQuery toGroupByQuery(final QueryFeatureInspector queryFeatureInspector)
{
  if (grouping == null) {
    return null;
  }

  if (sorting != null && sorting.getOffsetLimit().hasLimit() && sorting.getOffsetLimit().getLimit() <= 0) {
    // Cannot handle zero or negative limits.
    return null;
  }

  final Pair<DataSource, Filtration> dataSourceFiltrationPair = getFiltration(dataSource, filter, virtualColumnRegistry);
  final DataSource newDataSource = dataSourceFiltrationPair.lhs;
  final Filtration filtration = dataSourceFiltrationPair.rhs;

  final DimFilterHavingSpec havingSpec;
  if (grouping.getHavingFilter() != null) {
    havingSpec = new DimFilterHavingSpec(
        Filtration.create(grouping.getHavingFilter())
                  .optimizeFilterOnly(grouping.getOutputRowSignature())
                  .getDimFilter(),
        true
    );
  } else {
    havingSpec = null;
  }

  final List<PostAggregator> postAggregators = new ArrayList<>(grouping.getPostAggregators());
  if (sorting != null && sorting.getProjection() != null) {
    postAggregators.addAll(sorting.getProjection().getPostAggregators());
  }

  GroupByQuery query = new GroupByQuery(
      newDataSource,
      filtration.getQuerySegmentSpec(),
      getVirtualColumns(true),
      filtration.getDimFilter(),
      Granularities.ALL,
      grouping.getDimensionSpecs(),
      grouping.getAggregatorFactories(),
      postAggregators,
      havingSpec,
      Optional.ofNullable(sorting).orElse(Sorting.none()).limitSpec(),
      grouping.getSubtotals().toSubtotalsSpec(grouping.getDimensionSpecs()),
      ImmutableSortedMap.copyOf(plannerContext.getQueryContext())
  );

  // We don't apply the timestamp computation optimization yet when the limit is pushed down. Maybe someday.
  if (query.getLimitSpec() instanceof DefaultLimitSpec && query.isApplyLimitPushDown()) {
    return query;
  }

  Map<String, Object> theContext = new HashMap<>();
  Granularity queryGranularity = null;

  // Part of the query-plan logic is now handled in GroupByStrategyV2.
  if (!grouping.getDimensions().isEmpty()) {
    for (DimensionExpression dimensionExpression : grouping.getDimensions()) {
      Granularity granularity = Expressions.toQueryGranularity(dimensionExpression.getDruidExpression(), plannerContext.getExprMacroTable());
      if (granularity == null) {
        continue;
      }
      if (queryGranularity != null) {
        // Grouping on more than one timestamp_floor expression,
        // e.g. GROUP BY floor(__time TO DAY), floor(__time TO HOUR),
        // cannot be mapped to a single query granularity; bail out.
        queryGranularity = null;
        break;
      }
      queryGranularity = granularity;
      int timestampDimensionIndexInDimensions = grouping.getDimensions().indexOf(dimensionExpression);

      // These settings only affect the innermost query sent to the downstream compute nodes.
      theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD, dimensionExpression.getOutputName());
      theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_INDEX, timestampDimensionIndexInDimensions);
      theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_GRANULARITY, queryGranularity);
    }
  }

  if (queryGranularity == null) {
    return query;
  }
  return query.withOverriddenContext(theContext);
}
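To illustrate the granularity loop above: for a query that groups on a single timestamp_floor expression (say, an output column "d0" at daily granularity), the context handed to withOverriddenContext would carry three entries. A hypothetical sketch reusing the same constants ("d0" and index 0 are illustrative values, not taken from the source):

Map<String, Object> theContext = new HashMap<>();
theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD, "d0");      // output name of the floor dimension
theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_INDEX, 0);   // its position among the dimensions
theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_GRANULARITY, Granularities.DAY);
// Per the comment in the method body, GroupByStrategyV2 picks these up
// so the innermost query can run at the coarser granularity.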