Use of io.druid.query.topn.TopNQuery in project druid by druid-io.
From the class MultiValuedDimensionTest, method testTopNWithDimFilterAndWithFilteredDimSpec:
@Test
public void testTopNWithDimFilterAndWithFilteredDimSpec() throws Exception {
  TopNQuery query = new TopNQueryBuilder()
      .dataSource("xx")
      .granularity(Granularities.ALL)
      .dimension(new ListFilteredDimensionSpec(new DefaultDimensionSpec("tags", "tags"), ImmutableSet.of("t3"), null))
      .metric("count")
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(Arrays.asList(new AggregatorFactory[] { new CountAggregatorFactory("count") }))
      .threshold(5)
      .filters(new SelectorDimFilter("tags", "t3", null))
      .build();
  QueryRunnerFactory factory = new TopNQueryRunnerFactory(
      TestQueryRunners.getPool(),
      new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER);
  QueryRunner<Result<TopNResultValue>> runner = QueryRunnerTestHelper.makeQueryRunner(
      factory, new QueryableIndexSegment("sid1", queryableIndex), null);
  Map<String, Object> context = Maps.newHashMap();
  Sequence<Result<TopNResultValue>> result = runner.run(query, context);
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(new Result<TopNResultValue>(
      new DateTime("2011-01-12T00:00:00.000Z"),
      new TopNResultValue(Arrays.<Map<String, Object>>asList(ImmutableMap.<String, Object>of("tags", "t3", "count", 2L)))));
  TestHelper.assertExpectedObjects(expectedResults, Sequences.toList(result, new ArrayList<Result<TopNResultValue>>()), "");
}
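The combination above matters because "tags" is multi-valued: the SelectorDimFilter only picks rows whose tags contain "t3", while the ListFilteredDimensionSpec restricts the emitted dimension values themselves to "t3" (passing null as the third constructor argument behaves as a whitelist here, as the expected count of 2 shows). A minimal sketch of the two filtering modes of that dimension spec, reusing the constructor from the test; the explicit true/false flags are the only assumption beyond what the test shows:

// Whitelist: only the value "t3" is emitted from the multi-valued "tags" column.
DimensionSpec whitelisted = new ListFilteredDimensionSpec(
    new DefaultDimensionSpec("tags", "tags"), ImmutableSet.of("t3"), true);

// Blacklist: every "tags" value except "t3" is emitted.
DimensionSpec blacklisted = new ListFilteredDimensionSpec(
    new DefaultDimensionSpec("tags", "tags"), ImmutableSet.of("t3"), false);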
Use of io.druid.query.topn.TopNQuery in project druid by druid-io.
From the class AppendTest, method testFilteredTopNSeries:
@Test
public void testFilteredTopNSeries() {
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(new Result<TopNResultValue>(
      new DateTime("2011-01-12T00:00:00.000Z"),
      new TopNResultValue(Arrays.<Map<String, Object>>asList(
          ImmutableMap.<String, Object>builder()
              .put("market", "spot")
              .put("rows", 1L)
              .put("index", 100.0D)
              .put("addRowsIndexConstant", 102.0D)
              .put("uniques", 0.0D)
              .put("maxIndex", 100.0)
              .put("minIndex", 100.0)
              .build()))));
  TopNQuery query = makeFilteredTopNQuery();
  QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment);
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
}
Use of io.druid.query.topn.TopNQuery in project druid by druid-io.
From the class AppendTest, method testFilteredTopNSeries2:
@Test
public void testFilteredTopNSeries2() {
  List<Result<TopNResultValue>> expectedResults = Arrays.asList(new Result<TopNResultValue>(
      new DateTime("2011-01-12T00:00:00.000Z"),
      new TopNResultValue(Lists.<Map<String, Object>>newArrayList())));
  TopNQuery query = makeFilteredTopNQuery();
  QueryRunner runner = TestQueryRunners.makeTopNQueryRunner(segment2);
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
}
Use of io.druid.query.topn.TopNQuery in project druid by druid-io.
From the class QueryMaker, method runQuery:
public Sequence<Object[]> runQuery(
    final DataSource dataSource,
    final RowSignature sourceRowSignature,
    final DruidQueryBuilder queryBuilder) {
  if (dataSource instanceof QueryDataSource) {
    final GroupByQuery outerQuery = queryBuilder.toGroupByQuery(dataSource, sourceRowSignature, plannerContext.getQueryContext());
    if (outerQuery == null) {
      // Bug in the planner rules. They shouldn't allow this to happen.
      throw new IllegalStateException("Can't use QueryDataSource without an outer groupBy query!");
    }
    return executeGroupBy(queryBuilder, outerQuery);
  }
  final TimeseriesQuery timeseriesQuery = queryBuilder.toTimeseriesQuery(dataSource, sourceRowSignature, plannerContext.getQueryContext());
  if (timeseriesQuery != null) {
    return executeTimeseries(queryBuilder, timeseriesQuery);
  }
  final TopNQuery topNQuery = queryBuilder.toTopNQuery(
      dataSource,
      sourceRowSignature,
      plannerContext.getQueryContext(),
      plannerContext.getPlannerConfig().getMaxTopNLimit(),
      plannerContext.getPlannerConfig().isUseApproximateTopN());
  if (topNQuery != null) {
    return executeTopN(queryBuilder, topNQuery);
  }
  final GroupByQuery groupByQuery = queryBuilder.toGroupByQuery(dataSource, sourceRowSignature, plannerContext.getQueryContext());
  if (groupByQuery != null) {
    return executeGroupBy(queryBuilder, groupByQuery);
  }
  final SelectQuery selectQuery = queryBuilder.toSelectQuery(dataSource, sourceRowSignature, plannerContext.getQueryContext());
  if (selectQuery != null) {
    return executeSelect(queryBuilder, selectQuery);
  }
  throw new IllegalStateException("WTF?! Cannot execute query even though we planned it?");
}
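The checks run in order of engine preference: an inner QueryDataSource forces an outer groupBy; otherwise timeseries is tried first, then topN, then groupBy, with select as the fallback, and the first to*Query method that returns non-null wins. Whichever branch executes, the caller gets back a Sequence<Object[]> of result rows. A hedged sketch of consuming that result; the queryMaker, dataSource, sourceRowSignature, and queryBuilder instances are assumed to exist, and only runQuery and Sequences.toList come from this listing:

// Run the planned query and materialize the lazily evaluated Sequence into a List.
Sequence<Object[]> rows = queryMaker.runQuery(dataSource, sourceRowSignature, queryBuilder);
List<Object[]> materialized = Sequences.toList(rows, new ArrayList<Object[]>());
for (Object[] row : materialized) {
  // Each Object[] is one result row of the planned query.
  System.out.println(Arrays.toString(row));
}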
Use of io.druid.query.topn.TopNQuery in project druid by druid-io.
From the class DruidQueryBuilder, method toTopNQuery:
/**
* Return this query as a TopN query, or null if this query is not compatible with TopN.
*
* @param dataSource data source to query
* @param sourceRowSignature row signature of the dataSource
* @param context query context
* @param maxTopNLimit maxTopNLimit from a PlannerConfig
* @param useApproximateTopN from a PlannerConfig
*
* @return query or null
*/
public TopNQuery toTopNQuery(
    final DataSource dataSource,
    final RowSignature sourceRowSignature,
    final Map<String, Object> context,
    final int maxTopNLimit,
    final boolean useApproximateTopN) {
  // Must have GROUP BY one column, ORDER BY zero or one column, limit no greater than maxTopNLimit, and no HAVING.
  final boolean topNOk = grouping != null
      && grouping.getDimensions().size() == 1
      && limitSpec != null
      && (limitSpec.getColumns().size() <= 1 && limitSpec.getLimit() <= maxTopNLimit)
      && having == null;
  if (!topNOk) {
    return null;
  }
  final DimensionSpec dimensionSpec = Iterables.getOnlyElement(grouping.getDimensions());
  final OrderByColumnSpec limitColumn;
  if (limitSpec.getColumns().isEmpty()) {
    limitColumn = new OrderByColumnSpec(dimensionSpec.getOutputName(), OrderByColumnSpec.Direction.ASCENDING, StringComparators.LEXICOGRAPHIC);
  } else {
    limitColumn = Iterables.getOnlyElement(limitSpec.getColumns());
  }
  final TopNMetricSpec topNMetricSpec;
  if (limitColumn.getDimension().equals(dimensionSpec.getOutputName())) {
    // DimensionTopNMetricSpec is exact; always return it even if allowApproximate is false.
    final DimensionTopNMetricSpec baseMetricSpec = new DimensionTopNMetricSpec(null, limitColumn.getDimensionComparator());
    topNMetricSpec = limitColumn.getDirection() == OrderByColumnSpec.Direction.ASCENDING
        ? baseMetricSpec
        : new InvertedTopNMetricSpec(baseMetricSpec);
  } else if (useApproximateTopN) {
    // ORDER BY metric
    final NumericTopNMetricSpec baseMetricSpec = new NumericTopNMetricSpec(limitColumn.getDimension());
    topNMetricSpec = limitColumn.getDirection() == OrderByColumnSpec.Direction.ASCENDING
        ? new InvertedTopNMetricSpec(baseMetricSpec)
        : baseMetricSpec;
  } else {
    return null;
  }
  final Filtration filtration = Filtration.create(filter).optimize(sourceRowSignature);
  return new TopNQuery(
      dataSource,
      VirtualColumns.EMPTY,
      Iterables.getOnlyElement(grouping.getDimensions()),
      topNMetricSpec,
      limitSpec.getLimit(),
      filtration.getQuerySegmentSpec(),
      filtration.getDimFilter(),
      Granularities.ALL,
      grouping.getAggregatorFactories(),
      grouping.getPostAggregators(),
      context);
}
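The metric spec encodes the ORDER BY target: ordering on the grouped dimension maps to the exact DimensionTopNMetricSpec, ordering on a metric maps to the approximate NumericTopNMetricSpec (hence the useApproximateTopN gate), and InvertedTopNMetricSpec flips the base spec's natural order (ascending for the dimension comparator, descending for a numeric metric). A minimal sketch of the shapes this method can produce, using only the constructors shown above; "count" stands in for whatever metric the ORDER BY names:

// ORDER BY <dimension> ASC: exact ordering using the dimension comparator (lexicographic by default).
TopNMetricSpec byDimensionAsc = new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC);

// ORDER BY <dimension> DESC: the same spec, inverted.
TopNMetricSpec byDimensionDesc = new InvertedTopNMetricSpec(byDimensionAsc);

// ORDER BY <metric> DESC: approximate ordering on an aggregator's output ("count" is a placeholder).
TopNMetricSpec byMetricDesc = new NumericTopNMetricSpec("count");

// ORDER BY <metric> ASC: the numeric spec, inverted.
TopNMetricSpec byMetricAsc = new InvertedTopNMetricSpec(byMetricDesc);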