Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
In class GroupByQueryRunnerTest, method testBySegmentResultsOptimizedDimextraction.
@Test
public void testBySegmentResultsOptimizedDimextraction()
{
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(
          new ExtractionDimensionSpec(
              "quality",
              "alias",
              new LookupExtractionFn(
                  new MapLookupExtractor(ImmutableMap.of("mezzanine", "mezzanine0"), false),
                  false,
                  null,
                  true,
                  false
              )
          )
      )
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
      .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
  final GroupByQuery fullQuery = builder.build();

  // Every segment returns the same single row, so the expected output is that
  // row repeated once per segment.
  int segmentCount = 32;
  Result<BySegmentResultValue<ResultRow>> singleSegmentResult = new Result<>(
      DateTimes.of("2011-01-12T00:00:00.000Z"),
      new BySegmentResultValueClass<>(
          Collections.singletonList(makeRow(fullQuery, "2011-04-01", "alias", "mezzanine0", "rows", 6L, "idx", 4420L)),
          QueryRunnerTestHelper.SEGMENT_ID.toString(),
          Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")
      )
  );
  List<Result> bySegmentResults = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    bySegmentResults.add(singleSegmentResult);
  }

  QueryToolChest toolChest = factory.getToolchest();
  List<QueryRunner<ResultRow>> singleSegmentRunners = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
  }

  // Reuse exec for mergeRunners so that shutdownNow() below stops the pool
  // the query actually ran on.
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner theRunner = toolChest.postMergeQueryDecoration(
      new FinalizeResultsQueryRunner<>(
          toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
          toolChest
      )
  );

  TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery)), "bySegment-dim-extraction");
  exec.shutdownNow();
}
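Note (not part of the original test): the runner built above follows the general decoration order Druid applies around a set of per-segment runners. A minimal sketch of that order as a reusable helper, using raw types as the test does and assuming the caller supplies the factory, executor, and runners:

// Sketch only: the decoration order exercised by the test above.
static QueryRunner decorate(QueryRunnerFactory factory, ExecutorService exec, List<QueryRunner<ResultRow>> runners)
{
  QueryToolChest toolChest = factory.getToolchest();
  return toolChest.postMergeQueryDecoration(          // e.g. re-wrap by-segment results
      new FinalizeResultsQueryRunner<>(
          toolChest.mergeResults(                     // merge the per-segment sequences
              factory.mergeRunners(exec, runners)     // execute segment runners on the pool
          ),
          toolChest
      )
  );
}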
Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
In class NestedQueryPushDownTest, method runNestedQueryWithForcePushDown.
private Sequence<ResultRow> runNestedQueryWithForcePushDown(GroupByQuery nestedQuery)
{
  ResponseContext context = ResponseContext.createEmpty();
  QueryToolChest<ResultRow, GroupByQuery> toolChest = groupByFactory.getToolchest();
  GroupByQuery pushDownQuery = nestedQuery;

  QueryRunner<ResultRow> segment1Runner = new FinalizeResultsQueryRunner<ResultRow>(
      toolChest.mergeResults(groupByFactory.mergeRunners(executorService, getQueryRunnerForSegment1())),
      (QueryToolChest) toolChest
  );
  QueryRunner<ResultRow> segment2Runner = new FinalizeResultsQueryRunner<ResultRow>(
      toolChest.mergeResults(groupByFactory2.mergeRunners(executorService, getQueryRunnerForSegment2())),
      (QueryToolChest) toolChest
  );

  // Deserialize each segment's metrics, then flat-merge the two segment
  // sequences into one sequence ordered by the query's result ordering.
  QueryRunner<ResultRow> queryRunnerForSegments = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(
          (queryPlus, responseContext) -> Sequences
              .simple(
                  ImmutableList.of(
                      Sequences.map(
                          segment1Runner.run(queryPlus, responseContext),
                          toolChest.makePreComputeManipulatorFn(
                              (GroupByQuery) queryPlus.getQuery(),
                              MetricManipulatorFns.deserializing()
                          )
                      ),
                      Sequences.map(
                          segment2Runner.run(queryPlus, responseContext),
                          toolChest.makePreComputeManipulatorFn(
                              (GroupByQuery) queryPlus.getQuery(),
                              MetricManipulatorFns.deserializing()
                          )
                      )
                  )
              )
              .flatMerge(Function.identity(), queryPlus.getQuery().getResultOrdering())
      ),
      (QueryToolChest) toolChest
  );

  GroupByStrategy strategy = ((GroupByQueryRunnerFactory) groupByFactory).getStrategySelector().strategize(nestedQuery);
  // Historicals execute the query with the force-push-down flag set to false.
  GroupByQuery queryWithPushDownDisabled = pushDownQuery.withOverriddenContext(
      ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_PUSH_DOWN_NESTED_QUERY, false)
  );
  Sequence<ResultRow> pushDownQueryResults = strategy.mergeResults(queryRunnerForSegments, queryWithPushDownDisabled, context);

  return toolChest.mergeResults((queryPlus, responseContext) -> pushDownQueryResults)
                  .run(QueryPlus.wrap(nestedQuery), context);
}
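For comparison, a hypothetical caller (not shown in the original test) would force push down by overriding the same context key on the nested query, this time with true:

// Hypothetical usage: enable force push down before running the nested query.
GroupByQuery forcedQuery = nestedQuery.withOverriddenContext(
    ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_PUSH_DOWN_NESTED_QUERY, true)
);
List<ResultRow> rows = runNestedQueryWithForcePushDown(forcedQuery).toList();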
Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
In class GroupByQueryRunnerTest, method testBySegmentResultsWithAllFiltersWithExtractionFns.
@Test
public void testBySegmentResultsWithAllFiltersWithExtractionFns()
{
  String extractionJsFn = "function(str) { return 'super-' + str; }";
  String jsFn = "function(x) { return(x === 'super-mezzanine') }";
  ExtractionFn extractionFn = new JavaScriptExtractionFn(extractionJsFn, false, JavaScriptConfig.getEnabledInstance());

  // Each filter type below matches quality = "mezzanine" once the extraction
  // fn has prefixed the value with "super-".
  List<DimFilter> superFilterList = new ArrayList<>();
  superFilterList.add(new SelectorDimFilter("quality", "super-mezzanine", extractionFn));
  superFilterList.add(new InDimFilter("quality", Arrays.asList("not-super-mezzanine", "FOOBAR", "super-mezzanine"), extractionFn));
  superFilterList.add(
      new BoundDimFilter("quality", "super-mezzanine", "super-mezzanine", false, false, true, extractionFn, StringComparators.ALPHANUMERIC)
  );
  superFilterList.add(new RegexDimFilter("quality", "super-mezzanine", extractionFn));
  superFilterList.add(new SearchQueryDimFilter("quality", new ContainsSearchQuerySpec("super-mezzanine", true), extractionFn));
  superFilterList.add(new JavaScriptDimFilter("quality", jsFn, extractionFn, JavaScriptConfig.getEnabledInstance()));
  DimFilter superFilter = new AndDimFilter(superFilterList);

  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setDimFilter(superFilter)
      .overrideContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
  final GroupByQuery fullQuery = builder.build();

  int segmentCount = 32;
  Result<BySegmentResultValue> singleSegmentResult = new Result<>(
      DateTimes.of("2011-01-12T00:00:00.000Z"),
      new BySegmentResultValueClass<>(
          Collections.singletonList(makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L)),
          QueryRunnerTestHelper.SEGMENT_ID.toString(),
          Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")
      )
  );
  List<Result> bySegmentResults = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    bySegmentResults.add(singleSegmentResult);
  }

  QueryToolChest toolChest = factory.getToolchest();
  List<QueryRunner<ResultRow>> singleSegmentRunners = new ArrayList<>();
  for (int i = 0; i < segmentCount; i++) {
    singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
  }

  // Reuse exec for mergeRunners so that shutdownNow() below stops the pool
  // the query actually ran on.
  ExecutorService exec = Executors.newCachedThreadPool();
  QueryRunner theRunner = toolChest.postMergeQueryDecoration(
      new FinalizeResultsQueryRunner<>(
          toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
          toolChest
      )
  );

  TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery)), "bySegment-filter");
  exec.shutdownNow();
}
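As a quick sanity check (a sketch, not part of the original test), the extraction function prefixes each dimension value with "super-", which is why every filter in the conjunction above matches the mezzanine rows. Note that JavaScript is disabled in Druid by default; the tests rely on JavaScriptConfig.getEnabledInstance().

// Sketch only: the JavaScript extraction fn maps "mezzanine" -> "super-mezzanine",
// the value each filter in superFilterList tests against.
ExtractionFn fn = new JavaScriptExtractionFn(
    "function(str) { return 'super-' + str; }",
    false,
    JavaScriptConfig.getEnabledInstance()
);
String transformed = fn.apply("mezzanine"); // "super-mezzanine"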
Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
In class GroupByQueryRunnerTestHelper, method runQuery.
public static <T> Iterable<T> runQuery(QueryRunnerFactory factory, QueryRunner runner, Query<T> query)
{
  QueryToolChest toolChest = factory.getToolchest();
  QueryRunner<T> theRunner = new FinalizeResultsQueryRunner<>(
      toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)),
      toolChest
  );
  Sequence<T> queryResult = theRunner.run(QueryPlus.wrap(query));
  return queryResult.toList();
}
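A hypothetical call site for this helper (names assumed to come from the test fixture): run a group-by query through the full pre-merge, merge, and finalize chain and materialize the rows.

// Hypothetical usage of runQuery; factory, runner, and query are supplied by the fixture.
Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
for (ResultRow row : results) {
  // rows here are merged and finalized
}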
Use of org.apache.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
In class GroupByTimeseriesQueryRunnerTest, method constructorFeeder.
@SuppressWarnings("unchecked")
@Parameterized.Parameters(name = "{0}, vectorize = {1}")
public static Iterable<Object[]> constructorFeeder()
{
  GroupByQueryConfig config = new GroupByQueryConfig();
  config.setMaxIntermediateRows(10000);

  final Pair<GroupByQueryRunnerFactory, Closer> factoryAndCloser = GroupByQueryRunnerTest.makeQueryRunnerFactory(config);
  final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
  RESOURCE_CLOSER.register(factoryAndCloser.rhs);

  final List<Object[]> constructors = new ArrayList<>();
  for (QueryRunner<ResultRow> runner : QueryRunnerTestHelper.makeQueryRunners(factory)) {
    // Adapt each group-by runner so it can execute timeseries queries by
    // rewriting them into equivalent group-by queries.
    final QueryRunner modifiedRunner = new QueryRunner()
    {
      @Override
      public Sequence run(QueryPlus queryPlus, ResponseContext responseContext)
      {
        TimeseriesQuery tsQuery = (TimeseriesQuery) queryPlus.getQuery();
        QueryRunner<ResultRow> newRunner = factory.mergeRunners(Execs.directExecutor(), ImmutableList.of(runner));
        QueryToolChest toolChest = factory.getToolchest();
        newRunner = new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(toolChest.preMergeQueryDecoration(newRunner)),
            toolChest
        );

        final String timeDimension = tsQuery.getTimestampResultField();
        final List<VirtualColumn> virtualColumns =
            new ArrayList<>(Arrays.asList(tsQuery.getVirtualColumns().getVirtualColumns()));
        Map<String, Object> theContext = tsQuery.getContext();
        if (timeDimension != null) {
          theContext = new HashMap<>(tsQuery.getContext());
          final PeriodGranularity granularity = (PeriodGranularity) tsQuery.getGranularity();
          // Floor __time to the query granularity via a virtual column, then
          // group on it as the timestamp result field.
          virtualColumns.add(
              new ExpressionVirtualColumn(
                  "v0",
                  StringUtils.format("timestamp_floor(__time, '%s')", granularity.getPeriod()),
                  ColumnType.LONG,
                  TestExprMacroTable.INSTANCE
              )
          );
          theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD, timeDimension);
          theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_GRANULARITY, granularity);
          theContext.put(GroupByQuery.CTX_TIMESTAMP_RESULT_FIELD_INDEX, 0);
        }

        GroupByQuery newQuery = GroupByQuery
            .builder()
            .setDataSource(tsQuery.getDataSource())
            .setQuerySegmentSpec(tsQuery.getQuerySegmentSpec())
            .setGranularity(tsQuery.getGranularity())
            .setDimFilter(tsQuery.getDimensionsFilter())
            .setDimensions(
                timeDimension == null
                ? ImmutableList.of()
                : ImmutableList.of(new DefaultDimensionSpec("v0", timeDimension, ColumnType.LONG))
            )
            .setAggregatorSpecs(tsQuery.getAggregatorSpecs())
            .setPostAggregatorSpecs(tsQuery.getPostAggregatorSpecs())
            .setVirtualColumns(VirtualColumns.create(virtualColumns))
            .setContext(theContext)
            .build();

        return Sequences.map(
            newRunner.run(queryPlus.withQuery(newQuery), responseContext),
            new Function<ResultRow, Result<TimeseriesResultValue>>()
            {
              @Override
              public Result<TimeseriesResultValue> apply(final ResultRow input)
              {
                // Convert each group-by row back into a timeseries result.
                final MapBasedRow mapBasedRow = input.toMapBasedRow(newQuery);
                return new Result<>(mapBasedRow.getTimestamp(), new TimeseriesResultValue(mapBasedRow.getEvent()));
              }
            }
        );
      }

      @Override
      public String toString()
      {
        return runner.toString();
      }
    };

    for (boolean vectorize : ImmutableList.of(false, true)) {
      // Add vectorization tests for any indexes that support it.
      if (!vectorize || QueryRunnerTestHelper.isTestRunnerVectorizable(runner)) {
        constructors.add(new Object[]{modifiedRunner, vectorize});
      }
    }
  }
  return constructors;
}
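To make the rewrite concrete (a sketch with hypothetical names, mirroring what the adapter above generates): a day-granularity timeseries query whose timestampResultField is "d0" becomes a group-by on a virtual column that floors __time to the day.

// Sketch only: the virtual column and dimension the adapter would add for
// granularity P1D and timestampResultField "d0" (both values hypothetical here).
VirtualColumn v0 = new ExpressionVirtualColumn(
    "v0",
    "timestamp_floor(__time, 'P1D')",
    ColumnType.LONG,
    TestExprMacroTable.INSTANCE
);
DimensionSpec timeDim = new DefaultDimensionSpec("v0", "d0", ColumnType.LONG);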