
Example 71 with QueryRunner

Use of org.apache.druid.query.QueryRunner in project druid by druid-io.

From the class GroupByLimitPushDownMultiNodeMergeTest, method getRunner2:

private List<QueryRunner<ResultRow>> getRunner2(int qIndexNumber) {
    List<QueryRunner<ResultRow>> runners = new ArrayList<>();
    QueryableIndex index2 = groupByIndices.get(qIndexNumber);
    QueryRunner<ResultRow> tooSmallRunner = makeQueryRunner(
        groupByFactory2,
        SegmentId.dummy(index2.toString()),
        new QueryableIndexSegment(index2, SegmentId.dummy(index2.toString()))
    );
    runners.add(groupByFactory2.getToolchest().preMergeQueryDecoration(tooSmallRunner));
    return runners;
}
Also used: QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment), QueryableIndex (org.apache.druid.segment.QueryableIndex), ArrayList (java.util.ArrayList), QueryRunner (org.apache.druid.query.QueryRunner), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), BySegmentQueryRunner (org.apache.druid.query.BySegmentQueryRunner)
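
The list built here is typically handed to the factory's merge machinery. As a minimal sketch (not part of the original test), the runners could be combined the way the other GroupBy examples on this page do it; someGroupByQuery is a hypothetical, already-built query, and the direct executor mirrors Example 74:

    // Hedged sketch: combine the decorated per-segment runners into one runner.
    // 'someGroupByQuery' is a hypothetical placeholder, not from the original test.
    List<QueryRunner<ResultRow>> runners = getRunner2(0);
    QueryRunner<ResultRow> merged = groupByFactory2.getToolchest().mergeResults(
        groupByFactory2.mergeRunners(Execs.directExecutor(), runners)
    );
    Sequence<ResultRow> rows = merged.run(QueryPlus.wrap(someGroupByQuery), ResponseContext.createEmpty());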

Example 72 with QueryRunner

Use of org.apache.druid.query.QueryRunner in project druid by druid-io.

From the class GroupByQueryRunnerTest, method testMergeResultsWithLimitPushDown:

@Test
public void testMergeResultsWithLimitPushDown() {
    if (!config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V2)) {
        return;
    }
    GroupByQuery.Builder builder = makeQueryBuilder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setInterval("2011-04-02/2011-04-04")
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
        .setLimitSpec(new DefaultLimitSpec(
            Collections.singletonList(new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.DESCENDING)),
            5
        ))
        .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
        .setGranularity(Granularities.ALL);
    final GroupByQuery allGranQuery = builder.build();
    QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<ResultRow>() {

        @Override
        public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext) {
            // simulate two daily segments
            final QueryPlus<ResultRow> queryPlus1 = queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
                new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))
            ));
            final QueryPlus<ResultRow> queryPlus2 = queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
                new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04")))
            ));
            return factory.getToolchest().mergeResults(
                (queryPlus3, responseContext1) -> new MergeSequence<>(
                    queryPlus3.getQuery().getResultOrdering(),
                    Sequences.simple(Arrays.asList(
                        runner.run(queryPlus1, responseContext1),
                        runner.run(queryPlus2, responseContext1)
                    ))
                )
            ).run(queryPlus, responseContext);
        }
    });
    Map<String, Object> context = new HashMap<>();
    List<ResultRow> allGranExpectedResults = Arrays.asList(
        makeRow(allGranQuery, "2011-04-02", "alias", "travel", "rows", 2L, "idx", 243L),
        makeRow(allGranQuery, "2011-04-02", "alias", "technology", "rows", 2L, "idx", 177L),
        makeRow(allGranQuery, "2011-04-02", "alias", "premium", "rows", 6L, "idx", 4416L),
        makeRow(allGranQuery, "2011-04-02", "alias", "news", "rows", 2L, "idx", 221L),
        makeRow(allGranQuery, "2011-04-02", "alias", "mezzanine", "rows", 6L, "idx", 4420L)
    );
    TestHelper.assertExpectedObjects(allGranExpectedResults, mergedRunner.run(QueryPlus.wrap(allGranQuery)), "merged");
}
Also used: DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), HashMap (java.util.HashMap), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec), Sequence (org.apache.druid.java.util.common.guava.Sequence), MergeSequence (org.apache.druid.java.util.common.guava.MergeSequence), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner), ChainedExecutionQueryRunner (org.apache.druid.query.ChainedExecutionQueryRunner), QueryRunner (org.apache.druid.query.QueryRunner), OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec), ResponseContext (org.apache.druid.query.context.ResponseContext), QueryPlus (org.apache.druid.query.QueryPlus), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
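
The pivotal piece of this test is the CTX_KEY_FORCE_LIMIT_PUSH_DOWN context flag, which tells the v2 strategy to apply the limit at lower levels of the merge tree instead of only after the final merge. A minimal sketch of setting the same flag on an arbitrary query, assuming an already-built GroupByQuery named query:

    // Hedged sketch: force limit push-down via a context override, the same
    // flag the test sets through overrideContext(). 'query' is hypothetical.
    GroupByQuery pushedDown = query.withOverriddenContext(
        ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true)
    );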

Example 73 with QueryRunner

Use of org.apache.druid.query.QueryRunner in project druid by druid-io.

From the class DataSourceMetadataQueryTest, method testMaxIngestedEventTime:

@Test
public void testMaxIngestedEventTime() throws Exception {
    final IncrementalIndex rtIndex = new OnheapIncrementalIndex.Builder()
        .setSimpleTestingIndexSchema(new CountAggregatorFactory("count"))
        .setMaxRowCount(1000)
        .build();
    final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
        new DataSourceMetadataQueryRunnerFactory(
            new DataSourceQueryQueryToolChest(DefaultGenericQueryMetricsFactory.instance()),
            QueryRunnerTestHelper.NOOP_QUERYWATCHER
        ),
        new IncrementalIndexSegment(rtIndex, SegmentId.dummy("test")),
        null
    );
    DateTime timestamp = DateTimes.nowUtc();
    rtIndex.add(new MapBasedInputRow(timestamp.getMillis(), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "x")));
    DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder().dataSource("testing").build();
    ResponseContext context = ConcurrentResponseContext.createEmpty();
    context.initializeMissingSegments();
    Iterable<Result<DataSourceMetadataResultValue>> results = runner.run(QueryPlus.wrap(dataSourceMetadataQuery), context).toList();
    DataSourceMetadataResultValue val = results.iterator().next().getValue();
    DateTime maxIngestedEventTime = val.getMaxIngestedEventTime();
    Assert.assertEquals(timestamp, maxIngestedEventTime);
}
Also used: IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex), OnheapIncrementalIndex (org.apache.druid.segment.incremental.OnheapIncrementalIndex), IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment), QueryRunner (org.apache.druid.query.QueryRunner), DateTime (org.joda.time.DateTime), Result (org.apache.druid.query.Result), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), ResponseContext (org.apache.druid.query.context.ResponseContext), ConcurrentResponseContext (org.apache.druid.query.context.ConcurrentResponseContext), MapBasedInputRow (org.apache.druid.data.input.MapBasedInputRow), Test (org.junit.Test)
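
Outside of a test, the same pattern is a cheap liveness probe for a datasource. A minimal sketch, reusing only the calls shown above and assuming runner is any QueryRunner already wired to the target datasource:

    // Hedged sketch: ask a datasource for its freshest ingested timestamp.
    DataSourceMetadataQuery metaQuery = Druids.newDataSourceMetadataQueryBuilder()
        .dataSource("testing")
        .build();
    ResponseContext context = ConcurrentResponseContext.createEmpty();
    context.initializeMissingSegments();
    Iterable<Result<DataSourceMetadataResultValue>> results =
        runner.run(QueryPlus.wrap(metaQuery), context).toList();
    DateTime maxTime = results.iterator().next().getValue().getMaxIngestedEventTime();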

Example 74 with QueryRunner

Use of org.apache.druid.query.QueryRunner in project druid by druid-io.

From the class ScanQueryResultOrderingTest, method assertResultsEquals:

private void assertResultsEquals(final ScanQuery query, final List<Integer> expectedResults) {
    final List<List<Pair<SegmentId, QueryRunner<ScanResultValue>>>> serverRunners = new ArrayList<>();
    for (int i = 0; i <= segmentToServerMap.stream().max(Comparator.naturalOrder()).orElse(0); i++) {
        serverRunners.add(new ArrayList<>());
    }
    for (int segmentNumber = 0; segmentNumber < segmentToServerMap.size(); segmentNumber++) {
        final SegmentId segmentId = SEGMENTS.get(segmentNumber).getId();
        final int serverNumber = segmentToServerMap.get(segmentNumber);
        serverRunners.get(serverNumber).add(Pair.of(segmentId, segmentRunners.get(segmentNumber)));
    }
    // Simulates what the Historical servers would do.
    final List<QueryRunner<ScanResultValue>> mergedServerRunners = serverRunners
        .stream()
        .filter(runners -> !runners.isEmpty())
        .map(runners -> queryRunnerFactory.getToolchest().mergeResults(new QueryRunner<ScanResultValue>() {

            @Override
            public Sequence<ScanResultValue> run(final QueryPlus<ScanResultValue> queryPlus, final ResponseContext responseContext) {
                return queryRunnerFactory
                    .mergeRunners(Execs.directExecutor(), runners.stream().map(p -> p.rhs).collect(Collectors.toList()))
                    .run(
                        queryPlus.withQuery(queryPlus.getQuery().withQuerySegmentSpec(
                            new MultipleSpecificSegmentSpec(runners.stream().map(p -> p.lhs.toDescriptor()).collect(Collectors.toList()))
                        )),
                        responseContext
                    );
            }
        }))
        .collect(Collectors.toList());
    // Simulates what the Broker would do.
    final QueryRunner<ScanResultValue> brokerRunner = queryRunnerFactory.getToolchest().mergeResults((queryPlus, responseContext) -> {
        final List<Sequence<ScanResultValue>> sequences = mergedServerRunners.stream().map(runner -> runner.run(queryPlus.withoutThreadUnsafeState())).collect(Collectors.toList());
        return new MergeSequence<>(queryPlus.getQuery().getResultOrdering(), Sequences.simple(sequences));
    });
    // Finally: run the query.
    final List<Integer> results = runQuery(
        (ScanQuery) Druids.ScanQueryBuilder.copy(query)
            .limit(limit)
            .batchSize(batchSize)
            .build()
            .withOverriddenContext(ImmutableMap.of(ScanQueryConfig.CTX_KEY_MAX_ROWS_QUEUED_FOR_ORDERING, maxRowsQueuedForOrdering)),
        brokerRunner
    );
    Assert.assertEquals(
        expectedResults.stream().limit(limit == 0 ? Long.MAX_VALUE : limit).collect(Collectors.toList()),
        results
    );
}
Also used: IntStream (java.util.stream.IntStream), QueryPlus (org.apache.druid.query.QueryPlus), Intervals (org.apache.druid.java.util.common.Intervals), RowBasedSegment (org.apache.druid.segment.RowBasedSegment), RunWith (org.junit.runner.RunWith), TreeSet (java.util.TreeSet), Pair (org.apache.druid.java.util.common.Pair), ArrayList (java.util.ArrayList), MultipleSpecificSegmentSpec (org.apache.druid.query.spec.MultipleSpecificSegmentSpec), Druids (org.apache.druid.query.Druids), RowAdapter (org.apache.druid.segment.RowAdapter), ColumnHolder (org.apache.druid.segment.column.ColumnHolder), ImmutableList (com.google.common.collect.ImmutableList), QueryRunner (org.apache.druid.query.QueryRunner), MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec), Sequences (org.apache.druid.java.util.common.guava.Sequences), Parameterized (org.junit.runners.Parameterized), Before (org.junit.Before), ImmutableSortedSet (com.google.common.collect.ImmutableSortedSet), DateTimes (org.apache.druid.java.util.common.DateTimes), Sequence (org.apache.druid.java.util.common.guava.Sequence), Execs (org.apache.druid.java.util.common.concurrent.Execs), ImmutableMap (com.google.common.collect.ImmutableMap), MergeSequence (org.apache.druid.java.util.common.guava.MergeSequence), ResponseContext (org.apache.druid.query.context.ResponseContext), DateTime (org.joda.time.DateTime), Set (java.util.Set), Test (org.junit.Test), Collectors (java.util.stream.Collectors), Sets (com.google.common.collect.Sets), List (java.util.List), RowSignature (org.apache.druid.segment.column.RowSignature), ColumnType (org.apache.druid.segment.column.ColumnType), DefaultGenericQueryMetricsFactory (org.apache.druid.query.DefaultGenericQueryMetricsFactory), SegmentId (org.apache.druid.timeline.SegmentId), Assert (org.junit.Assert), Comparator (java.util.Comparator), Collections (java.util.Collections)
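
Stripped of the stream plumbing, the test stacks two merge layers. A hedged sketch of that shape, where segmentRunnersForOneServer is a hypothetical List<QueryRunner<ScanResultValue>> for a single server:

    // Layer 1 (the simulated "historical"): merge the segment runners hosted
    // on one server, then apply toolchest-level merging.
    QueryRunner<ScanResultValue> serverRunner = queryRunnerFactory.getToolchest().mergeResults(
        queryRunnerFactory.mergeRunners(Execs.directExecutor(), segmentRunnersForOneServer)
    );
    // Layer 2 (the simulated "broker"): merge the per-server result streams
    // according to the query's result ordering.
    QueryRunner<ScanResultValue> brokerRunner = queryRunnerFactory.getToolchest().mergeResults(
        (queryPlus, responseContext) -> new MergeSequence<>(
            queryPlus.getQuery().getResultOrdering(),
            Sequences.simple(ImmutableList.of(serverRunner.run(queryPlus, responseContext)))
        )
    );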

Example 75 with QueryRunner

Use of org.apache.druid.query.QueryRunner in project druid by druid-io.

From the class SpecificSegmentQueryRunnerTest, method testRetry:

@Test
public void testRetry() throws Exception {
    final ObjectMapper mapper = new DefaultObjectMapper();
    SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0);
    final SpecificSegmentQueryRunner queryRunner = new SpecificSegmentQueryRunner(new QueryRunner() {

        @Override
        public Sequence run(QueryPlus queryPlus, ResponseContext responseContext) {
            return new Sequence() {

                @Override
                public Object accumulate(Object initValue, Accumulator accumulator) {
                    throw new SegmentMissingException("FAILSAUCE");
                }

                @Override
                public Yielder<Object> toYielder(Object initValue, YieldingAccumulator accumulator) {
                    throw new SegmentMissingException("FAILSAUCE");
                }
            };
        }
    }, new SpecificSegmentSpec(descriptor));
    // from accumulate
    ResponseContext responseContext = ResponseContext.createEmpty();
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("foo")
        .granularity(Granularities.ALL)
        .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D")))
        .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
        .build();
    Sequence results = queryRunner.run(QueryPlus.wrap(query), responseContext);
    results.toList();
    validate(mapper, descriptor, responseContext);
    // from toYielder
    responseContext = ResponseContext.createEmpty();
    results = queryRunner.run(QueryPlus.wrap(query), responseContext);
    results.toYielder(null, new YieldingAccumulator() {

        final List lists = new ArrayList<>();

        @Override
        public Object accumulate(Object accumulated, Object in) {
            lists.add(in);
            return in;
        }
    });
    validate(mapper, descriptor, responseContext);
}
Also used: YieldingAccumulator (org.apache.druid.java.util.common.guava.YieldingAccumulator), Accumulator (org.apache.druid.java.util.common.guava.Accumulator), Yielder (org.apache.druid.java.util.common.guava.Yielder), TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery), ArrayList (java.util.ArrayList), SegmentMissingException (org.apache.druid.segment.SegmentMissingException), Sequence (org.apache.druid.java.util.common.guava.Sequence), QueryRunner (org.apache.druid.query.QueryRunner), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), SegmentDescriptor (org.apache.druid.query.SegmentDescriptor), ResponseContext (org.apache.druid.query.context.ResponseContext), ImmutableList (com.google.common.collect.ImmutableList), List (java.util.List), DefaultObjectMapper (org.apache.druid.jackson.DefaultObjectMapper), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), QueryPlus (org.apache.druid.query.QueryPlus), Test (org.junit.Test)
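
The validate() helper (not shown) presumably checks that the missing segment was reported through the ResponseContext rather than escaping as an exception. A hedged sketch of that assertion, assuming a getMissingSegments() accessor that pairs with the initializeMissingSegments() call seen in Example 73:

    // Hedged sketch: SpecificSegmentQueryRunner swallows the
    // SegmentMissingException and records the descriptor in the context.
    // getMissingSegments() is an assumption about this Druid version's API.
    ResponseContext ctx = ResponseContext.createEmpty();
    queryRunner.run(QueryPlus.wrap(query), ctx).toList();
    List<SegmentDescriptor> missing = ctx.getMissingSegments();
    Assert.assertEquals(Collections.singletonList(descriptor), missing);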

Aggregations

QueryRunner (org.apache.druid.query.QueryRunner): 106 usages
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 76 usages
Test (org.junit.Test): 69 usages
ResponseContext (org.apache.druid.query.context.ResponseContext): 38 usages
QueryPlus (org.apache.druid.query.QueryPlus): 36 usages
ArrayList (java.util.ArrayList): 34 usages
Result (org.apache.druid.query.Result): 33 usages
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 30 usages
QueryToolChest (org.apache.druid.query.QueryToolChest): 28 usages
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 28 usages
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 25 usages
Sequence (org.apache.druid.java.util.common.guava.Sequence): 24 usages
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 23 usages
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery): 22 usages
TimeseriesQueryQueryToolChest (org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest): 20 usages
Query (org.apache.druid.query.Query): 18 usages
TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue): 17 usages
BySegmentQueryRunner (org.apache.druid.query.BySegmentQueryRunner): 16 usages
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 16 usages
QueryableIndexSegment (org.apache.druid.segment.QueryableIndexSegment): 16 usages