
Example 11 with SegmentReference

use of org.apache.druid.segment.SegmentReference in project druid by druid-io.

From class SinkQuerySegmentWalker, method getQueryRunnerForSegments:

@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(final Query<T> query, final Iterable<SegmentDescriptor> specs) {
    // We only handle one particular dataSource. Make sure that's what we have, then ignore from here on out.
    final DataSourceAnalysis analysis = DataSourceAnalysis.forDataSource(query.getDataSource());
    // Sanity check: make sure the query is based on the table we're meant to handle.
    if (!analysis.getBaseTableDataSource().filter(ds -> dataSource.equals(ds.getName())).isPresent()) {
        throw new ISE("Cannot handle datasource: %s", analysis.getDataSource());
    }
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    if (factory == null) {
        throw new ISE("Unknown query type[%s].", query.getClass());
    }
    final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
    final boolean skipIncrementalSegment = query.getContextValue(CONTEXT_SKIP_INCREMENTAL_SEGMENT, false);
    final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);
    // Make sure this query type can handle the subquery, if present.
    if (analysis.isQuery() && !toolChest.canPerformSubquery(((QueryDataSource) analysis.getDataSource()).getQuery())) {
        throw new ISE("Cannot handle subquery: %s", analysis.getDataSource());
    }
    // segmentMapFn maps each base Segment into a joined Segment if necessary.
    final Function<SegmentReference, SegmentReference> segmentMapFn = joinableFactoryWrapper.createSegmentMapFn(
        analysis.getJoinBaseTableFilter().map(Filters::toFilter).orElse(null),
        analysis.getPreJoinableClauses(),
        cpuTimeAccumulator,
        analysis.getBaseQuery().orElse(query)
    );
    // Compute the join cache key up front so it doesn't need to be recomputed for every segment.
    final Optional<byte[]> cacheKeyPrefix = analysis.isJoin()
        ? joinableFactoryWrapper.computeJoinDataSourceCacheKey(analysis)
        : Optional.of(StringUtils.EMPTY_BYTES);
    Iterable<QueryRunner<T>> perSegmentRunners = Iterables.transform(specs, descriptor -> {
        final PartitionChunk<Sink> chunk = sinkTimeline.findChunk(descriptor.getInterval(), descriptor.getVersion(), descriptor.getPartitionNumber());
        if (chunk == null) {
            return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
        }
        final Sink theSink = chunk.getObject();
        final SegmentId sinkSegmentId = theSink.getSegment().getId();
        Iterable<QueryRunner<T>> perHydrantRunners = new SinkQueryRunners<>(Iterables.transform(theSink, hydrant -> {
            // Hydrant might swap at any point, but if it's swapped at the start
            // then we know it's *definitely* swapped.
            final boolean hydrantDefinitelySwapped = hydrant.hasSwapped();
            if (skipIncrementalSegment && !hydrantDefinitelySwapped) {
                return new Pair<>(hydrant.getSegmentDataInterval(), new NoopQueryRunner<>());
            }
            // Prevent the underlying segment from being swapped while it's being iterated
            final Optional<Pair<SegmentReference, Closeable>> maybeSegmentAndCloseable = hydrant.getSegmentForQuery(segmentMapFn);
            // If the optional isn't present, we failed to acquire a reference to the segment or one of its joinables
            if (!maybeSegmentAndCloseable.isPresent()) {
                return new Pair<>(hydrant.getSegmentDataInterval(), new ReportTimelineMissingSegmentQueryRunner<>(descriptor));
            }
            final Pair<SegmentReference, Closeable> segmentAndCloseable = maybeSegmentAndCloseable.get();
            try {
                QueryRunner<T> runner = factory.createRunner(segmentAndCloseable.lhs);
                // 1) Only use caching if the data is immutable (i.e. the hydrant has definitely swapped)
                // 2) Hydrants are not the same between replicas, so make sure the cache is local
                if (hydrantDefinitelySwapped && cache.isLocal()) {
                    StorageAdapter storageAdapter = segmentAndCloseable.lhs.asStorageAdapter();
                    long segmentMinTime = storageAdapter.getMinTime().getMillis();
                    long segmentMaxTime = storageAdapter.getMaxTime().getMillis();
                    Interval actualDataInterval = Intervals.utc(segmentMinTime, segmentMaxTime + 1);
                    runner = new CachingQueryRunner<>(
                        makeHydrantCacheIdentifier(hydrant),
                        cacheKeyPrefix,
                        descriptor,
                        actualDataInterval,
                        objectMapper,
                        cache,
                        toolChest,
                        runner,
                        // Always populate the cache in the foreground, regardless of config
                        new ForegroundCachePopulator(objectMapper, cachePopulatorStats, cacheConfig.getMaxEntrySize()),
                        cacheConfig
                    );
                }
                // Always release the acquired reference via the Closeable once the result sequence is closed
                runner = QueryRunnerHelper.makeClosingQueryRunner(runner, segmentAndCloseable.rhs);
                return new Pair<>(segmentAndCloseable.lhs.getDataInterval(), runner);
            } catch (Throwable e) {
                throw CloseableUtils.closeAndWrapInCatch(e, segmentAndCloseable.rhs);
            }
        }));
        return new SpecificSegmentQueryRunner<>(
            withPerSinkMetrics(
                new BySegmentQueryRunner<>(
                    sinkSegmentId,
                    descriptor.getInterval().getStart(),
                    factory.mergeRunners(DirectQueryProcessingPool.INSTANCE, perHydrantRunners)
                ),
                toolChest,
                sinkSegmentId,
                cpuTimeAccumulator
            ),
            new SpecificSegmentSpec(descriptor)
        );
    });
    final QueryRunner<T> mergedRunner = toolChest.mergeResults(
        factory.mergeRunners(queryProcessingPool, perSegmentRunners)
    );
    return CPUTimeMetricQueryRunner.safeBuild(
        new FinalizeResultsQueryRunner<>(mergedRunner, toolChest),
        toolChest,
        emitter,
        cpuTimeAccumulator,
        true
    );
}
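
For context, here is a minimal sketch of how a walker like this might be driven. The walker, the query, and the descriptor values (interval, version, partition number) are hypothetical placeholders, not values taken from the snippet above:

// Hypothetical usage sketch: ask the walker for a runner over one specific
// segment, then run the query; <T> keeps the snippet type-agnostic.
static <T> Sequence<T> runOneSegment(QuerySegmentWalker walker, Query<T> query) {
    // Illustrative descriptor values (assumed): interval, version, partition number.
    final SegmentDescriptor descriptor = new SegmentDescriptor(Intervals.of("2000-01-01/2000-01-02"), "v1", 0);
    final QueryRunner<T> runner = walker.getQueryRunnerForSegments(query, Collections.singletonList(descriptor));
    return runner.run(QueryPlus.wrap(query), ResponseContext.createEmpty());
}

If a descriptor doesn't match any chunk in the sink timeline, the method above hands back a ReportTimelineMissingSegmentQueryRunner for it, so the caller receives a missing-segment report rather than an exception.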
Also used : DirectQueryProcessingPool(org.apache.druid.query.DirectQueryProcessingPool) QueryRunnerHelper(org.apache.druid.query.QueryRunnerHelper) QueryProcessingPool(org.apache.druid.query.QueryProcessingPool) ForegroundCachePopulator(org.apache.druid.client.cache.ForegroundCachePopulator) StorageAdapter(org.apache.druid.segment.StorageAdapter) Pair(org.apache.druid.java.util.common.Pair) NoopQueryRunner(org.apache.druid.query.NoopQueryRunner) SegmentReference(org.apache.druid.segment.SegmentReference) SpecificSegmentQueryRunner(org.apache.druid.query.spec.SpecificSegmentQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) ReportTimelineMissingSegmentQueryRunner(org.apache.druid.query.ReportTimelineMissingSegmentQueryRunner) CacheConfig(org.apache.druid.client.cache.CacheConfig) StringUtils(org.apache.druid.java.util.common.StringUtils) JoinableFactoryWrapper(org.apache.druid.segment.join.JoinableFactoryWrapper) ISE(org.apache.druid.java.util.common.ISE) SpecificSegmentSpec(org.apache.druid.query.spec.SpecificSegmentSpec) BySegmentQueryRunner(org.apache.druid.query.BySegmentQueryRunner) SinkQueryRunners(org.apache.druid.query.SinkQueryRunners) QueryDataSource(org.apache.druid.query.QueryDataSource) ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) Optional(java.util.Optional) FunctionalIterable(org.apache.druid.java.util.common.guava.FunctionalIterable) SegmentId(org.apache.druid.timeline.SegmentId) DataSourceAnalysis(org.apache.druid.query.planning.DataSourceAnalysis) Iterables(com.google.common.collect.Iterables) Intervals(org.apache.druid.java.util.common.Intervals) QueryMetrics(org.apache.druid.query.QueryMetrics) CachingQueryRunner(org.apache.druid.client.CachingQueryRunner) JoinableFactory(org.apache.druid.segment.join.JoinableFactory) Function(java.util.function.Function) PartitionChunk(org.apache.druid.timeline.partition.PartitionChunk) Interval(org.joda.time.Interval) MetricsEmittingQueryRunner(org.apache.druid.query.MetricsEmittingQueryRunner) Query(org.apache.druid.query.Query) CachePopulatorStats(org.apache.druid.client.cache.CachePopulatorStats) Sink(org.apache.druid.segment.realtime.plumber.Sink) QuerySegmentWalker(org.apache.druid.query.QuerySegmentWalker) EmittingLogger(org.apache.druid.java.util.emitter.EmittingLogger) VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) QueryRunnerFactoryConglomerate(org.apache.druid.query.QueryRunnerFactoryConglomerate) QueryToolChest(org.apache.druid.query.QueryToolChest) FireHydrant(org.apache.druid.segment.realtime.FireHydrant) AtomicLong(java.util.concurrent.atomic.AtomicLong) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) Closeable(java.io.Closeable) Preconditions(com.google.common.base.Preconditions) VisibleForTesting(com.google.common.annotations.VisibleForTesting) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) Cache(org.apache.druid.client.cache.Cache) Filters(org.apache.druid.segment.filter.Filters) CloseableUtils(org.apache.druid.utils.CloseableUtils) CPUTimeMetricQueryRunner(org.apache.druid.query.CPUTimeMetricQueryRunner)

Example 12 with SegmentReference

use of org.apache.druid.segment.SegmentReference in project druid by druid-io.

From class FireHydrant, method getSegmentForQuery:

/**
 * This method is like a combined form of {@link #getIncrementedSegment} and {@link #getAndIncrementSegment} that
 * deals in {@link SegmentReference} instead of directly with {@link ReferenceCountingSegment}, in order to acquire
 * references to both the hydrant's segment and any tracked joinables taking part in the query.
 */
public Optional<Pair<SegmentReference, Closeable>> getSegmentForQuery(Function<SegmentReference, SegmentReference> segmentMapFn) {
    ReferenceCountingSegment sinkSegment = adapter.get();
    SegmentReference segment = segmentMapFn.apply(sinkSegment);
    while (true) {
        Optional<Closeable> reference = segment.acquireReferences();
        if (reference.isPresent()) {
            return Optional.of(new Pair<>(segment, reference.get()));
        }
        // segment.acquireReferences() returned an empty Optional, which means the segment is closed. Since close()
        // in swapSegment() happens after the segment swap, the new segment should already be visible.
        ReferenceCountingSegment newSinkSegment = adapter.get();
        if (newSinkSegment == null) {
            throw new ISE("FireHydrant was 'closed' by swapping segment to null while acquiring a segment");
        }
        if (sinkSegment == newSinkSegment) {
            if (newSinkSegment.isClosed()) {
                throw new ISE("segment.close() is called somewhere outside FireHydrant.swapSegment()");
            }
            // The sink segment hasn't been swapped and isn't closed, so the failed acquisition must come from
            // a joinable clause of a HashJoinSegment created by segmentMapFn
            return Optional.empty();
        }
        segment = segmentMapFn.apply(newSinkSegment);
    // Spin loop.
    }
}
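
A minimal sketch of the caller's side of this contract, mirroring what SinkQuerySegmentWalker does in Example 11. The identity mapping (a query with no joins) and the wrapper method are assumptions for illustration:

// Hypothetical caller: acquire the hydrant's segment with an identity mapFn,
// use it while the reference is held, and release it by closing the Closeable.
static void useSegment(FireHydrant hydrant) throws IOException {
    Optional<Pair<SegmentReference, Closeable>> maybe = hydrant.getSegmentForQuery(Function.identity());
    if (!maybe.isPresent()) {
        // Acquisition failed; per Example 11, report the segment as missing.
        return;
    }
    Pair<SegmentReference, Closeable> segmentAndCloseable = maybe.get();
    try (Closeable releaser = segmentAndCloseable.rhs) {
        // ... build and run a QueryRunner against segmentAndCloseable.lhs here ...
    }
}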
Also used : ReferenceCountingSegment(org.apache.druid.segment.ReferenceCountingSegment) Closeable(java.io.Closeable) SegmentReference(org.apache.druid.segment.SegmentReference) ISE(org.apache.druid.java.util.common.ISE)

Example 13 with SegmentReference

use of org.apache.druid.segment.SegmentReference in project druid by druid-io.

From class ServerManagerForQueryErrorTest, method buildQueryRunnerForSegment:

@Override
protected <T> QueryRunner<T> buildQueryRunnerForSegment(
    Query<T> query,
    SegmentDescriptor descriptor,
    QueryRunnerFactory<T, Query<T>> factory,
    QueryToolChest<T, Query<T>> toolChest,
    VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline,
    Function<SegmentReference, SegmentReference> segmentMapFn,
    AtomicLong cpuTimeAccumulator,
    Optional<byte[]> cacheKeyPrefix
) {
    if (query.getContextBoolean(QUERY_RETRY_TEST_CONTEXT_KEY, false)) {
        final MutableBoolean isIgnoreSegment = new MutableBoolean(false);
        queryToIgnoredSegments.compute(query.getMostSpecificId(), (queryId, ignoredSegments) -> {
            if (ignoredSegments == null) {
                ignoredSegments = new HashSet<>();
            }
            if (ignoredSegments.size() < MAX_NUM_FALSE_MISSING_SEGMENTS_REPORTS) {
                ignoredSegments.add(descriptor);
                isIgnoreSegment.setTrue();
            }
            return ignoredSegments;
        });
        if (isIgnoreSegment.isTrue()) {
            LOG.info("Pretending I don't have segment[%s]", descriptor);
            return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
        }
    } else if (query.getContextBoolean(QUERY_TIMEOUT_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw new QueryTimeoutException("query timeout test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw new QueryTimeoutException("query timeout test");
            }
        };
    } else if (query.getContextBoolean(QUERY_CAPACITY_EXCEEDED_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw QueryCapacityExceededException.withErrorMessageAndResolvedHost("query capacity exceeded test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw QueryCapacityExceededException.withErrorMessageAndResolvedHost("query capacity exceeded test");
            }
        };
    } else if (query.getContextBoolean(QUERY_UNSUPPORTED_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw new QueryUnsupportedException("query unsupported test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw new QueryUnsupportedException("query unsupported test");
            }
        };
    } else if (query.getContextBoolean(RESOURCE_LIMIT_EXCEEDED_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw new ResourceLimitExceededException("resource limit exceeded test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw new ResourceLimitExceededException("resource limit exceeded test");
            }
        };
    } else if (query.getContextBoolean(QUERY_FAILURE_TEST_CONTEXT_KEY, false)) {
        return (queryPlus, responseContext) -> new Sequence<T>() {

            @Override
            public <OutType> OutType accumulate(OutType initValue, Accumulator<OutType, T> accumulator) {
                throw new RuntimeException("query failure test");
            }

            @Override
            public <OutType> Yielder<OutType> toYielder(OutType initValue, YieldingAccumulator<OutType, T> accumulator) {
                throw new RuntimeException("query failure test");
            }
        };
    }
    return super.buildQueryRunnerForSegment(query, descriptor, factory, toolChest, timeline, segmentMapFn, cpuTimeAccumulator, cacheKeyPrefix);
}
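
A hedged sketch of how a test might flip one of these context flags; the base query variable is a placeholder, and only the context-key constants referenced above are used:

// Hypothetical: with the retry-test flag set, this walker pretends the segment
// is missing (up to MAX_NUM_FALSE_MISSING_SEGMENTS_REPORTS times per query id).
Query<?> retryTestQuery = baseQuery.withOverriddenContext(ImmutableMap.of(QUERY_RETRY_TEST_CONTEXT_KEY, true));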
Also used : Logger(org.apache.druid.java.util.common.logger.Logger) SegmentManager(org.apache.druid.server.SegmentManager) Inject(com.google.inject.Inject) Smile(org.apache.druid.guice.annotations.Smile) QueryProcessingPool(org.apache.druid.query.QueryProcessingPool) JoinableFactory(org.apache.druid.segment.join.JoinableFactory) Function(java.util.function.Function) QueryCapacityExceededException(org.apache.druid.query.QueryCapacityExceededException) HashSet(java.util.HashSet) SegmentReference(org.apache.druid.segment.SegmentReference) Query(org.apache.druid.query.Query) QueryRunner(org.apache.druid.query.QueryRunner) CachePopulator(org.apache.druid.client.cache.CachePopulator) Yielder(org.apache.druid.java.util.common.guava.Yielder) Sequence(org.apache.druid.java.util.common.guava.Sequence) YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator) VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline) ServerConfig(org.apache.druid.server.initialization.ServerConfig) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) ReportTimelineMissingSegmentQueryRunner(org.apache.druid.query.ReportTimelineMissingSegmentQueryRunner) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) CacheConfig(org.apache.druid.client.cache.CacheConfig) QueryRunnerFactoryConglomerate(org.apache.druid.query.QueryRunnerFactoryConglomerate) QueryToolChest(org.apache.druid.query.QueryToolChest) Set(java.util.Set) ReferenceCountingSegment(org.apache.druid.segment.ReferenceCountingSegment) AtomicLong(java.util.concurrent.atomic.AtomicLong) QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) ServiceEmitter(org.apache.druid.java.util.emitter.service.ServiceEmitter) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) ResourceLimitExceededException(org.apache.druid.query.ResourceLimitExceededException) Optional(java.util.Optional) MutableBoolean(org.apache.commons.lang3.mutable.MutableBoolean) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) Cache(org.apache.druid.client.cache.Cache) Accumulator(org.apache.druid.java.util.common.guava.Accumulator) QueryUnsupportedException(org.apache.druid.query.QueryUnsupportedException)

Example 14 with SegmentReference

use of org.apache.druid.segment.SegmentReference in project druid by druid-io.

From class FireHydrantTest, method testGetSegmentForQueryButNotAbleToAcquireReferences:

@Test
public void testGetSegmentForQueryButNotAbleToAcquireReferences() {
    ReferenceCountingSegment incrementalSegmentReference = hydrant.getHydrantSegment();
    Assert.assertEquals(0, incrementalSegmentReference.getNumReferences());
    Optional<Pair<SegmentReference, Closeable>> maybeSegmentAndCloseable = hydrant.getSegmentForQuery(segmentReference -> new SegmentReference() {

        @Override
        public Optional<Closeable> acquireReferences() {
            return Optional.empty();
        }

        @Override
        public SegmentId getId() {
            return incrementalIndexSegment.getId();
        }

        @Override
        public Interval getDataInterval() {
            return incrementalIndexSegment.getDataInterval();
        }

        @Nullable
        @Override
        public QueryableIndex asQueryableIndex() {
            return incrementalIndexSegment.asQueryableIndex();
        }

        @Override
        public StorageAdapter asStorageAdapter() {
            return incrementalIndexSegment.asStorageAdapter();
        }

        @Override
        public void close() {
            incrementalIndexSegment.close();
        }
    });
    Assert.assertFalse(maybeSegmentAndCloseable.isPresent());
    Assert.assertEquals(0, incrementalSegmentReference.getNumReferences());
}
Also used : ReferenceCountingSegment(org.apache.druid.segment.ReferenceCountingSegment) Optional(java.util.Optional) SegmentId(org.apache.druid.timeline.SegmentId) QueryableIndex(org.apache.druid.segment.QueryableIndex) SegmentReference(org.apache.druid.segment.SegmentReference) StorageAdapter(org.apache.druid.segment.StorageAdapter) Nullable(javax.annotation.Nullable) Pair(org.apache.druid.java.util.common.Pair) Interval(org.joda.time.Interval) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 15 with SegmentReference

use of org.apache.druid.segment.SegmentReference in project druid by druid-io.

From class TestClusterQuerySegmentWalker, method getQueryRunnerForSegments:

@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(final Query<T> query, final Iterable<SegmentDescriptor> specs) {
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    if (factory == null) {
        throw new ISE("Unknown query type[%s].", query.getClass());
    }
    final DataSourceAnalysis analysis = DataSourceAnalysis.forDataSource(query.getDataSource());
    if (!analysis.isConcreteTableBased()) {
        throw new ISE("Cannot handle datasource: %s", query.getDataSource());
    }
    final String dataSourceName = ((TableDataSource) analysis.getBaseDataSource()).getName();
    final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
    // Make sure this query type can handle the subquery, if present.
    if (analysis.isQuery() && !toolChest.canPerformSubquery(((QueryDataSource) analysis.getDataSource()).getQuery())) {
        throw new ISE("Cannot handle subquery: %s", analysis.getDataSource());
    }
    final Function<SegmentReference, SegmentReference> segmentMapFn = joinableFactoryWrapper.createSegmentMapFn(
        analysis.getJoinBaseTableFilter().map(Filters::toFilter).orElse(null),
        analysis.getPreJoinableClauses(),
        new AtomicLong(),
        analysis.getBaseQuery().orElse(query)
    );
    final QueryRunner<T> baseRunner = new FinalizeResultsQueryRunner<>(
        toolChest.postMergeQueryDecoration(
            toolChest.mergeResults(
                toolChest.preMergeQueryDecoration(
                    makeTableRunner(toolChest, factory, getSegmentsForTable(dataSourceName, specs), segmentMapFn)
                )
            )
        ),
        toolChest
    );
    // Return a runner that pins the query to the requested segments and, when a scheduler is present, prioritizes and lanes it before serving
    return (theQuery, responseContext) -> {
        responseContext.initializeRemainingResponses();
        responseContext.addRemainingResponse(theQuery.getQuery().getMostSpecificId(), 0);
        if (scheduler != null) {
            Set<SegmentServerSelector> segments = new HashSet<>();
            specs.forEach(spec -> segments.add(new SegmentServerSelector(spec)));
            return scheduler.run(
                scheduler.prioritizeAndLaneQuery(theQuery, segments),
                new LazySequence<>(() -> baseRunner.run(
                    theQuery.withQuery(Queries.withSpecificSegments(theQuery.getQuery(), ImmutableList.copyOf(specs))),
                    responseContext
                ))
            );
        } else {
            return baseRunner.run(
                theQuery.withQuery(Queries.withSpecificSegments(theQuery.getQuery(), ImmutableList.copyOf(specs))),
                responseContext
            );
        }
    };
}
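
The returned lambda pins each run to the originally requested segments before delegating. A one-line sketch of that rewrite in isolation, using the query and specs from the method above:

// Queries.withSpecificSegments swaps the query's segment spec for one naming
// exactly these descriptors, so the run targets a fixed segment set.
Query<T> pinned = Queries.withSpecificSegments(query, ImmutableList.copyOf(specs));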
Also used : DataSourceAnalysis(org.apache.druid.query.planning.DataSourceAnalysis) JoinableFactory(org.apache.druid.segment.join.JoinableFactory) Function(java.util.function.Function) NoopQueryRunner(org.apache.druid.query.NoopQueryRunner) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) SegmentReference(org.apache.druid.segment.SegmentReference) PartitionChunk(org.apache.druid.timeline.partition.PartitionChunk) Interval(org.joda.time.Interval) Lists(com.google.common.collect.Lists) ImmutableList(com.google.common.collect.ImmutableList) Query(org.apache.druid.query.Query) SpecificSegmentQueryRunner(org.apache.druid.query.spec.SpecificSegmentQueryRunner) Map(java.util.Map) SegmentServerSelector(org.apache.druid.client.SegmentServerSelector) QueryRunner(org.apache.druid.query.QueryRunner) QuerySegmentWalker(org.apache.druid.query.QuerySegmentWalker) Nullable(javax.annotation.Nullable) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) LazySequence(org.apache.druid.java.util.common.guava.LazySequence) Execs(org.apache.druid.java.util.common.concurrent.Execs) VersionedIntervalTimeline(org.apache.druid.timeline.VersionedIntervalTimeline) QueryRunnerFactoryConglomerate(org.apache.druid.query.QueryRunnerFactoryConglomerate) QueryToolChest(org.apache.druid.query.QueryToolChest) JoinableFactoryWrapper(org.apache.druid.segment.join.JoinableFactoryWrapper) TimelineObjectHolder(org.apache.druid.timeline.TimelineObjectHolder) Set(java.util.Set) ISE(org.apache.druid.java.util.common.ISE) SpecificSegmentSpec(org.apache.druid.query.spec.SpecificSegmentSpec) ReferenceCountingSegment(org.apache.druid.segment.ReferenceCountingSegment) TableDataSource(org.apache.druid.query.TableDataSource) Queries(org.apache.druid.query.Queries) AtomicLong(java.util.concurrent.atomic.AtomicLong) QueryDataSource(org.apache.druid.query.QueryDataSource) List(java.util.List) ReferenceCountingSegmentQueryRunner(org.apache.druid.query.ReferenceCountingSegmentQueryRunner) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) Preconditions(com.google.common.base.Preconditions) FunctionalIterable(org.apache.druid.java.util.common.guava.FunctionalIterable) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) Filters(org.apache.druid.segment.filter.Filters) Collections(java.util.Collections)

Aggregations

SegmentReference (org.apache.druid.segment.SegmentReference)15 AtomicLong (java.util.concurrent.atomic.AtomicLong)10 ReferenceCountingSegment (org.apache.druid.segment.ReferenceCountingSegment)10 Optional (java.util.Optional)7 Query (org.apache.druid.query.Query)7 Test (org.junit.Test)7 ISE (org.apache.druid.java.util.common.ISE)6 DataSourceAnalysis (org.apache.druid.query.planning.DataSourceAnalysis)6 Filters (org.apache.druid.segment.filter.Filters)6 Closeable (java.io.Closeable)5 Function (java.util.function.Function)5 Pair (org.apache.druid.java.util.common.Pair)5 QueryRunner (org.apache.druid.query.QueryRunner)5 VersionedIntervalTimeline (org.apache.druid.timeline.VersionedIntervalTimeline)5 Interval (org.joda.time.Interval)5 Nullable (javax.annotation.Nullable)4 FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner)4 NoopQueryRunner (org.apache.druid.query.NoopQueryRunner)4 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)3 Preconditions (com.google.common.base.Preconditions)3