Search in sources:
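
SpecificSegmentSpec is the QuerySegmentSpec implementation that pins a query to exactly one segment, identified by a SegmentDescriptor (interval, version, and partition number). The four examples below are taken from the druid-io/druid sources.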

Example 1 with SpecificSegmentSpec

Use of io.druid.query.spec.SpecificSegmentSpec in project druid by druid-io, from the class RealtimeManager, method getQueryRunnerForSegments:

@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(final Query<T> query, final Iterable<SegmentDescriptor> specs) {
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    final Map<Integer, FireChief> partitionChiefs = chiefs.get(Iterables.getOnlyElement(query.getDataSource().getNames()));
    return partitionChiefs == null ? new NoopQueryRunner<T>() : factory.getToolchest().mergeResults(factory.mergeRunners(MoreExecutors.sameThreadExecutor(), Iterables.transform(specs, new Function<SegmentDescriptor, QueryRunner<T>>() {

        @Override
        public QueryRunner<T> apply(SegmentDescriptor spec) {
            final FireChief retVal = partitionChiefs.get(spec.getPartitionNumber());
            return retVal == null ? new NoopQueryRunner<T>() : retVal.getQueryRunner(query.withQuerySegmentSpec(new SpecificSegmentSpec(spec)));
        }
    })));
}
Also used: Function (com.google.common.base.Function), Query (io.druid.query.Query), SpecificSegmentSpec (io.druid.query.spec.SpecificSegmentSpec), SegmentDescriptor (io.druid.query.SegmentDescriptor)
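
To make the narrowing step in the transform above concrete, here is a minimal sketch of it in isolation. The helper name and the interval, version, and partition values are made up for illustration; only the SegmentDescriptor and SpecificSegmentSpec usage mirrors the example:

import io.druid.query.Query;
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.SpecificSegmentSpec;
import org.joda.time.Interval;

public class NarrowQueryExample {
    // Hypothetical helper: rebinds a query to a single segment partition.
    public static <T> Query<T> narrowToSegment(Query<T> query, Interval interval, String version, int partitionNumber) {
        // A SegmentDescriptor names one segment: its interval, its version string, and its partition number.
        SegmentDescriptor descriptor = new SegmentDescriptor(interval, version, partitionNumber);
        // withQuerySegmentSpec returns a copy of the query restricted to just that segment.
        return query.withQuerySegmentSpec(new SpecificSegmentSpec(descriptor));
    }
}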

Example 2 with SpecificSegmentSpec

Use of io.druid.query.spec.SpecificSegmentSpec in project druid by druid-io, from the class SinkQuerySegmentWalker, method getQueryRunnerForSegments:

@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(final Query<T> query, final Iterable<SegmentDescriptor> specs) {
    // We only handle one particular dataSource. Make sure that's what we have, then ignore from here on out.
    if (!(query.getDataSource() instanceof TableDataSource) || !dataSource.equals(((TableDataSource) query.getDataSource()).getName())) {
        log.makeAlert("Received query for unknown dataSource").addData("dataSource", query.getDataSource()).emit();
        return new NoopQueryRunner<>();
    }
    final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
    if (factory == null) {
        throw new ISE("Unknown query type[%s].", query.getClass());
    }
    final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
    final Function<Query<T>, ServiceMetricEvent.Builder> builderFn = new Function<Query<T>, ServiceMetricEvent.Builder>() {

        @Override
        public ServiceMetricEvent.Builder apply(@Nullable Query<T> input) {
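            // Note: uses the enclosing `query`, not the `input` argument passed to apply().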
            return toolChest.makeMetricBuilder(query);
        }
    };
    final boolean skipIncrementalSegment = query.getContextValue(CONTEXT_SKIP_INCREMENTAL_SEGMENT, false);
    final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);
    return CPUTimeMetricQueryRunner.safeBuild(toolChest.mergeResults(factory.mergeRunners(queryExecutorService, FunctionalIterable.create(specs).transform(new Function<SegmentDescriptor, QueryRunner<T>>() {

        @Override
        public QueryRunner<T> apply(final SegmentDescriptor descriptor) {
            final PartitionHolder<Sink> holder = sinkTimeline.findEntry(descriptor.getInterval(), descriptor.getVersion());
            if (holder == null) {
                return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
            }
            final PartitionChunk<Sink> chunk = holder.getChunk(descriptor.getPartitionNumber());
            if (chunk == null) {
                return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
            }
            final Sink theSink = chunk.getObject();
            final String sinkSegmentIdentifier = theSink.getSegment().getIdentifier();
            return new SpecificSegmentQueryRunner<>(withPerSinkMetrics(new BySegmentQueryRunner<>(sinkSegmentIdentifier, descriptor.getInterval().getStart(), factory.mergeRunners(MoreExecutors.sameThreadExecutor(), Iterables.transform(theSink, new Function<FireHydrant, QueryRunner<T>>() {

                @Override
                public QueryRunner<T> apply(final FireHydrant hydrant) {
                    // Hydrant might swap at any point, but if it's swapped at the start
                    // then we know it's *definitely* swapped.
                    final boolean hydrantDefinitelySwapped = hydrant.hasSwapped();
                    if (skipIncrementalSegment && !hydrantDefinitelySwapped) {
                        return new NoopQueryRunner<>();
                    }
                    // Prevent the underlying segment from swapping while it's being iterated
                    final Pair<Segment, Closeable> segment = hydrant.getAndIncrementSegment();
                    try {
                        QueryRunner<T> baseRunner = QueryRunnerHelper.makeClosingQueryRunner(factory.createRunner(segment.lhs), segment.rhs);
                        // 1) Only use caching if data is immutable
                        // 2) Hydrants are not the same between replicas, make sure cache is local
                        if (hydrantDefinitelySwapped && cache.isLocal()) {
                            return new CachingQueryRunner<>(makeHydrantCacheIdentifier(hydrant), descriptor, objectMapper, cache, toolChest, baseRunner, MoreExecutors.sameThreadExecutor(), cacheConfig);
                        } else {
                            return baseRunner;
                        }
                    } catch (RuntimeException e) {
                        CloseQuietly.close(segment.rhs);
                        throw e;
                    }
                }
            }))), builderFn, sinkSegmentIdentifier, cpuTimeAccumulator), new SpecificSegmentSpec(descriptor));
        }
    }))), builderFn, emitter, cpuTimeAccumulator, true);
}
Also used: Query (io.druid.query.Query), Function (com.google.common.base.Function), ReportTimelineMissingSegmentQueryRunner (io.druid.query.ReportTimelineMissingSegmentQueryRunner), Sink (io.druid.segment.realtime.plumber.Sink), SpecificSegmentQueryRunner (io.druid.query.spec.SpecificSegmentQueryRunner), NoopQueryRunner (io.druid.query.NoopQueryRunner), SegmentDescriptor (io.druid.query.SegmentDescriptor), ISE (io.druid.java.util.common.ISE), FireHydrant (io.druid.segment.realtime.FireHydrant), Pair (io.druid.java.util.common.Pair), BySegmentQueryRunner (io.druid.query.BySegmentQueryRunner), MetricsEmittingQueryRunner (io.druid.query.MetricsEmittingQueryRunner), CachingQueryRunner (io.druid.client.CachingQueryRunner), QueryRunner (io.druid.query.QueryRunner), CPUTimeMetricQueryRunner (io.druid.query.CPUTimeMetricQueryRunner), AtomicLong (java.util.concurrent.atomic.AtomicLong), TableDataSource (io.druid.query.TableDataSource), SpecificSegmentSpec (io.druid.query.spec.SpecificSegmentSpec), ServiceMetricEvent (com.metamx.emitter.service.ServiceMetricEvent), Nullable (javax.annotation.Nullable)
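
The SpecificSegmentSpec usage here is the outermost wrapper in apply(): each per-sink runner is bound to the spec built from its descriptor, so the query each sink sees targets only that sink's segment. A stripped-down sketch of just that binding, with the metrics, by-segment, and caching layers omitted; baseRunner stands in for the merged per-hydrant runner:

import io.druid.query.QueryRunner;
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.SpecificSegmentQueryRunner;
import io.druid.query.spec.SpecificSegmentSpec;

public class BindRunnerExample {
    // Hypothetical helper: ties an already-built runner to a single segment descriptor.
    public static <T> QueryRunner<T> bindToDescriptor(QueryRunner<T> baseRunner, SegmentDescriptor descriptor) {
        // SpecificSegmentQueryRunner re-applies the spec to each incoming query and, if the
        // segment goes missing mid-query, records the descriptor in the response context
        // rather than failing the whole query.
        return new SpecificSegmentQueryRunner<>(baseRunner, new SpecificSegmentSpec(descriptor));
    }
}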

Example 3 with SpecificSegmentSpec

Use of io.druid.query.spec.SpecificSegmentSpec in project druid by druid-io, from the class QueryRunnerTestHelper, method makeFilteringQueryRunner:

public static <T> QueryRunner<T> makeFilteringQueryRunner(final VersionedIntervalTimeline<String, Segment> timeline, final QueryRunnerFactory<T, Query<T>> factory) {
    final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
    return new FluentQueryRunnerBuilder<T>(toolChest).create(new QueryRunner<T>() {

        @Override
        public Sequence<T> run(Query<T> query, Map<String, Object> responseContext) {
            List<TimelineObjectHolder<String, Segment>> segments = Lists.newArrayList();
            for (Interval interval : query.getIntervals()) {
                segments.addAll(timeline.lookup(interval));
            }
            List<Sequence<T>> sequences = Lists.newArrayList();
            for (TimelineObjectHolder<String, Segment> holder : toolChest.filterSegments(query, segments)) {
                Segment segment = holder.getObject().getChunk(0).getObject();
                Query<T> running = query.withQuerySegmentSpec(new SpecificSegmentSpec(new SegmentDescriptor(holder.getInterval(), holder.getVersion(), 0)));
                sequences.add(factory.createRunner(segment).run(running, responseContext));
            }
            return new MergeSequence<>(query.getResultOrdering(), Sequences.simple(sequences));
        }
    }).applyPreMergeDecoration().mergeResults().applyPostMergeDecoration();
}
Also used: TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery), Sequence (io.druid.java.util.common.guava.Sequence), MergeSequence (io.druid.java.util.common.guava.MergeSequence), IncrementalIndexSegment (io.druid.segment.IncrementalIndexSegment), QueryableIndexSegment (io.druid.segment.QueryableIndexSegment), Segment (io.druid.segment.Segment), TimelineObjectHolder (io.druid.timeline.TimelineObjectHolder), SpecificSegmentSpec (io.druid.query.spec.SpecificSegmentSpec), Map (java.util.Map), ImmutableMap (com.google.common.collect.ImmutableMap), Interval (org.joda.time.Interval)
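
A hedged usage sketch for the helper above: register one segment in a timeline, wrap a factory, and run a query through the filtering runner. The segment, factory, and query parameters are assumed to come from the surrounding test fixtures, and the version string "v1" is arbitrary:

import com.google.common.collect.Ordering;
import io.druid.java.util.common.guava.Sequence;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.segment.Segment;
import io.druid.timeline.VersionedIntervalTimeline;
import io.druid.timeline.partition.SingleElementPartitionChunk;
import java.util.HashMap;

public class FilteringRunnerExample {
    // Assumes it lives beside QueryRunnerTestHelper in the io.druid.query test sources.
    public static <T> Sequence<T> runFiltered(Segment segment, QueryRunnerFactory<T, Query<T>> factory, Query<T> query) {
        VersionedIntervalTimeline<String, Segment> timeline = new VersionedIntervalTimeline<>(Ordering.<String>natural());
        // Register the segment under version "v1" as the single chunk of its data interval.
        timeline.add(segment.getDataInterval(), "v1", new SingleElementPartitionChunk<Segment>(segment));
        QueryRunner<T> runner = QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);
        // Two-argument run(), matching the signature used inside the helper.
        return runner.run(query, new HashMap<String, Object>());
    }
}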

Example 4 with SpecificSegmentSpec

Use of io.druid.query.spec.SpecificSegmentSpec in project druid by druid-io, from the class DumpSegment, method runMetadata:

private void runMetadata(final Injector injector, final QueryableIndex index) throws IOException {
    final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class)).copy().configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    final SegmentMetadataQuery query = new SegmentMetadataQuery(new TableDataSource("dataSource"), new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)), new ListColumnIncluderator(getColumnsToInclude(index)), false, null, EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class), false, false);
    withOutputStream(new Function<OutputStream, Object>() {

        @Override
        public Object apply(final OutputStream out) {
            evaluateSequenceForSideEffects(Sequences.map(executeQuery(injector, index, query), new Function<SegmentAnalysis, Object>() {

                @Override
                public Object apply(SegmentAnalysis analysis) {
                    try {
                        objectMapper.writeValue(out, analysis);
                    } catch (IOException e) {
                        throw Throwables.propagate(e);
                    }
                    return null;
                }
            }));
            return null;
        }
    });
}
Also used: ListColumnIncluderator (io.druid.query.metadata.metadata.ListColumnIncluderator), OutputStream (java.io.OutputStream), FileOutputStream (java.io.FileOutputStream), Json (io.druid.guice.annotations.Json), IOException (java.io.IOException), TableDataSource (io.druid.query.TableDataSource), SpecificSegmentSpec (io.druid.query.spec.SpecificSegmentSpec), SegmentMetadataQuery (io.druid.query.metadata.metadata.SegmentMetadataQuery), SegmentDescriptor (io.druid.query.SegmentDescriptor), SegmentAnalysis (io.druid.query.metadata.metadata.SegmentAnalysis), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)
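
The same SegmentMetadataQuery construction, with each positional argument labeled for readability; the parameter names are inferred from the call above rather than quoted from the class, so treat this as a sketch:

final SegmentMetadataQuery query = new SegmentMetadataQuery(
        new TableDataSource("dataSource"),                       // dataSource; the name is a placeholder in DumpSegment
        new SpecificSegmentSpec(                                 // pin the query to the single segment being dumped
                new SegmentDescriptor(index.getDataInterval(), "0", 0)),
        new ListColumnIncluderator(getColumnsToInclude(index)),  // columns to analyze
        false,                                                   // merge
        null,                                                    // context
        EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class),  // run every analysis type
        false,                                                   // usingDefaultInterval
        false                                                    // lenientAggregatorMerge
);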

Aggregations

SpecificSegmentSpec (io.druid.query.spec.SpecificSegmentSpec): 4
SegmentDescriptor (io.druid.query.SegmentDescriptor): 3
Function (com.google.common.base.Function): 2
Query (io.druid.query.Query): 2
TableDataSource (io.druid.query.TableDataSource): 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
ImmutableMap (com.google.common.collect.ImmutableMap): 1
ServiceMetricEvent (com.metamx.emitter.service.ServiceMetricEvent): 1
CachingQueryRunner (io.druid.client.CachingQueryRunner): 1
Json (io.druid.guice.annotations.Json): 1
ISE (io.druid.java.util.common.ISE): 1
Pair (io.druid.java.util.common.Pair): 1
MergeSequence (io.druid.java.util.common.guava.MergeSequence): 1
Sequence (io.druid.java.util.common.guava.Sequence): 1
BySegmentQueryRunner (io.druid.query.BySegmentQueryRunner): 1
CPUTimeMetricQueryRunner (io.druid.query.CPUTimeMetricQueryRunner): 1
MetricsEmittingQueryRunner (io.druid.query.MetricsEmittingQueryRunner): 1
NoopQueryRunner (io.druid.query.NoopQueryRunner): 1
QueryRunner (io.druid.query.QueryRunner): 1
ReportTimelineMissingSegmentQueryRunner (io.druid.query.ReportTimelineMissingSegmentQueryRunner): 1