Use of io.druid.query.spec.SpecificSegmentSpec in project druid by druid-io.
The class RealtimeManager, method getQueryRunnerForSegments.
@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(final Query<T> query, final Iterable<SegmentDescriptor> specs)
{
  final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
  final Map<Integer, FireChief> partitionChiefs =
      chiefs.get(Iterables.getOnlyElement(query.getDataSource().getNames()));

  return partitionChiefs == null
         ? new NoopQueryRunner<T>()
         : factory.getToolchest().mergeResults(
             factory.mergeRunners(
                 MoreExecutors.sameThreadExecutor(),
                 Iterables.transform(
                     specs,
                     new Function<SegmentDescriptor, QueryRunner<T>>()
                     {
                       @Override
                       public QueryRunner<T> apply(SegmentDescriptor spec)
                       {
                         final FireChief retVal = partitionChiefs.get(spec.getPartitionNumber());
                         return retVal == null
                                ? new NoopQueryRunner<T>()
                                : retVal.getQueryRunner(query.withQuerySegmentSpec(new SpecificSegmentSpec(spec)));
                       }
                     }
                 )
             )
         );
}
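For reference, a minimal sketch (not taken from RealtimeManager) of how a per-partition SpecificSegmentSpec is assembled; the class name, interval, version, and partition values below are illustrative assumptions, not part of the Druid source.

// Hedged sketch: wraps a SegmentDescriptor in a SpecificSegmentSpec so that
// query.withQuerySegmentSpec(...) targets exactly one partition, as above.
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.query.spec.SpecificSegmentSpec;
import org.joda.time.Interval;

public class SpecificSegmentSpecSketch
{
  public static QuerySegmentSpec specFor(Interval interval, String version, int partition)
  {
    // The descriptor pins the (interval, version, partitionNumber) triple.
    return new SpecificSegmentSpec(new SegmentDescriptor(interval, version, partition));
  }

  public static void main(String[] args)
  {
    // Example values only; real descriptors come from the timeline/partition lookup.
    QuerySegmentSpec spec = specFor(new Interval("2017-01-01/2017-01-02"), "v1", 0);
    System.out.println(spec.getIntervals());
  }
}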
Use of io.druid.query.spec.SpecificSegmentSpec in project druid by druid-io.
The class SinkQuerySegmentWalker, method getQueryRunnerForSegments.
@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(final Query<T> query, final Iterable<SegmentDescriptor> specs)
{
  // We only handle one particular dataSource. Make sure that's what we have, then ignore from here on out.
  if (!(query.getDataSource() instanceof TableDataSource)
      || !dataSource.equals(((TableDataSource) query.getDataSource()).getName())) {
    log.makeAlert("Received query for unknown dataSource").addData("dataSource", query.getDataSource()).emit();
    return new NoopQueryRunner<>();
  }

  final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
  if (factory == null) {
    throw new ISE("Unknown query type[%s].", query.getClass());
  }

  final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
  final Function<Query<T>, ServiceMetricEvent.Builder> builderFn =
      new Function<Query<T>, ServiceMetricEvent.Builder>()
      {
        @Override
        public ServiceMetricEvent.Builder apply(@Nullable Query<T> input)
        {
          return toolChest.makeMetricBuilder(query);
        }
      };
  final boolean skipIncrementalSegment = query.getContextValue(CONTEXT_SKIP_INCREMENTAL_SEGMENT, false);
  final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);

  return CPUTimeMetricQueryRunner.safeBuild(
      toolChest.mergeResults(
          factory.mergeRunners(
              queryExecutorService,
              FunctionalIterable.create(specs).transform(
                  new Function<SegmentDescriptor, QueryRunner<T>>()
                  {
                    @Override
                    public QueryRunner<T> apply(final SegmentDescriptor descriptor)
                    {
                      final PartitionHolder<Sink> holder = sinkTimeline.findEntry(
                          descriptor.getInterval(),
                          descriptor.getVersion()
                      );
                      if (holder == null) {
                        return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
                      }

                      final PartitionChunk<Sink> chunk = holder.getChunk(descriptor.getPartitionNumber());
                      if (chunk == null) {
                        return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
                      }

                      final Sink theSink = chunk.getObject();
                      final String sinkSegmentIdentifier = theSink.getSegment().getIdentifier();

                      return new SpecificSegmentQueryRunner<>(
                          withPerSinkMetrics(
                              new BySegmentQueryRunner<>(
                                  sinkSegmentIdentifier,
                                  descriptor.getInterval().getStart(),
                                  factory.mergeRunners(
                                      MoreExecutors.sameThreadExecutor(),
                                      Iterables.transform(
                                          theSink,
                                          new Function<FireHydrant, QueryRunner<T>>()
                                          {
                                            @Override
                                            public QueryRunner<T> apply(final FireHydrant hydrant)
                                            {
                                              // Hydrant might swap at any point, but if it's swapped at the start
                                              // then we know it's *definitely* swapped.
                                              final boolean hydrantDefinitelySwapped = hydrant.hasSwapped();

                                              if (skipIncrementalSegment && !hydrantDefinitelySwapped) {
                                                return new NoopQueryRunner<>();
                                              }

                                              // Prevent the underlying segment from swapping while it is being iterated.
                                              final Pair<Segment, Closeable> segment = hydrant.getAndIncrementSegment();
                                              try {
                                                QueryRunner<T> baseRunner = QueryRunnerHelper.makeClosingQueryRunner(
                                                    factory.createRunner(segment.lhs),
                                                    segment.rhs
                                                );

                                                // Hydrants are not the same between replicas, so only cache if the cache is local.
                                                if (hydrantDefinitelySwapped && cache.isLocal()) {
                                                  return new CachingQueryRunner<>(
                                                      makeHydrantCacheIdentifier(hydrant),
                                                      descriptor,
                                                      objectMapper,
                                                      cache,
                                                      toolChest,
                                                      baseRunner,
                                                      MoreExecutors.sameThreadExecutor(),
                                                      cacheConfig
                                                  );
                                                } else {
                                                  return baseRunner;
                                                }
                                              }
                                              catch (RuntimeException e) {
                                                CloseQuietly.close(segment.rhs);
                                                throw e;
                                              }
                                            }
                                          }
                                      )
                                  )
                              ),
                              builderFn,
                              sinkSegmentIdentifier,
                              cpuTimeAccumulator
                          ),
                          new SpecificSegmentSpec(descriptor)
                      );
                    }
                  }
              )
          )
      ),
      builderFn,
      emitter,
      cpuTimeAccumulator,
      true
  );
}
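The per-sink wiring above can be summarized in a small hedged sketch; wrapForDescriptor is a hypothetical helper, not a method of SinkQuerySegmentWalker, and it only restates how SpecificSegmentQueryRunner and SpecificSegmentSpec are combined in the return statement above.

// Hedged sketch (hypothetical helper): attaches the segment identity to a base runner
// so that results and failures can be attributed to this (interval, version, partition).
static <T> QueryRunner<T> wrapForDescriptor(QueryRunner<T> baseRunner, SegmentDescriptor descriptor)
{
  return new SpecificSegmentQueryRunner<>(baseRunner, new SpecificSegmentSpec(descriptor));
}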
Use of io.druid.query.spec.SpecificSegmentSpec in project druid by druid-io.
The class QueryRunnerTestHelper, method makeFilteringQueryRunner.
public static <T> QueryRunner<T> makeFilteringQueryRunner(
    final VersionedIntervalTimeline<String, Segment> timeline,
    final QueryRunnerFactory<T, Query<T>> factory
)
{
  final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
  return new FluentQueryRunnerBuilder<T>(toolChest)
      .create(
          new QueryRunner<T>()
          {
            @Override
            public Sequence<T> run(Query<T> query, Map<String, Object> responseContext)
            {
              List<TimelineObjectHolder> segments = Lists.newArrayList();
              for (Interval interval : query.getIntervals()) {
                segments.addAll(timeline.lookup(interval));
              }
              List<Sequence<T>> sequences = Lists.newArrayList();
              for (TimelineObjectHolder<String, Segment> holder : toolChest.filterSegments(query, segments)) {
                Segment segment = holder.getObject().getChunk(0).getObject();
                Query running = query.withQuerySegmentSpec(
                    new SpecificSegmentSpec(new SegmentDescriptor(holder.getInterval(), holder.getVersion(), 0))
                );
                sequences.add(factory.createRunner(segment).run(running, responseContext));
              }
              return new MergeSequence<>(query.getResultOrdering(), Sequences.simple(sequences));
            }
          }
      )
      .applyPreMergeDecoration()
      .mergeResults()
      .applyPostMergeDecoration();
}
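A hedged usage sketch for the helper above; the timeline, factory, and query variables are assumed to be built elsewhere in the test, and the Timeseries result types are only one example of what T might be.

// Hedged sketch: how a test might invoke the filtering runner built above.
QueryRunner<Result<TimeseriesResultValue>> runner =
    QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);  // assumed test fixtures
Sequence<Result<TimeseriesResultValue>> results =
    runner.run(query, new HashMap<String, Object>());                   // responseContext map, as in run() above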
Use of io.druid.query.spec.SpecificSegmentSpec in project druid by druid-io.
The class DumpSegment, method runMetadata.
private void runMetadata(final Injector injector, final QueryableIndex index) throws IOException
{
  final ObjectMapper objectMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class))
                                            .copy()
                                            .configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
  final SegmentMetadataQuery query = new SegmentMetadataQuery(
      new TableDataSource("dataSource"),
      new SpecificSegmentSpec(new SegmentDescriptor(index.getDataInterval(), "0", 0)),
      new ListColumnIncluderator(getColumnsToInclude(index)),
      false,
      null,
      EnumSet.allOf(SegmentMetadataQuery.AnalysisType.class),
      false,
      false
  );
  withOutputStream(
      new Function<OutputStream, Object>()
      {
        @Override
        public Object apply(final OutputStream out)
        {
          evaluateSequenceForSideEffects(
              Sequences.map(
                  executeQuery(injector, index, query),
                  new Function<SegmentAnalysis, Object>()
                  {
                    @Override
                    public Object apply(SegmentAnalysis analysis)
                    {
                      try {
                        objectMapper.writeValue(out, analysis);
                      }
                      catch (IOException e) {
                        throw Throwables.propagate(e);
                      }
                      return null;
                    }
                  }
              )
          );
          return null;
        }
      }
  );
}
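One detail worth noting above is the AUTO_CLOSE_TARGET setting on the copied ObjectMapper. A minimal, hedged sketch of the behavior it buys (plain Jackson, not Druid code; the class name and output path are hypothetical). Presumably DumpSegment disables it because withOutputStream may hand back a stream the tool does not own.

// Hedged sketch: with AUTO_CLOSE_TARGET off, Jackson leaves the target stream open
// after writeValue(), so the caller decides when the stream is closed and can write
// several values to it.
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class AutoCloseTargetSketch
{
  public static void main(String[] args) throws IOException
  {
    ObjectMapper mapper = new ObjectMapper()
        .configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
    try (OutputStream out = new FileOutputStream("analysis.json")) {  // hypothetical path
      mapper.writeValue(out, "first analysis");   // stream stays open after this call
      out.write('\n');
      mapper.writeValue(out, "second analysis");  // so another value can follow
    }
  }
}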