
Example 26 with QueryToolChest

Use of io.druid.query.QueryToolChest in the druid project by druid-io.

From the class TimeseriesBenchmark, the method queryMultiQueryableIndex:

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception {
    List<QueryRunner<Result<TimeseriesResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
        String segmentName = "qIndex" + i;
        QueryRunner<Result<TimeseriesResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, segmentName, new QueryableIndexSegment(segmentName, qIndexes.get(i)));
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }
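    // Merge the per-segment runners, finalize aggregator results, then apply the toolchest's post-merge decoration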
    QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>(toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), toolChest));
    Sequence<Result<TimeseriesResultValue>> queryResult = theRunner.run(query, Maps.<String, Object>newHashMap());
    List<Result<TimeseriesResultValue>> results = Sequences.toList(queryResult, Lists.<Result<TimeseriesResultValue>>newArrayList());
    for (Result<TimeseriesResultValue> result : results) {
        blackhole.consume(result);
    }
}
Also used : QueryableIndexSegment(io.druid.segment.QueryableIndexSegment) TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryToolChest(io.druid.query.QueryToolChest) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) Result(io.druid.query.Result) BenchmarkMode(org.openjdk.jmh.annotations.BenchmarkMode) Benchmark(org.openjdk.jmh.annotations.Benchmark) OutputTimeUnit(org.openjdk.jmh.annotations.OutputTimeUnit)
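
For context, JMH benchmarks like the one above are usually launched from a small main method. The sketch below assumes the standard JMH runner API and a single-fork run; it is an illustration, not code from the Druid source shown here.

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public static void main(String[] args) throws RunnerException {
    // Select all @Benchmark methods of TimeseriesBenchmark and run them in one fork
    Options opt = new OptionsBuilder()
            .include(TimeseriesBenchmark.class.getSimpleName())
            .forks(1)
            .build();
    new Runner(opt).run();
}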

Example 27 with QueryToolChest

Use of io.druid.query.QueryToolChest in the druid project by druid-io.

From the class TopNBenchmark, the method queryMultiQueryableIndex:

@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void queryMultiQueryableIndex(Blackhole blackhole) throws Exception {
    List<QueryRunner<Result<TopNResultValue>>> singleSegmentRunners = Lists.newArrayList();
    QueryToolChest toolChest = factory.getToolchest();
    for (int i = 0; i < numSegments; i++) {
        String segmentName = "qIndex" + i;
        QueryRunner<Result<TopNResultValue>> runner = QueryBenchmarkUtil.makeQueryRunner(factory, segmentName, new QueryableIndexSegment(segmentName, qIndexes.get(i)));
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }
    QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>(toolChest.mergeResults(factory.mergeRunners(executorService, singleSegmentRunners)), toolChest));
    Sequence<Result<TopNResultValue>> queryResult = theRunner.run(query, Maps.<String, Object>newHashMap());
    List<Result<TopNResultValue>> results = Sequences.toList(queryResult, Lists.<Result<TopNResultValue>>newArrayList());
    for (Result<TopNResultValue> result : results) {
        blackhole.consume(result);
    }
}
Also used : TopNResultValue(io.druid.query.topn.TopNResultValue) QueryableIndexSegment(io.druid.segment.QueryableIndexSegment) TopNQueryQueryToolChest(io.druid.query.topn.TopNQueryQueryToolChest) QueryToolChest(io.druid.query.QueryToolChest) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) Result(io.druid.query.Result) BenchmarkMode(org.openjdk.jmh.annotations.BenchmarkMode) Benchmark(org.openjdk.jmh.annotations.Benchmark) OutputTimeUnit(org.openjdk.jmh.annotations.OutputTimeUnit)

Example 28 with QueryToolChest

Use of io.druid.query.QueryToolChest in the druid project by druid-io.

From the class QueryResource, the method doPost:

@POST
@Produces({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE })
@Consumes({ MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE })
public Response doPost(InputStream in, @QueryParam("pretty") String pretty, // used to get request content-type, remote address and AuthorizationInfo
@Context final HttpServletRequest req) throws IOException {
    final long start = System.currentTimeMillis();
    Query query = null;
    QueryToolChest toolChest = null;
    String queryId = null;
    final ResponseContext context = createContext(req.getContentType(), pretty != null);
    final String currThreadName = Thread.currentThread().getName();
    try {
        query = context.getObjectMapper().readValue(in, Query.class);
        queryId = query.getId();
        if (queryId == null) {
            queryId = UUID.randomUUID().toString();
            query = query.withId(queryId);
        }
        if (query.getContextValue(QueryContextKeys.TIMEOUT) == null) {
            query = query.withOverriddenContext(ImmutableMap.of(QueryContextKeys.TIMEOUT, config.getMaxIdleTime().toStandardDuration().getMillis()));
        }
        toolChest = warehouse.getToolChest(query);
        Thread.currentThread().setName(String.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId));
        if (log.isDebugEnabled()) {
            log.debug("Got query [%s]", query);
        }
        if (authConfig.isEnabled()) {
            // This is an experimental feature, see - https://github.com/druid-io/druid/pull/2424
            AuthorizationInfo authorizationInfo = (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN);
            if (authorizationInfo != null) {
                for (String dataSource : query.getDataSource().getNames()) {
                    Access authResult = authorizationInfo.isAuthorized(new Resource(dataSource, ResourceType.DATASOURCE), Action.READ);
                    if (!authResult.isAllowed()) {
                        return Response.status(Response.Status.FORBIDDEN).header("Access-Check-Result", authResult).build();
                    }
                }
            } else {
                throw new ISE("WTF?! Security is enabled but no authorization info found in the request");
            }
        }
        String prevEtag = req.getHeader(HDR_IF_NONE_MATCH);
        if (prevEtag != null) {
            query = query.withOverriddenContext(ImmutableMap.of(HDR_IF_NONE_MATCH, prevEtag));
        }
        final Map<String, Object> responseContext = new MapMaker().makeMap();
        final Sequence res = query.run(texasRanger, responseContext);
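        // If the client supplied If-None-Match and it equals the ETag the runners placed in the response context, return 304 without serializing results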
        if (prevEtag != null && prevEtag.equals(responseContext.get(HDR_ETAG))) {
            return Response.notModified().build();
        }
        final Sequence results;
        if (res == null) {
            results = Sequences.empty();
        } else {
            results = res;
        }
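        // Wrap the results in a Yielder so they can be streamed to the client incrementally as they are serialized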
        final Yielder yielder = Yielders.each(results);
        try {
            final Query theQuery = query;
            final QueryToolChest theToolChest = toolChest;
            final ObjectWriter jsonWriter = context.newOutputWriter();
            Response.ResponseBuilder builder = Response.ok(new StreamingOutput() {

                @Override
                public void write(OutputStream outputStream) throws IOException, WebApplicationException {
                    try {
                        // json serializer will always close the yielder
                        CountingOutputStream os = new CountingOutputStream(outputStream);
                        jsonWriter.writeValue(os, yielder);
                        // Some types of OutputStream suppress flush errors in the .close() method.
                        os.flush();
                        os.close();
                        successfulQueryCount.incrementAndGet();
                        final long queryTime = System.currentTimeMillis() - start;
                        emitter.emit(DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr()).setDimension("success", "true").build("query/time", queryTime));
                        emitter.emit(DruidMetrics.makeQueryTimeMetric(theToolChest, jsonMapper, theQuery, req.getRemoteAddr()).build("query/bytes", os.getCount()));
                        requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), theQuery, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "query/bytes", os.getCount(), "success", true))));
                    } finally {
                        Thread.currentThread().setName(currThreadName);
                    }
                }
            }, context.getContentType()).header("X-Druid-Query-Id", queryId);
            if (responseContext.get(HDR_ETAG) != null) {
                builder.header(HDR_ETAG, responseContext.get(HDR_ETAG));
                responseContext.remove(HDR_ETAG);
            }
            //Limit the response-context header, see https://github.com/druid-io/druid/issues/2331
            //Note that Response.ResponseBuilder.header(String key,Object value).build() calls value.toString()
            //and encodes the string using ASCII, so 1 char is = 1 byte
            String responseCtxString = jsonMapper.writeValueAsString(responseContext);
            if (responseCtxString.length() > RESPONSE_CTX_HEADER_LEN_LIMIT) {
                log.warn("Response Context truncated for id [%s] . Full context is [%s].", queryId, responseCtxString);
                responseCtxString = responseCtxString.substring(0, RESPONSE_CTX_HEADER_LEN_LIMIT);
            }
            return builder.header("X-Druid-Response-Context", responseCtxString).build();
        } catch (Exception e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder.close();
            throw Throwables.propagate(e);
        } finally {
        // do not close yielder here, since we do not want to close the yielder prior to
        // StreamingOutput having iterated over all the results
        }
    } catch (QueryInterruptedException e) {
        try {
            log.warn(e, "Exception while processing queryId [%s]", queryId);
            interruptedQueryCount.incrementAndGet();
            final long queryTime = System.currentTimeMillis() - start;
            emitter.emit(DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr()).setDimension("success", "false").build("query/time", queryTime));
            requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), query, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "interrupted", true, "reason", e.toString()))));
        } catch (Exception e2) {
            log.error(e2, "Unable to log query [%s]!", query);
        }
        return context.gotError(e);
    } catch (Exception e) {
        // Input stream has already been consumed by the json object mapper if query == null
        final String queryString = query == null ? "unparsable query" : query.toString();
        log.warn(e, "Exception occurred on request [%s]", queryString);
        failedQueryCount.incrementAndGet();
        try {
            final long queryTime = System.currentTimeMillis() - start;
            emitter.emit(DruidMetrics.makeQueryTimeMetric(toolChest, jsonMapper, query, req.getRemoteAddr()).setDimension("success", "false").build("query/time", queryTime));
            requestLogger.log(new RequestLogLine(new DateTime(start), req.getRemoteAddr(), query, new QueryStats(ImmutableMap.<String, Object>of("query/time", queryTime, "success", false, "exception", e.toString()))));
        } catch (Exception e2) {
            log.error(e2, "Unable to log query [%s]!", queryString);
        }
        log.makeAlert(e, "Exception handling request").addData("exception", e.toString()).addData("query", queryString).addData("peer", req.getRemoteAddr()).emit();
        return context.gotError(e);
    } finally {
        Thread.currentThread().setName(currThreadName);
    }
}
Also used : Query(io.druid.query.Query) CountingOutputStream(com.google.common.io.CountingOutputStream) OutputStream(java.io.OutputStream) Access(io.druid.server.security.Access) StreamingOutput(javax.ws.rs.core.StreamingOutput) QueryToolChest(io.druid.query.QueryToolChest) DateTime(org.joda.time.DateTime) CountingOutputStream(com.google.common.io.CountingOutputStream) ISE(io.druid.java.util.common.ISE) QueryInterruptedException(io.druid.query.QueryInterruptedException) Yielder(io.druid.java.util.common.guava.Yielder) Resource(io.druid.server.security.Resource) MapMaker(com.google.common.collect.MapMaker) ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) Sequence(io.druid.java.util.common.guava.Sequence) AuthorizationInfo(io.druid.server.security.AuthorizationInfo) WebApplicationException(javax.ws.rs.WebApplicationException) QueryInterruptedException(io.druid.query.QueryInterruptedException) IOException(java.io.IOException) Response(javax.ws.rs.core.Response) POST(javax.ws.rs.POST) Produces(javax.ws.rs.Produces) Consumes(javax.ws.rs.Consumes)
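
For illustration, a minimal client for this endpoint might look like the sketch below. The broker address localhost:8082, the /druid/v2/ path, and the wikipedia data source are assumptions for the example and are not taken from this listing.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

public static void main(String[] args) throws Exception {
    // POST a native JSON query to the resource handled by QueryResource.doPost (assumed broker URL)
    URL url = new URL("http://localhost:8082/druid/v2/?pretty");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setDoOutput(true);
    // Example timeseries query; dataSource and interval are placeholders
    String queryJson = "{\"queryType\":\"timeseries\",\"dataSource\":\"wikipedia\","
        + "\"granularity\":\"all\",\"intervals\":[\"2013-01-01/2013-01-02\"],"
        + "\"aggregations\":[{\"type\":\"count\",\"name\":\"rows\"}]}";
    try (OutputStream os = conn.getOutputStream()) {
        os.write(queryJson.getBytes(StandardCharsets.UTF_8));
    }
    // doPost sets these headers on the response (see the code above)
    System.out.println("X-Druid-Query-Id: " + conn.getHeaderField("X-Druid-Query-Id"));
    System.out.println("X-Druid-Response-Context: " + conn.getHeaderField("X-Druid-Response-Context"));
    try (Scanner scanner = new Scanner(conn.getInputStream(), StandardCharsets.UTF_8.name())) {
        System.out.println(scanner.useDelimiter("\\A").next());
    }
}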

Example 29 with QueryToolChest

Use of io.druid.query.QueryToolChest in the druid project by druid-io.

From the class RealtimeIndexTaskTest, the method makeToolbox:

private TaskToolbox makeToolbox(final Task task, final TaskStorage taskStorage, final IndexerMetadataStorageCoordinator mdc, final File directory) {
    final TaskConfig taskConfig = new TaskConfig(directory.getPath(), null, null, 50000, null, false, null, null);
    final TaskLockbox taskLockbox = new TaskLockbox(taskStorage);
    try {
        taskStorage.insert(task, TaskStatus.running(task.getId()));
    } catch (EntryExistsException e) {
    // suppress
    }
    taskLockbox.syncFromStorage();
    final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, mdc, emitter, EasyMock.createMock(SupervisorManager.class));
    final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage, taskActionToolbox);
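    // Register a timeseries query runner factory whose interval-chunking decorator is a pass-through, so test queries run against the delegate directly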
    final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(ImmutableMap.<Class<? extends Query>, QueryRunnerFactory>of(TimeseriesQuery.class, new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(new IntervalChunkingQueryRunnerDecorator(null, null, null) {

        @Override
        public <T> QueryRunner<T> decorate(QueryRunner<T> delegate, QueryToolChest<T, ? extends Query<T>> toolChest) {
            return delegate;
        }
    }), new TimeseriesQueryEngine(), new QueryWatcher() {

        @Override
        public void registerQuery(Query query, ListenableFuture future) {
        // do nothing
        }
    })));
    handOffCallbacks = Maps.newConcurrentMap();
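    // Hand-off notifier that records hand-off callbacks in handOffCallbacks instead of waiting for real segment hand-off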
    final SegmentHandoffNotifierFactory handoffNotifierFactory = new SegmentHandoffNotifierFactory() {

        @Override
        public SegmentHandoffNotifier createSegmentHandoffNotifier(String dataSource) {
            return new SegmentHandoffNotifier() {

                @Override
                public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
                    handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
                    return true;
                }

                @Override
                public void start() {
                //Noop
                }

                @Override
                public void close() {
                //Noop
                }

                Map<SegmentDescriptor, Pair<Executor, Runnable>> getHandOffCallbacks() {
                    return handOffCallbacks;
                }
            };
        }
    };
    final TestUtils testUtils = new TestUtils();
    final TaskToolboxFactory toolboxFactory = new TaskToolboxFactory(taskConfig, taskActionClientFactory, emitter, new TestDataSegmentPusher(), new TestDataSegmentKiller(), // DataSegmentMover
    null, // DataSegmentArchiver
    null, new TestDataSegmentAnnouncer(), handoffNotifierFactory, conglomerate, // queryExecutorService
    MoreExecutors.sameThreadExecutor(), EasyMock.createMock(MonitorScheduler.class), new SegmentLoaderFactory(new SegmentLoaderLocalCacheManager(null, new SegmentLoaderConfig() {

        @Override
        public List<StorageLocationConfig> getLocations() {
            return Lists.newArrayList();
        }
    }, testUtils.getTestObjectMapper())), testUtils.getTestObjectMapper(), testUtils.getTestIndexMerger(), testUtils.getTestIndexIO(), MapCache.create(1024), new CacheConfig(), testUtils.getTestIndexMergerV9());
    return toolboxFactory.build(task);
}
Also used : TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) QueryWatcher(io.druid.query.QueryWatcher) LocalTaskActionClientFactory(io.druid.indexing.common.actions.LocalTaskActionClientFactory) TaskActionClientFactory(io.druid.indexing.common.actions.TaskActionClientFactory) TestDataSegmentAnnouncer(io.druid.indexing.test.TestDataSegmentAnnouncer) TaskConfig(io.druid.indexing.common.config.TaskConfig) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryToolChest(io.druid.query.QueryToolChest) TimeseriesQueryQueryToolChest(io.druid.query.timeseries.TimeseriesQueryQueryToolChest) TestUtils(io.druid.indexing.common.TestUtils) DefaultQueryRunnerFactoryConglomerate(io.druid.query.DefaultQueryRunnerFactoryConglomerate) QueryRunnerFactoryConglomerate(io.druid.query.QueryRunnerFactoryConglomerate) TimeseriesQueryEngine(io.druid.query.timeseries.TimeseriesQueryEngine) Executor(java.util.concurrent.Executor) TaskToolboxFactory(io.druid.indexing.common.TaskToolboxFactory) SegmentDescriptor(io.druid.query.SegmentDescriptor) TaskActionToolbox(io.druid.indexing.common.actions.TaskActionToolbox) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) LocalTaskActionClientFactory(io.druid.indexing.common.actions.LocalTaskActionClientFactory) IntervalChunkingQueryRunnerDecorator(io.druid.query.IntervalChunkingQueryRunnerDecorator) SegmentLoaderConfig(io.druid.segment.loading.SegmentLoaderConfig) SegmentLoaderFactory(io.druid.indexing.common.SegmentLoaderFactory) SegmentLoaderLocalCacheManager(io.druid.segment.loading.SegmentLoaderLocalCacheManager) CacheConfig(io.druid.client.cache.CacheConfig) TestDataSegmentPusher(io.druid.indexing.test.TestDataSegmentPusher) Pair(io.druid.java.util.common.Pair) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) MonitorScheduler(com.metamx.metrics.MonitorScheduler) SegmentHandoffNotifier(io.druid.segment.realtime.plumber.SegmentHandoffNotifier) DefaultQueryRunnerFactoryConglomerate(io.druid.query.DefaultQueryRunnerFactoryConglomerate) EntryExistsException(io.druid.metadata.EntryExistsException) QueryRunner(io.druid.query.QueryRunner) TestDataSegmentKiller(io.druid.indexing.test.TestDataSegmentKiller) SegmentHandoffNotifierFactory(io.druid.segment.realtime.plumber.SegmentHandoffNotifierFactory) SupervisorManager(io.druid.indexing.overlord.supervisor.SupervisorManager) TimeseriesQueryRunnerFactory(io.druid.query.timeseries.TimeseriesQueryRunnerFactory) TaskLockbox(io.druid.indexing.overlord.TaskLockbox) ListenableFuture(com.google.common.util.concurrent.ListenableFuture)

Example 30 with QueryToolChest

Use of io.druid.query.QueryToolChest in the druid project by druid-io.

From the class GroupByQueryRunnerTest, the method testBySegmentResultsUnOptimizedDimextraction:

@Test
public void testBySegmentResultsUnOptimizedDimextraction() {
    int segmentCount = 32;
    Result<BySegmentResultValue> singleSegmentResult = new Result<BySegmentResultValue>(new DateTime("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass(Arrays.asList(GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine0", "rows", 6L, "idx", 4420L)), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")));
    List<Result> bySegmentResults = Lists.newArrayList();
    for (int i = 0; i < segmentCount; i++) {
        bySegmentResults.add(singleSegmentResult);
    }
    GroupByQuery.Builder builder = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource).setInterval("2011-04-02/2011-04-04").setDimensions(Lists.<DimensionSpec>newArrayList(new ExtractionDimensionSpec("quality", "alias", new LookupExtractionFn(new MapLookupExtractor(ImmutableMap.of("mezzanine", "mezzanine0"), false), false, null, false, false)))).setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index"))).setGranularity(new PeriodGranularity(new Period("P1M"), null, null)).setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)).setContext(ImmutableMap.<String, Object>of("bySegment", true));
    final GroupByQuery fullQuery = builder.build();
    QueryToolChest toolChest = factory.getToolchest();
    List<QueryRunner<Row>> singleSegmentRunners = Lists.newArrayList();
    for (int i = 0; i < segmentCount; i++) {
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }
    ExecutorService exec = Executors.newCachedThreadPool();
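    // Merge the decorated per-segment runners (note that a fresh cached pool is passed inline below); with bySegment=true the merged result keeps per-segment groupings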
    QueryRunner theRunner = toolChest.postMergeQueryDecoration(new FinalizeResultsQueryRunner<>(toolChest.mergeResults(factory.mergeRunners(Executors.newCachedThreadPool(), singleSegmentRunners)), toolChest));
    TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(fullQuery, Maps.newHashMap()), "");
    exec.shutdownNow();
}
Also used : BySegmentResultValue(io.druid.query.BySegmentResultValue) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) PeriodGranularity(io.druid.java.util.common.granularity.PeriodGranularity) BySegmentResultValueClass(io.druid.query.BySegmentResultValueClass) Period(org.joda.time.Period) QueryToolChest(io.druid.query.QueryToolChest) DateTime(org.joda.time.DateTime) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) Result(io.druid.query.Result) LookupExtractionFn(io.druid.query.lookup.LookupExtractionFn) SelectorDimFilter(io.druid.query.filter.SelectorDimFilter) ExecutorService(java.util.concurrent.ExecutorService) MapLookupExtractor(io.druid.query.extraction.MapLookupExtractor) Interval(org.joda.time.Interval) ExtractionDimensionSpec(io.druid.query.dimension.ExtractionDimensionSpec) Test(org.junit.Test)

Aggregations

QueryToolChest (io.druid.query.QueryToolChest): 32 usages
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner): 27 usages
QueryRunner (io.druid.query.QueryRunner): 20 usages
Test (org.junit.Test): 15 usages
Result (io.druid.query.Result): 13 usages
ExecutorService (java.util.concurrent.ExecutorService): 13 usages
SegmentAnalysis (io.druid.query.metadata.metadata.SegmentAnalysis): 9 usages
ColumnAnalysis (io.druid.query.metadata.metadata.ColumnAnalysis): 8 usages
ListColumnIncluderator (io.druid.query.metadata.metadata.ListColumnIncluderator): 8 usages
DateTime (org.joda.time.DateTime): 7 usages
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 6 usages
TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest): 6 usages
BySegmentResultValue (io.druid.query.BySegmentResultValue): 5 usages
BySegmentResultValueClass (io.druid.query.BySegmentResultValueClass): 5 usages
Query (io.druid.query.Query): 5 usages
TopNQueryQueryToolChest (io.druid.query.topn.TopNQueryQueryToolChest): 5 usages
PeriodGranularity (io.druid.java.util.common.granularity.PeriodGranularity): 4 usages
SelectorDimFilter (io.druid.query.filter.SelectorDimFilter): 4 usages
QueryableIndexSegment (io.druid.segment.QueryableIndexSegment): 4 usages
Interval (org.joda.time.Interval): 4 usages