Example 21 with Query

use of io.druid.query.Query in project hive by apache.

the class DruidQueryBasedInputFormat method distributeScanQuery.

/* New method that distributes the Scan query by creating splits containing
   * information about different Druid nodes that have the data for the given
   * query. */
private static HiveDruidSplit[] distributeScanQuery(Configuration conf, String address, ScanQuery query, Path dummyPath) throws IOException {
    // If this is a fetch query (i.e. it carries a limit), execute it on a single split instead of distributing it
    final boolean isFetch = query.getContextBoolean(Constants.DRUID_QUERY_FETCH, false);
    if (isFetch) {
        return new HiveDruidSplit[] { new HiveDruidSplit(DruidStorageHandlerUtils.JSON_MAPPER.writeValueAsString(query), dummyPath, new String[] { address }) };
    }
    final List<LocatedSegmentDescriptor> segmentDescriptors = fetchLocatedSegmentDescriptors(address, query);
    // Create one input split for each segment
    final int numSplits = segmentDescriptors.size();
    final HiveDruidSplit[] splits = new HiveDruidSplit[numSplits];
    for (int i = 0; i < numSplits; i++) {
        final LocatedSegmentDescriptor locatedSD = segmentDescriptors.get(i);
        final String[] hosts = new String[locatedSD.getLocations().size()];
        for (int j = 0; j < locatedSD.getLocations().size(); j++) {
            hosts[j] = locatedSD.getLocations().get(j).getHost();
        }
        // Create a partial Scan query covering only this segment
        final SegmentDescriptor newSD = new SegmentDescriptor(locatedSD.getInterval(), locatedSD.getVersion(), locatedSD.getPartitionNumber());
        final Query partialQuery = query.withQuerySegmentSpec(new MultipleSpecificSegmentSpec(Lists.newArrayList(newSD)));
        splits[i] = new HiveDruidSplit(DruidStorageHandlerUtils.JSON_MAPPER.writeValueAsString(partialQuery), dummyPath, hosts);
    }
    return splits;
}
Also used : MultipleSpecificSegmentSpec(io.druid.query.spec.MultipleSpecificSegmentSpec) LocatedSegmentDescriptor(io.druid.query.LocatedSegmentDescriptor) BaseQuery(io.druid.query.BaseQuery) SelectQuery(io.druid.query.select.SelectQuery) Query(io.druid.query.Query) ScanQuery(io.druid.query.scan.ScanQuery) SegmentDescriptor(io.druid.query.SegmentDescriptor)
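
For context, a hedged consumer-side sketch of what these splits carry: each HiveDruidSplit wraps the serialized partial query plus the hosts that can serve that segment. The getDruidQuery() accessor below is a hypothetical name used for illustration, not verified Hive API; getLocations() is the standard Hadoop InputSplit accessor.

// Minimal sketch, assuming HiveDruidSplit exposes the serialized query via a
// getDruidQuery() accessor (hypothetical name).
HiveDruidSplit[] splits = distributeScanQuery(conf, address, query, dummyPath);
for (HiveDruidSplit split : splits) {
    // Deserialize the partial Scan query written with JSON_MAPPER above
    Query<?> partial = DruidStorageHandlerUtils.JSON_MAPPER
            .readValue(split.getDruidQuery(), Query.class);
    // Hosts holding the segment, usable for locality-aware scheduling
    String[] hosts = split.getLocations();
}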

Example 22 with Query

use of io.druid.query.Query in project druid by druid-io.

the class SegmentMetadataQueryRunnerFactory method createRunner.

@Override
public QueryRunner<SegmentAnalysis> createRunner(final Segment segment) {
    return new QueryRunner<SegmentAnalysis>() {

        @Override
        public Sequence<SegmentAnalysis> run(Query<SegmentAnalysis> inQ, Map<String, Object> responseContext) {
            SegmentMetadataQuery query = (SegmentMetadataQuery) inQ;
            final SegmentAnalyzer analyzer = new SegmentAnalyzer(query.getAnalysisTypes());
            final Map<String, ColumnAnalysis> analyzedColumns = analyzer.analyze(segment);
            final long numRows = analyzer.numRows(segment);
            long totalSize = 0;
            if (analyzer.analyzingSize()) {
                // Initialize with the size of the whitespace: 1 byte per column per row
                totalSize = analyzedColumns.size() * numRows;
            }
            Map<String, ColumnAnalysis> columns = Maps.newTreeMap();
            ColumnIncluderator includerator = query.getToInclude();
            for (Map.Entry<String, ColumnAnalysis> entry : analyzedColumns.entrySet()) {
                final String columnName = entry.getKey();
                final ColumnAnalysis column = entry.getValue();
                if (!column.isError()) {
                    totalSize += column.getSize();
                }
                if (includerator.include(columnName)) {
                    columns.put(columnName, column);
                }
            }
            List<Interval> retIntervals = query.analyzingInterval() ? Arrays.asList(segment.getDataInterval()) : null;
            final Map<String, AggregatorFactory> aggregators;
            Metadata metadata = null;
            if (query.hasAggregators()) {
                metadata = segment.asStorageAdapter().getMetadata();
                if (metadata != null && metadata.getAggregators() != null) {
                    aggregators = Maps.newHashMap();
                    for (AggregatorFactory aggregator : metadata.getAggregators()) {
                        aggregators.put(aggregator.getName(), aggregator);
                    }
                } else {
                    aggregators = null;
                }
            } else {
                aggregators = null;
            }
            final TimestampSpec timestampSpec;
            if (query.hasTimestampSpec()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                timestampSpec = metadata != null ? metadata.getTimestampSpec() : null;
            } else {
                timestampSpec = null;
            }
            final Granularity queryGranularity;
            if (query.hasQueryGranularity()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                queryGranularity = metadata != null ? metadata.getQueryGranularity() : null;
            } else {
                queryGranularity = null;
            }
            Boolean rollup = null;
            if (query.hasRollup()) {
                if (metadata == null) {
                    metadata = segment.asStorageAdapter().getMetadata();
                }
                rollup = metadata != null ? metadata.isRollup() : null;
                if (rollup == null) {
                    // this segment was built before the no-rollup feature existed,
                    // so it was built with rollup enabled
                    rollup = Boolean.TRUE;
                }
            }
            return Sequences.simple(Arrays.asList(new SegmentAnalysis(segment.getIdentifier(), retIntervals, columns, totalSize, numRows, aggregators, timestampSpec, queryGranularity, rollup)));
        }
    };
}
Also used : BaseQuery(io.druid.query.BaseQuery) SegmentMetadataQuery(io.druid.query.metadata.metadata.SegmentMetadataQuery) Query(io.druid.query.Query) Metadata(io.druid.segment.Metadata) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) Granularity(io.druid.java.util.common.granularity.Granularity) ColumnIncluderator(io.druid.query.metadata.metadata.ColumnIncluderator) QueryRunner(io.druid.query.QueryRunner) ConcatQueryRunner(io.druid.query.ConcatQueryRunner) ColumnAnalysis(io.druid.query.metadata.metadata.ColumnAnalysis) TimestampSpec(io.druid.data.input.impl.TimestampSpec) SegmentAnalysis(io.druid.query.metadata.metadata.SegmentAnalysis) Map(java.util.Map) Interval(org.joda.time.Interval)
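
For context, a minimal sketch of driving this runner end to end. The builder calls follow the io.druid.query.Druids conventions seen in Druid's own tests but are reconstructed from memory; the datasource name and interval are placeholders.

// Sketch: build a SegmentMetadataQuery and run it through the runner created
// above. "testDatasource" and the interval are placeholder values.
SegmentMetadataQuery query = Druids.newSegmentMetadataQueryBuilder()
        .dataSource("testDatasource")
        .intervals("2011-01-01/2012-01-01")
        .build();
QueryRunner<SegmentAnalysis> runner = factory.createRunner(segment);
Sequence<SegmentAnalysis> sequence = runner.run(query, Maps.<String, Object>newHashMap());
List<SegmentAnalysis> analyses = Sequences.toList(sequence, Lists.<SegmentAnalysis>newArrayList());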

Example 23 with Query

use of io.druid.query.Query in project druid by druid-io.

the class SegmentMetadataQueryRunnerFactory method mergeRunners.

@Override
public QueryRunner<SegmentAnalysis> mergeRunners(ExecutorService exec, Iterable<QueryRunner<SegmentAnalysis>> queryRunners) {
    final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(exec);
    return new ConcatQueryRunner<SegmentAnalysis>(Sequences.map(Sequences.simple(queryRunners), new Function<QueryRunner<SegmentAnalysis>, QueryRunner<SegmentAnalysis>>() {

        @Override
        public QueryRunner<SegmentAnalysis> apply(final QueryRunner<SegmentAnalysis> input) {
            return new QueryRunner<SegmentAnalysis>() {

                @Override
                public Sequence<SegmentAnalysis> run(final Query<SegmentAnalysis> query, final Map<String, Object> responseContext) {
                    final int priority = BaseQuery.getContextPriority(query, 0);
                    ListenableFuture<Sequence<SegmentAnalysis>> future = queryExecutor.submit(new AbstractPrioritizedCallable<Sequence<SegmentAnalysis>>(priority) {

                        @Override
                        public Sequence<SegmentAnalysis> call() throws Exception {
                            return Sequences.simple(Sequences.toList(input.run(query, responseContext), new ArrayList<SegmentAnalysis>()));
                        }
                    });
                    try {
                        queryWatcher.registerQuery(query, future);
                        final Number timeout = query.getContextValue(QueryContextKeys.TIMEOUT, (Number) null);
                        return timeout == null ? future.get() : future.get(timeout.longValue(), TimeUnit.MILLISECONDS);
                    } catch (InterruptedException e) {
                        log.warn(e, "Query interrupted, cancelling pending results, query id [%s]", query.getId());
                        future.cancel(true);
                        throw new QueryInterruptedException(e);
                    } catch (CancellationException e) {
                        throw new QueryInterruptedException(e);
                    } catch (TimeoutException e) {
                        log.info("Query timeout, cancelling pending results for query id [%s]", query.getId());
                        future.cancel(true);
                        throw new QueryInterruptedException(e);
                    } catch (ExecutionException e) {
                        throw Throwables.propagate(e.getCause());
                    }
                }
            };
        }
    }));
}
Also used : BaseQuery(io.druid.query.BaseQuery) SegmentMetadataQuery(io.druid.query.metadata.metadata.SegmentMetadataQuery) Query(io.druid.query.Query) Sequence(io.druid.java.util.common.guava.Sequence) QueryInterruptedException(io.druid.query.QueryInterruptedException) QueryRunner(io.druid.query.QueryRunner) ConcatQueryRunner(io.druid.query.ConcatQueryRunner) TimeoutException(java.util.concurrent.TimeoutException) CancellationException(java.util.concurrent.CancellationException) ExecutionException(java.util.concurrent.ExecutionException) Function(com.google.common.base.Function) ListeningExecutorService(com.google.common.util.concurrent.ListeningExecutorService) SegmentAnalysis(io.druid.query.metadata.metadata.SegmentAnalysis) Map(java.util.Map)
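
Note that the timeout branch above is driven entirely by the query context: QueryContextKeys.TIMEOUT (in milliseconds) bounds future.get(), while its absence blocks until completion. A minimal sketch of opting in, where mergedRunner and the 5000 ms value are illustrative:

// Sketch: attach a timeout through the query context so the bounded
// future.get(timeout, TimeUnit.MILLISECONDS) branch above is taken.
Query<SegmentAnalysis> timedQuery = query.withOverriddenContext(
        ImmutableMap.<String, Object>of(QueryContextKeys.TIMEOUT, 5000));
Sequence<SegmentAnalysis> result = mergedRunner.run(timedQuery, responseContext);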

Example 24 with Query

use of io.druid.query.Query in project druid by druid-io.

the class CachingClusteredClientTest method testQueryCaching.

@SuppressWarnings("unchecked")
public void testQueryCaching(final QueryRunner runner, final int numTimesToQuery, boolean expectBySegment, final Query query,
        // does this assume query intervals must be ordered?
        Object... args) {
    final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
    final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
    parseResults(queryIntervals, expectedResults, args);
    for (int i = 0; i < queryIntervals.size(); ++i) {
        List<Object> mocks = Lists.newArrayList();
        mocks.add(serverView);
        final Interval actualQueryInterval = new Interval(queryIntervals.get(0).getStart(), queryIntervals.get(i).getEnd());
        final List<Map<DruidServer, ServerExpectations>> serverExpectationList = populateTimeline(queryIntervals, expectedResults, i, mocks);
        List<Capture> queryCaptures = Lists.newArrayList();
        final Map<DruidServer, ServerExpectations> finalExpectation = serverExpectationList.get(serverExpectationList.size() - 1);
        for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
            DruidServer server = entry.getKey();
            ServerExpectations expectations = entry.getValue();
            EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).once();
            final Capture<? extends Query> capture = new Capture();
            final Capture<? extends Map> context = new Capture();
            queryCaptures.add(capture);
            QueryRunner queryable = expectations.getQueryRunner();
            if (query instanceof TimeseriesQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<TimeseriesResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results)).once();
            } else if (query instanceof TopNQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<TopNResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTopNResults(segmentIds, intervals, results)).once();
            } else if (query instanceof SearchQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<SearchResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableSearchResults(segmentIds, intervals, results)).once();
            } else if (query instanceof SelectQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<SelectResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableSelectResults(segmentIds, intervals, results)).once();
            } else if (query instanceof GroupByQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Row>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableGroupByResults(segmentIds, intervals, results)).once();
            } else if (query instanceof TimeBoundaryQuery) {
                List<String> segmentIds = Lists.newArrayList();
                List<Interval> intervals = Lists.newArrayList();
                List<Iterable<Result<TimeBoundaryResultValue>>> results = Lists.newArrayList();
                for (ServerExpectation expectation : expectations) {
                    segmentIds.add(expectation.getSegmentId());
                    intervals.add(expectation.getInterval());
                    results.add(expectation.getResults());
                }
                EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results)).once();
            } else {
                throw new ISE("Unknown query type[%s]", query.getClass());
            }
        }
        final int expectedResultsRangeStart;
        final int expectedResultsRangeEnd;
        if (query instanceof TimeBoundaryQuery) {
            expectedResultsRangeStart = i;
            expectedResultsRangeEnd = i + 1;
        } else {
            expectedResultsRangeStart = 0;
            expectedResultsRangeEnd = i + 1;
        }
        runWithMocks(new Runnable() {

            @Override
            public void run() {
                HashMap<String, List> context = new HashMap<String, List>();
                for (int i = 0; i < numTimesToQuery; ++i) {
                    TestHelper.assertExpectedResults(new MergeIterable<>(Ordering.<Result<Object>>natural().nullsFirst(), FunctionalIterable.create(new RangeIterable(expectedResultsRangeStart, expectedResultsRangeEnd)).transformCat(new Function<Integer, Iterable<Iterable<Result<Object>>>>() {

                        @Override
                        public Iterable<Iterable<Result<Object>>> apply(@Nullable Integer input) {
                            List<Iterable<Result<Object>>> retVal = Lists.newArrayList();
                            final Map<DruidServer, ServerExpectations> exps = serverExpectationList.get(input);
                            for (ServerExpectations expectations : exps.values()) {
                                for (ServerExpectation expectation : expectations) {
                                    retVal.add(expectation.getResults());
                                }
                            }
                            return retVal;
                        }
                    })), runner.run(query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval))), context));
                    if (queryCompletedCallback != null) {
                        queryCompletedCallback.run();
                    }
                }
            }
        }, mocks.toArray());
        // verify whether each query was sent down as 'bySegment', matching expectBySegment
        for (Capture queryCapture : queryCaptures) {
            Query capturedQuery = (Query) queryCapture.getValue();
            if (expectBySegment) {
                Assert.assertEquals(true, capturedQuery.getContextValue("bySegment"));
            } else {
                Assert.assertTrue(capturedQuery.getContextValue("bySegment") == null || capturedQuery.getContextValue("bySegment").equals(false));
            }
        }
    }
}
Also used : TimeseriesResultValue(io.druid.query.timeseries.TimeseriesResultValue) MergeIterable(io.druid.java.util.common.guava.MergeIterable) FunctionalIterable(io.druid.java.util.common.guava.FunctionalIterable) SelectQuery(io.druid.query.select.SelectQuery) GroupByQuery(io.druid.query.groupby.GroupByQuery) SearchQuery(io.druid.query.search.search.SearchQuery) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) TopNQuery(io.druid.query.topn.TopNQuery) TimeBoundaryQuery(io.druid.query.timeboundary.TimeBoundaryQuery) HashMap(java.util.HashMap) MultipleIntervalSegmentSpec(io.druid.query.spec.MultipleIntervalSegmentSpec) Capture(org.easymock.Capture) Result(io.druid.query.Result) SearchResultValue(io.druid.query.search.SearchResultValue) Function(com.google.common.base.Function) HashFunction(com.google.common.hash.HashFunction) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) ISE(io.druid.java.util.common.ISE) QueryableDruidServer(io.druid.client.selector.QueryableDruidServer) FinalizeResultsQueryRunner(io.druid.query.FinalizeResultsQueryRunner) QueryRunner(io.druid.query.QueryRunner) Map(java.util.Map) TreeMap(java.util.TreeMap) ImmutableMap(com.google.common.collect.ImmutableMap) Nullable(javax.annotation.Nullable) Interval(org.joda.time.Interval)
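
The capture-and-assert pattern at the end of the test can be exercised standalone. A minimal sketch, where the mocked runner, the query, and the "bySegment" context entry are illustrative rather than taken from the original test:

// Standalone sketch of the EasyMock capture pattern used above: capture the
// Query handed to a mocked runner, then assert on its "bySegment" context.
Capture<Query> capture = new Capture<>();
QueryRunner mockRunner = EasyMock.createMock(QueryRunner.class);
EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.anyObject(Map.class)))
        .andReturn(Sequences.empty()).once();
EasyMock.replay(mockRunner);
mockRunner.run(query.withOverriddenContext(ImmutableMap.<String, Object>of("bySegment", true)),
        new HashMap<String, Object>());
Assert.assertEquals(true, ((Query) capture.getValue()).getContextValue("bySegment"));
EasyMock.verify(mockRunner);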

Example 25 with Query

use of io.druid.query.Query in project druid by druid-io.

the class CachingQueryRunnerTest method testCloseAndPopulate.

private void testCloseAndPopulate(List<Result> expectedRes, List<Result> expectedCacheRes, Query query, QueryToolChest toolchest) throws Exception {
    final AssertingClosable closable = new AssertingClosable();
    final Sequence resultSeq = Sequences.wrap(Sequences.simple(expectedRes), new SequenceWrapper() {

        @Override
        public void before() {
            Assert.assertFalse(closable.isClosed());
        }

        @Override
        public void after(boolean isDone, Throwable thrown) throws Exception {
            closable.close();
        }
    });
    final CountDownLatch cacheMustBePutOnce = new CountDownLatch(1);
    Cache cache = new Cache() {

        private final Map<NamedKey, byte[]> baseMap = new ConcurrentHashMap<>();

        @Override
        public byte[] get(NamedKey key) {
            return baseMap.get(key);
        }

        @Override
        public void put(NamedKey key, byte[] value) {
            baseMap.put(key, value);
            cacheMustBePutOnce.countDown();
        }

        @Override
        public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys) {
            return null;
        }

        @Override
        public void close(String namespace) {
        }

        @Override
        public CacheStats getStats() {
            return null;
        }

        @Override
        public boolean isLocal() {
            return true;
        }

        @Override
        public void doMonitor(ServiceEmitter emitter) {
        }
    };
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest, new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return resultSeq;
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    HashMap<String, Object> context = new HashMap<String, Object>();
    Sequence res = runner.run(query, context);
    // base sequence is not closed yet
    Assert.assertFalse("sequence must not be closed", closable.isClosed());
    Assert.assertNull("cache must be empty", cache.get(cacheKey));
    ArrayList results = Sequences.toList(res, new ArrayList());
    Assert.assertTrue(closable.isClosed());
    Assert.assertEquals(expectedRes.toString(), results.toString());
    // wait for background caching finish
    // wait at most 10 seconds to fail the test to avoid block overall tests
    Assert.assertTrue("cache must be populated", cacheMustBePutOnce.await(10, TimeUnit.SECONDS));
    byte[] cacheValue = cache.get(cacheKey);
    Assert.assertNotNull(cacheValue);
    Function<Object, Result> fn = cacheStrategy.pullFromCache();
    List<Result> cacheResults = Lists.newArrayList(Iterators.transform(objectMapper.readValues(objectMapper.getFactory().createParser(cacheValue), cacheStrategy.getCacheObjectClazz()), fn));
    Assert.assertEquals(expectedCacheRes.toString(), cacheResults.toString());
}
Also used : ServiceEmitter(com.metamx.emitter.service.ServiceEmitter) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Result(io.druid.query.Result) SegmentDescriptor(io.druid.query.SegmentDescriptor) CacheConfig(io.druid.client.cache.CacheConfig) SequenceWrapper(io.druid.java.util.common.guava.SequenceWrapper) Sequence(io.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) IOException(java.io.IOException) QueryRunner(io.druid.query.QueryRunner) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) CacheStrategy(io.druid.query.CacheStrategy) Cache(io.druid.client.cache.Cache) MapCache(io.druid.client.cache.MapCache) Interval(org.joda.time.Interval)
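
The property the assertions rely on is that the segment cache key is deterministic in the segment identifier, the descriptor, and the strategy's query key. A small sketch restating the lookup with the same literals used above, using only calls already present in the example:

// Sketch: recompute the cache key from the same inputs and read back the
// entry that the background populator wrote.
Cache.NamedKey key = CacheUtil.computeSegmentCacheKey(
        "segment",
        new SegmentDescriptor(new Interval("2011/2012"), "version", 0),
        cacheStrategy.computeCacheKey(query));
// Non-null once cacheMustBePutOnce has counted down
byte[] populated = cache.get(key);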

Aggregations

Query (io.druid.query.Query): 48
QueryRunner (io.druid.query.QueryRunner): 23
Test (org.junit.Test): 22
Interval (org.joda.time.Interval): 18
Sequence (io.druid.java.util.common.guava.Sequence): 14
Map (java.util.Map): 14
FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner): 11
SegmentDescriptor (io.druid.query.SegmentDescriptor): 11
IOException (java.io.IOException): 10
Row (io.druid.data.input.Row): 9
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 9
Result (io.druid.query.Result): 9
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 8
Function (com.google.common.base.Function): 8
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec): 8
TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery): 8
MergeSequence (io.druid.java.util.common.guava.MergeSequence): 7
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 7
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec): 7
ImmutableMap (com.google.common.collect.ImmutableMap): 6