
Example 66 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class ScanQueryEngine, method process:

public Sequence<ScanResultValue> process(final ScanQuery query, final Segment segment, final ResponseContext responseContext) {
    // "legacy" should be non-null due to toolChest.mergeResults
    final boolean legacy = Preconditions.checkNotNull(query.isLegacy(), "Expected non-null 'legacy' parameter");
    final Long numScannedRows = responseContext.getRowScanCount();
    if (numScannedRows != null && numScannedRows >= query.getScanRowsLimit() && query.getTimeOrder().equals(ScanQuery.Order.NONE)) {
        return Sequences.empty();
    }
    final boolean hasTimeout = QueryContexts.hasTimeout(query);
    final Long timeoutAt = responseContext.getTimeoutTime();
    final long start = System.currentTimeMillis();
    final StorageAdapter adapter = segment.asStorageAdapter();
    if (adapter == null) {
        throw new ISE("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }
    final List<String> allColumns = new ArrayList<>();
    if (query.getColumns() != null && !query.getColumns().isEmpty()) {
        if (legacy && !query.getColumns().contains(LEGACY_TIMESTAMP_KEY)) {
            allColumns.add(LEGACY_TIMESTAMP_KEY);
        }
        // Unless we're in legacy mode, allColumns equals query.getColumns() exactly. This is nice since it makes
        // the compactedList form easier to use.
        allColumns.addAll(query.getColumns());
    } else {
        final Set<String> availableColumns = Sets.newLinkedHashSet(
            Iterables.concat(
                Collections.singleton(legacy ? LEGACY_TIMESTAMP_KEY : ColumnHolder.TIME_COLUMN_NAME),
                Iterables.transform(
                    Arrays.asList(query.getVirtualColumns().getVirtualColumns()),
                    VirtualColumn::getOutputName
                ),
                adapter.getAvailableDimensions(),
                adapter.getAvailableMetrics()
            )
        );
        allColumns.addAll(availableColumns);
        if (legacy) {
            allColumns.remove(ColumnHolder.TIME_COLUMN_NAME);
        }
    }
    final List<Interval> intervals = query.getQuerySegmentSpec().getIntervals();
    Preconditions.checkArgument(intervals.size() == 1, "Can only handle a single interval, got[%s]", intervals);
    final SegmentId segmentId = segment.getId();
    final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getFilter()));
    // If the row count is not set, set it to 0, else do nothing.
    responseContext.addRowScanCount(0);
    final long limit = calculateRemainingScanRowsLimit(query, responseContext);
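    // Build one lazy sub-sequence per cursor and concatenate them; no cursor is opened until the caller consumes the returned sequence.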
    return Sequences.concat(adapter.makeCursors(
        filter,
        intervals.get(0),
        query.getVirtualColumns(),
        Granularities.ALL,
        query.getTimeOrder().equals(ScanQuery.Order.DESCENDING)
            || (query.getTimeOrder().equals(ScanQuery.Order.NONE) && query.isDescending()),
        null
    ).map(cursor -> new BaseSequence<>(new BaseSequence.IteratorMaker<ScanResultValue, Iterator<ScanResultValue>>() {

        @Override
        public Iterator<ScanResultValue> make() {
            final List<BaseObjectColumnValueSelector> columnSelectors = new ArrayList<>(allColumns.size());
            for (String column : allColumns) {
                final BaseObjectColumnValueSelector selector;
                if (legacy && LEGACY_TIMESTAMP_KEY.equals(column)) {
                    selector = cursor.getColumnSelectorFactory().makeColumnValueSelector(ColumnHolder.TIME_COLUMN_NAME);
                } else {
                    selector = cursor.getColumnSelectorFactory().makeColumnValueSelector(column);
                }
                columnSelectors.add(selector);
            }
            final int batchSize = query.getBatchSize();
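            // Each next() call below drains up to batchSize rows from the cursor into a single ScanResultValue.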
            return new Iterator<ScanResultValue>() {

                private long offset = 0;

                @Override
                public boolean hasNext() {
                    return !cursor.isDone() && offset < limit;
                }

                @Override
                public ScanResultValue next() {
                    if (!hasNext()) {
                        throw new NoSuchElementException();
                    }
                    if (hasTimeout && System.currentTimeMillis() >= timeoutAt) {
                        throw new QueryTimeoutException(StringUtils.nonStrictFormat("Query [%s] timed out", query.getId()));
                    }
                    final long lastOffset = offset;
                    final Object events;
                    final ScanQuery.ResultFormat resultFormat = query.getResultFormat();
                    if (ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST.equals(resultFormat)) {
                        events = rowsToCompactedList();
                    } else if (ScanQuery.ResultFormat.RESULT_FORMAT_LIST.equals(resultFormat)) {
                        events = rowsToList();
                    } else {
                        throw new UOE("resultFormat[%s] is not supported", resultFormat.toString());
                    }
                    responseContext.addRowScanCount(offset - lastOffset);
                    if (hasTimeout) {
                        responseContext.putTimeoutTime(timeoutAt - (System.currentTimeMillis() - start));
                    }
                    return new ScanResultValue(segmentId.toString(), allColumns, events);
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }

                private List<List<Object>> rowsToCompactedList() {
                    final List<List<Object>> events = new ArrayList<>(batchSize);
                    final long iterLimit = Math.min(limit, offset + batchSize);
                    for (; !cursor.isDone() && offset < iterLimit; cursor.advance(), offset++) {
                        final List<Object> theEvent = new ArrayList<>(allColumns.size());
                        for (int j = 0; j < allColumns.size(); j++) {
                            theEvent.add(getColumnValue(j));
                        }
                        events.add(theEvent);
                    }
                    return events;
                }

                private List<Map<String, Object>> rowsToList() {
                    List<Map<String, Object>> events = Lists.newArrayListWithCapacity(batchSize);
                    final long iterLimit = Math.min(limit, offset + batchSize);
                    for (; !cursor.isDone() && offset < iterLimit; cursor.advance(), offset++) {
                        final Map<String, Object> theEvent = new LinkedHashMap<>();
                        for (int j = 0; j < allColumns.size(); j++) {
                            theEvent.put(allColumns.get(j), getColumnValue(j));
                        }
                        events.add(theEvent);
                    }
                    return events;
                }

                private Object getColumnValue(int i) {
                    final BaseObjectColumnValueSelector selector = columnSelectors.get(i);
                    final Object value;
                    if (legacy && allColumns.get(i).equals(LEGACY_TIMESTAMP_KEY)) {
                        value = DateTimes.utc((long) selector.getObject());
                    } else {
                        value = selector == null ? null : selector.getObject();
                    }
                    return value;
                }
            };
        }

        @Override
        public void cleanup(Iterator<ScanResultValue> iterFromMake) {
        }
    })));
}
Also used: Iterables(com.google.common.collect.Iterables) Arrays(java.util.Arrays) StorageAdapter(org.apache.druid.segment.StorageAdapter) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) Interval(org.joda.time.Interval) Lists(com.google.common.collect.Lists) ColumnHolder(org.apache.druid.segment.column.ColumnHolder) Map(java.util.Map) UOE(org.apache.druid.java.util.common.UOE) NoSuchElementException(java.util.NoSuchElementException) BaseObjectColumnValueSelector(org.apache.druid.segment.BaseObjectColumnValueSelector) Sequences(org.apache.druid.java.util.common.guava.Sequences) Segment(org.apache.druid.segment.Segment) DateTimes(org.apache.druid.java.util.common.DateTimes) Sequence(org.apache.druid.java.util.common.guava.Sequence) Iterator(java.util.Iterator) ResponseContext(org.apache.druid.query.context.ResponseContext) VirtualColumn(org.apache.druid.segment.VirtualColumn) StringUtils(org.apache.druid.java.util.common.StringUtils) Set(java.util.Set) ISE(org.apache.druid.java.util.common.ISE) Sets(com.google.common.collect.Sets) QueryContexts(org.apache.druid.query.QueryContexts) Granularities(org.apache.druid.java.util.common.granularity.Granularities) List(java.util.List) QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) Preconditions(com.google.common.base.Preconditions) BaseSequence(org.apache.druid.java.util.common.guava.BaseSequence) SegmentId(org.apache.druid.timeline.SegmentId) Filters(org.apache.druid.segment.filter.Filters) Collections(java.util.Collections) Filter(org.apache.druid.query.filter.Filter)
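
A minimal consumption sketch for the sequence returned above, assuming a query, segment, and response context have been constructed elsewhere (hypothetical setup, not shown in the example). Druid sequences are lazy, so no cursor work happens until this consumption step:

// Hypothetical setup: `query`, `segment`, and `responseContext` are built elsewhere.
final ScanQueryEngine engine = new ScanQueryEngine();
final Sequence<ScanResultValue> results = engine.process(query, segment, responseContext);

// toList() forces the whole lazy pipeline: cursors are created, rows are batched,
// and the scan-row count in the response context is updated as a side effect.
for (ScanResultValue batch : results.toList()) {
    System.out.println(batch.getSegmentId() + ": " + batch.getEvents());
}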

Example 67 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class SegmentMetadataQueryRunnerFactory, method mergeRunners:

@Override
public QueryRunner<SegmentAnalysis> mergeRunners(QueryProcessingPool queryProcessingPool, Iterable<QueryRunner<SegmentAnalysis>> queryRunners) {
    return new ConcatQueryRunner<SegmentAnalysis>(Sequences.map(Sequences.simple(queryRunners), new Function<QueryRunner<SegmentAnalysis>, QueryRunner<SegmentAnalysis>>() {

        @Override
        public QueryRunner<SegmentAnalysis> apply(final QueryRunner<SegmentAnalysis> input) {
            return new QueryRunner<SegmentAnalysis>() {

                @Override
                public Sequence<SegmentAnalysis> run(final QueryPlus<SegmentAnalysis> queryPlus, final ResponseContext responseContext) {
                    final Query<SegmentAnalysis> query = queryPlus.getQuery();
                    final int priority = QueryContexts.getPriority(query);
                    final QueryPlus<SegmentAnalysis> threadSafeQueryPlus = queryPlus.withoutThreadUnsafeState();
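                    // Run the per-segment work on the processing pool at the query's priority; the callable materializes results eagerly with toList() so the pool thread bears the cost.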
                    ListenableFuture<Sequence<SegmentAnalysis>> future = queryProcessingPool.submitRunnerTask(new AbstractPrioritizedQueryRunnerCallable<Sequence<SegmentAnalysis>, SegmentAnalysis>(priority, input) {

                        @Override
                        public Sequence<SegmentAnalysis> call() {
                            return Sequences.simple(input.run(threadSafeQueryPlus, responseContext).toList());
                        }
                    });
                    try {
                        queryWatcher.registerQueryFuture(query, future);
                        if (QueryContexts.hasTimeout(query)) {
                            return future.get(QueryContexts.getTimeout(query), TimeUnit.MILLISECONDS);
                        } else {
                            return future.get();
                        }
                    } catch (InterruptedException e) {
                        log.warn(e, "Query interrupted, cancelling pending results, query id [%s]", query.getId());
                        future.cancel(true);
                        throw new QueryInterruptedException(e);
                    } catch (CancellationException e) {
                        throw new QueryInterruptedException(e);
                    } catch (TimeoutException e) {
                        log.info("Query timeout, cancelling pending results for query id [%s]", query.getId());
                        future.cancel(true);
                        throw new QueryTimeoutException(StringUtils.nonStrictFormat("Query [%s] timed out", query.getId()));
                    } catch (ExecutionException e) {
                        throw new RuntimeException(e);
                    }
                }
            };
        }
    }));
}
Also used: Sequence(org.apache.druid.java.util.common.guava.Sequence) QueryInterruptedException(org.apache.druid.query.QueryInterruptedException) ConcatQueryRunner(org.apache.druid.query.ConcatQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner) Function(com.google.common.base.Function) QueryTimeoutException(org.apache.druid.query.QueryTimeoutException) CancellationException(java.util.concurrent.CancellationException) ResponseContext(org.apache.druid.query.context.ResponseContext) SegmentAnalysis(org.apache.druid.query.metadata.metadata.SegmentAnalysis) ExecutionException(java.util.concurrent.ExecutionException) QueryPlus(org.apache.druid.query.QueryPlus) TimeoutException(java.util.concurrent.TimeoutException)
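
A hedged sketch of how the merged runner could be driven, assuming `factory`, `queryProcessingPool`, per-segment `runners`, and a `segmentMetadataQuery` exist elsewhere (hypothetical names, not shown above):

// Hypothetical setup: `factory` is this SegmentMetadataQueryRunnerFactory and
// `runners` are the per-segment QueryRunner<SegmentAnalysis> instances.
final QueryRunner<SegmentAnalysis> merged = factory.mergeRunners(queryProcessingPool, runners);

// Each segment's analysis is computed on the processing pool; the returned
// sequence concatenates the per-segment results in order.
final Sequence<SegmentAnalysis> analyses =
    merged.run(QueryPlus.wrap(segmentMetadataQuery), ResponseContext.createEmpty());
for (SegmentAnalysis analysis : analyses.toList()) {
    System.out.println(analysis.getId() + ": " + analysis.getNumRows() + " rows");
}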

Example 68 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class SpecificSegmentQueryRunner, method run:

@Override
public Sequence<T> run(final QueryPlus<T> input, final ResponseContext responseContext) {
    final QueryPlus<T> queryPlus = input.withQuery(Queries.withSpecificSegments(input.getQuery(), Collections.singletonList(specificSpec.getDescriptor())));
    final Query<T> query = queryPlus.getQuery();
    final Thread currThread = Thread.currentThread();
    final String currThreadName = currThread.getName();
    final String newName = query.getType() + "_" + query.getDataSource() + "_" + query.getIntervals();
    final Sequence<T> baseSequence = doNamed(currThread, currThreadName, newName, () -> base.run(queryPlus, responseContext));
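    // Wrap the base sequence so a SegmentMissingException thrown during iteration is recorded in the response context instead of failing the whole query.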
    Sequence<T> segmentMissingCatchingSequence = new Sequence<T>() {

        @Override
        public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator) {
            try {
                return baseSequence.accumulate(initValue, accumulator);
            } catch (SegmentMissingException e) {
                appendMissingSegment(responseContext);
                return initValue;
            }
        }

        @Override
        public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator) {
            try {
                return makeYielder(baseSequence.toYielder(initValue, accumulator));
            } catch (SegmentMissingException e) {
                appendMissingSegment(responseContext);
                return Yielders.done(initValue, null);
            }
        }

        private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder) {
            return new Yielder<OutType>() {

                @Override
                public OutType get() {
                    return yielder.get();
                }

                @Override
                public Yielder<OutType> next(final OutType initValue) {
                    try {
                        return yielder.next(initValue);
                    } catch (SegmentMissingException e) {
                        appendMissingSegment(responseContext);
                        return Yielders.done(initValue, null);
                    }
                }

                @Override
                public boolean isDone() {
                    return yielder.isDone();
                }

                @Override
                public void close() throws IOException {
                    yielder.close();
                }
            };
        }
    };
    return Sequences.wrap(segmentMissingCatchingSequence, new SequenceWrapper() {

        @Override
        public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing) {
            return doNamed(currThread, currThreadName, newName, sequenceProcessing);
        }
    });
}
Also used: YieldingAccumulator(org.apache.druid.java.util.common.guava.YieldingAccumulator) Accumulator(org.apache.druid.java.util.common.guava.Accumulator) SequenceWrapper(org.apache.druid.java.util.common.guava.SequenceWrapper) Yielder(org.apache.druid.java.util.common.guava.Yielder) SegmentMissingException(org.apache.druid.segment.SegmentMissingException) Sequence(org.apache.druid.java.util.common.guava.Sequence)
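
Because the wrapper above converts a SegmentMissingException into a response-context entry and an early-finishing yielder, a consumer walking the yielder directly simply sees the sequence end. A sketch of that consumption style, assuming `runner`, `queryPlus`, and `responseContext` exist and with a hypothetical per-row `process` handler (T stands for the query's result type):

// Sketch only; `runner` is a SpecificSegmentQueryRunner<T> built elsewhere.
final Sequence<T> results = runner.run(queryPlus, responseContext);
Yielder<T> yielder = results.toYielder(
    null,
    new YieldingAccumulator<T, T>() {
        @Override
        public T accumulate(T accumulated, T in) {
            this.yield(); // pause after each row so it surfaces through the yielder
            return in;
        }
    }
);
try {
    while (!yielder.isDone()) {
        final T row = yielder.get(); // a missing segment appears as an early "done" yielder, not an exception
        process(row);                // hypothetical per-row handler
        yielder = yielder.next(null);
    }
} finally {
    try {
        yielder.close(); // yielders hold cursor resources and must be closed
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}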

Example 69 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class ChainedExecutionQueryRunnerTest, method testQueryCancellation:

@Test(timeout = 60_000L)
public void testQueryCancellation() throws Exception {
    ExecutorService exec = PrioritizedExecutorService.create(new Lifecycle(), new DruidProcessingConfig() {

        @Override
        public String getFormatString() {
            return "test";
        }

        @Override
        public int getNumThreads() {
            return 2;
        }
    });
    final CountDownLatch queriesStarted = new CountDownLatch(2);
    final CountDownLatch queriesInterrupted = new CountDownLatch(2);
    final CountDownLatch queryIsRegistered = new CountDownLatch(1);
    Capture<ListenableFuture> capturedFuture = EasyMock.newCapture();
    QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
    watcher.registerQueryFuture(EasyMock.anyObject(), EasyMock.and(EasyMock.anyObject(), EasyMock.capture(capturedFuture)));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {

        @Override
        public Void answer() {
            queryIsRegistered.countDown();
            return null;
        }
    }).once();
    EasyMock.replay(watcher);
    ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
    Set<DyingQueryRunner> runners = Sets.newHashSet(new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted), new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted), new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted));
    ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(new ForwardingQueryProcessingPool(exec), watcher, Lists.newArrayList(runners));
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("test").intervals("2014/2015").aggregators(Collections.singletonList(new CountAggregatorFactory("count"))).build();
    final Sequence seq = chainedRunner.run(QueryPlus.wrap(query));
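    // toList() blocks until the sequence is exhausted, so consume it on a separate thread and keep this one free to cancel the query.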
    Future resultFuture = Executors.newFixedThreadPool(1).submit(new Runnable() {

        @Override
        public void run() {
            seq.toList();
        }
    });
    // wait for query to register and start
    queryIsRegistered.await();
    queriesStarted.await();
    // cancel the query
    Assert.assertTrue(capturedFuture.hasCaptured());
    ListenableFuture future = capturedFuture.getValue();
    future.cancel(true);
    QueryInterruptedException cause = null;
    try {
        resultFuture.get();
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
        cause = (QueryInterruptedException) e.getCause();
    }
    queriesInterrupted.await();
    Assert.assertNotNull(cause);
    Assert.assertTrue(future.isCancelled());
    DyingQueryRunner interrupted1 = interrupted.poll();
    synchronized (interrupted1) {
        Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
        Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
    }
    DyingQueryRunner interrupted2 = interrupted.poll();
    synchronized (interrupted2) {
        Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
        Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
    }
    runners.remove(interrupted1);
    runners.remove(interrupted2);
    DyingQueryRunner remainingRunner = runners.iterator().next();
    synchronized (remainingRunner) {
        Assert.assertTrue("runner 3 should be interrupted or not have started", !remainingRunner.hasStarted || remainingRunner.interrupted);
    }
    Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
    Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
    Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
    EasyMock.verify(watcher);
}
Also used: TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) Lifecycle(org.apache.druid.java.util.common.lifecycle.Lifecycle) Sequence(org.apache.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) IAnswer(org.easymock.IAnswer) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) ExecutorService(java.util.concurrent.ExecutorService) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)
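
Distilled from the test, the essential cancellation pattern looks like the sketch below: toList() blocks until the sequence completes, so consumption runs on a helper thread while the captured future is cancelled from the test thread (same variable names as the test assumed):

// Sketch only; `chainedRunner`, `query`, and `capturedFuture` come from the test above.
final ExecutorService consumer = Executors.newSingleThreadExecutor();
final Future<?> resultFuture = consumer.submit(() -> chainedRunner.run(QueryPlus.wrap(query)).toList());

// Cancelling the watcher-registered future interrupts the pooled runners...
capturedFuture.getValue().cancel(true);

try {
    resultFuture.get();
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
} catch (ExecutionException e) {
    // ...and the blocked toList() call unwinds with a QueryInterruptedException.
    Assert.assertTrue(e.getCause() instanceof QueryInterruptedException);
} finally {
    consumer.shutdownNow();
}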

Example 70 with Sequence

Use of org.apache.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class ChainedExecutionQueryRunnerTest, method testQueryTimeout:

@Test(timeout = 60_000L)
public void testQueryTimeout() throws Exception {
    ExecutorService exec = PrioritizedExecutorService.create(new Lifecycle(), new DruidProcessingConfig() {

        @Override
        public String getFormatString() {
            return "test";
        }

        @Override
        public int getNumThreads() {
            return 2;
        }
    });
    final CountDownLatch queriesStarted = new CountDownLatch(2);
    final CountDownLatch queriesInterrupted = new CountDownLatch(2);
    final CountDownLatch queryIsRegistered = new CountDownLatch(1);
    Capture<ListenableFuture> capturedFuture = Capture.newInstance();
    QueryWatcher watcher = EasyMock.createStrictMock(QueryWatcher.class);
    watcher.registerQueryFuture(EasyMock.anyObject(), EasyMock.and(EasyMock.anyObject(), EasyMock.capture(capturedFuture)));
    EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {

        @Override
        public Void answer() {
            queryIsRegistered.countDown();
            return null;
        }
    }).once();
    EasyMock.replay(watcher);
    ArrayBlockingQueue<DyingQueryRunner> interrupted = new ArrayBlockingQueue<>(3);
    Set<DyingQueryRunner> runners = Sets.newHashSet(new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted), new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted), new DyingQueryRunner(queriesStarted, queriesInterrupted, interrupted));
    ChainedExecutionQueryRunner chainedRunner = new ChainedExecutionQueryRunner<>(new ForwardingQueryProcessingPool(exec), watcher, Lists.newArrayList(runners));
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("test").intervals("2014/2015").aggregators(Collections.singletonList(new CountAggregatorFactory("count"))).context(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 100, "queryId", "test")).build();
    final Sequence seq = chainedRunner.run(QueryPlus.wrap(query));
    Future resultFuture = Executors.newFixedThreadPool(1).submit(new Runnable() {

        @Override
        public void run() {
            seq.toList();
        }
    });
    // wait for query to register and start
    queryIsRegistered.await();
    queriesStarted.await();
    Assert.assertTrue(capturedFuture.hasCaptured());
    ListenableFuture future = capturedFuture.getValue();
    // wait for query to time out
    QueryTimeoutException cause = null;
    try {
        resultFuture.get();
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof QueryTimeoutException);
        Assert.assertEquals("Query timeout", ((QueryTimeoutException) e.getCause()).getErrorCode());
        cause = (QueryTimeoutException) e.getCause();
    }
    queriesInterrupted.await();
    Assert.assertNotNull(cause);
    Assert.assertTrue(future.isCancelled());
    DyingQueryRunner interrupted1 = interrupted.poll();
    synchronized (interrupted1) {
        Assert.assertTrue("runner 1 started", interrupted1.hasStarted);
        Assert.assertTrue("runner 1 interrupted", interrupted1.interrupted);
    }
    DyingQueryRunner interrupted2 = interrupted.poll();
    synchronized (interrupted2) {
        Assert.assertTrue("runner 2 started", interrupted2.hasStarted);
        Assert.assertTrue("runner 2 interrupted", interrupted2.interrupted);
    }
    runners.remove(interrupted1);
    runners.remove(interrupted2);
    DyingQueryRunner remainingRunner = runners.iterator().next();
    synchronized (remainingRunner) {
        Assert.assertTrue("runner 3 should be interrupted or not have started", !remainingRunner.hasStarted || remainingRunner.interrupted);
    }
    Assert.assertFalse("runner 1 not completed", interrupted1.hasCompleted);
    Assert.assertFalse("runner 2 not completed", interrupted2.hasCompleted);
    Assert.assertFalse("runner 3 not completed", remainingRunner.hasCompleted);
    EasyMock.verify(watcher);
}
Also used: TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) Lifecycle(org.apache.druid.java.util.common.lifecycle.Lifecycle) Sequence(org.apache.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) IAnswer(org.easymock.IAnswer) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) ExecutorService(java.util.concurrent.ExecutorService) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)

Aggregations

Sequence (org.apache.druid.java.util.common.guava.Sequence): 102
Test (org.junit.Test): 53
List (java.util.List): 44
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 37
ResponseContext (org.apache.druid.query.context.ResponseContext): 32
ImmutableList (com.google.common.collect.ImmutableList): 29
Intervals (org.apache.druid.java.util.common.Intervals): 28
Granularities (org.apache.druid.java.util.common.granularity.Granularities): 28
QueryRunner (org.apache.druid.query.QueryRunner): 28
ArrayList (java.util.ArrayList): 27
VirtualColumns (org.apache.druid.segment.VirtualColumns): 26
Cursor (org.apache.druid.segment.Cursor): 25
QueryPlus (org.apache.druid.query.QueryPlus): 24
Result (org.apache.druid.query.Result): 24
NullHandling (org.apache.druid.common.config.NullHandling): 22
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 22
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 21
QueryableIndexStorageAdapter (org.apache.druid.segment.QueryableIndexStorageAdapter): 20
DataSegment (org.apache.druid.timeline.DataSegment): 20
ImmutableMap (com.google.common.collect.ImmutableMap): 18