
Example 16 with TimeseriesQueryRunnerFactory

Use of org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory in project druid by druid-io.

From the class RealtimeIndexTaskTest, method makeToolbox:

private TaskToolbox makeToolbox(final Task task, final TaskStorage taskStorage, final IndexerMetadataStorageCoordinator mdc, final File directory) {
    final TaskConfig taskConfig = new TaskConfig(directory.getPath(), null, null, 50000, null, true, null, null, null, false, false, TaskConfig.BATCH_PROCESSING_MODE_DEFAULT.name());
    final TaskLockbox taskLockbox = new TaskLockbox(taskStorage, mdc);
    try {
        taskStorage.insert(task, TaskStatus.running(task.getId()));
    } catch (EntryExistsException e) {
        // suppress
    }
    taskLockbox.syncFromStorage();
    final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, taskStorage, mdc, EMITTER, EasyMock.createMock(SupervisorManager.class));
    final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage, taskActionToolbox, new TaskAuditLogConfig(false));
    final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(
        ImmutableMap.of(
            TimeseriesQuery.class,
            new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), new QueryWatcher() {
                @Override
                public void registerQueryFuture(Query query, ListenableFuture future) {
                    // do nothing
                }
            })
        )
    );
    handOffCallbacks = new ConcurrentHashMap<>();
    final SegmentHandoffNotifierFactory handoffNotifierFactory = new SegmentHandoffNotifierFactory() {

        @Override
        public SegmentHandoffNotifier createSegmentHandoffNotifier(String dataSource) {
            return new SegmentHandoffNotifier() {

                @Override
                public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
                    handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
                    return true;
                }

                @Override
                public void start() {
                    // Noop
                }

                @Override
                public void close() {
                    // Noop
                }
            };
        }
    };
    final TestUtils testUtils = new TestUtils();
    final TaskToolboxFactory toolboxFactory = new TaskToolboxFactory(
        taskConfig,
        null, // taskExecutorNode
        taskActionClientFactory,
        EMITTER,
        new TestDataSegmentPusher(),
        new TestDataSegmentKiller(),
        null, // DataSegmentMover
        null, // DataSegmentArchiver
        new TestDataSegmentAnnouncer(),
        EasyMock.createNiceMock(DataSegmentServerAnnouncer.class),
        handoffNotifierFactory,
        () -> conglomerate,
        DirectQueryProcessingPool.INSTANCE,
        NoopJoinableFactory.INSTANCE,
        () -> EasyMock.createMock(MonitorScheduler.class),
        new SegmentCacheManagerFactory(testUtils.getTestObjectMapper()),
        testUtils.getTestObjectMapper(),
        testUtils.getTestIndexIO(),
        MapCache.create(1024),
        new CacheConfig(),
        new CachePopulatorStats(),
        testUtils.getTestIndexMergerV9(),
        EasyMock.createNiceMock(DruidNodeAnnouncer.class),
        EasyMock.createNiceMock(DruidNode.class),
        new LookupNodeService("tier"),
        new DataNodeService("tier", 1000, ServerType.INDEXER_EXECUTOR, 0),
        new NoopTestTaskReportFileWriter(),
        null,
        AuthTestUtils.TEST_AUTHORIZER_MAPPER,
        new NoopChatHandlerProvider(),
        testUtils.getRowIngestionMetersFactory(),
        new TestAppenderatorsManager(),
        new NoopIndexingServiceClient(),
        null,
        null,
        null
    );
    return toolboxFactory.build(task);
}
Also used : TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) Query(org.apache.druid.query.Query) QueryWatcher(org.apache.druid.query.QueryWatcher) TaskActionClientFactory(org.apache.druid.indexing.common.actions.TaskActionClientFactory) LocalTaskActionClientFactory(org.apache.druid.indexing.common.actions.LocalTaskActionClientFactory) TestDataSegmentAnnouncer(org.apache.druid.indexing.test.TestDataSegmentAnnouncer) TaskConfig(org.apache.druid.indexing.common.config.TaskConfig) DruidNodeAnnouncer(org.apache.druid.discovery.DruidNodeAnnouncer) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) TaskAuditLogConfig(org.apache.druid.indexing.common.actions.TaskAuditLogConfig) AuthTestUtils(org.apache.druid.server.security.AuthTestUtils) TestUtils(org.apache.druid.indexing.common.TestUtils) QueryRunnerFactoryConglomerate(org.apache.druid.query.QueryRunnerFactoryConglomerate) DefaultQueryRunnerFactoryConglomerate(org.apache.druid.query.DefaultQueryRunnerFactoryConglomerate) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) Executor(java.util.concurrent.Executor) NoopIndexingServiceClient(org.apache.druid.client.indexing.NoopIndexingServiceClient) TaskToolboxFactory(org.apache.druid.indexing.common.TaskToolboxFactory) SegmentDescriptor(org.apache.druid.query.SegmentDescriptor) CachePopulatorStats(org.apache.druid.client.cache.CachePopulatorStats) TaskActionToolbox(org.apache.druid.indexing.common.actions.TaskActionToolbox) LocalTaskActionClientFactory(org.apache.druid.indexing.common.actions.LocalTaskActionClientFactory) CacheConfig(org.apache.druid.client.cache.CacheConfig) TestDataSegmentPusher(org.apache.druid.indexing.test.TestDataSegmentPusher) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) MonitorScheduler(org.apache.druid.java.util.metrics.MonitorScheduler) NoopChatHandlerProvider(org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider) SegmentHandoffNotifier(org.apache.druid.segment.handoff.SegmentHandoffNotifier) SegmentCacheManagerFactory(org.apache.druid.indexing.common.SegmentCacheManagerFactory) DefaultQueryRunnerFactoryConglomerate(org.apache.druid.query.DefaultQueryRunnerFactoryConglomerate) EntryExistsException(org.apache.druid.metadata.EntryExistsException) LookupNodeService(org.apache.druid.discovery.LookupNodeService) TestDataSegmentKiller(org.apache.druid.indexing.test.TestDataSegmentKiller) DataSegmentServerAnnouncer(org.apache.druid.server.coordination.DataSegmentServerAnnouncer) SegmentHandoffNotifierFactory(org.apache.druid.segment.handoff.SegmentHandoffNotifierFactory) SupervisorManager(org.apache.druid.indexing.overlord.supervisor.SupervisorManager) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) TaskLockbox(org.apache.druid.indexing.overlord.TaskLockbox) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) DruidNode(org.apache.druid.server.DruidNode) DataNodeService(org.apache.druid.discovery.DataNodeService)
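
The handOffCallbacks map populated in makeToolbox keeps one (Executor, Runnable) pair per segment that is waiting for handoff. In a test, firing those callbacks simulates what a real SegmentHandoffNotifier does once a historical has loaded the segment. A minimal sketch of that pattern, assuming the same field shape as above (the simulateHandoff helper itself is hypothetical, not part of RealtimeIndexTaskTest):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.query.SegmentDescriptor;

public class HandoffCallbackSketch {

    private final Map<SegmentDescriptor, Pair<Executor, Runnable>> handOffCallbacks = new ConcurrentHashMap<>();

    // Fire every pending handoff callback, as a test driver might after the
    // segments under test have been published.
    public void simulateHandoff() {
        for (Pair<Executor, Runnable> callback : handOffCallbacks.values()) {
            // lhs is the executor the task registered, rhs the runnable to invoke on handoff
            callback.lhs.execute(callback.rhs);
        }
    }
}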

Example 17 with TimeseriesQueryRunnerFactory

Use of org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory in project druid by druid-io.

From the class OnheapIncrementalIndexBenchmark, method testConcurrentAddRead:

@Ignore
@Test
@BenchmarkOptions(callgc = true, clock = Clock.REAL_TIME, warmupRounds = 10, benchmarkRounds = 20)
public void testConcurrentAddRead() throws InterruptedException, ExecutionException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
    final int taskCount = 30;
    final int concurrentThreads = 3;
    final int elementsPerThread = 1 << 15;
    final IncrementalIndex incrementalIndex = this.incrementalIndex.getConstructor(IncrementalIndexSchema.class, boolean.class, boolean.class, boolean.class, boolean.class, int.class).newInstance(new IncrementalIndexSchema.Builder().withMetrics(factories).build(), true, true, false, true, elementsPerThread * taskCount);
    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(DIMENSION_COUNT + 1);
    queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
    for (int i = 0; i < DIMENSION_COUNT; ++i) {
        queryAggregatorFactories.add(new LongSumAggregatorFactory(StringUtils.format("sumResult%s", i), StringUtils.format("sumResult%s", i)));
        queryAggregatorFactories.add(new DoubleSumAggregatorFactory(StringUtils.format("doubleSumResult%s", i), StringUtils.format("doubleSumResult%s", i)));
    }
    final ListeningExecutorService indexExecutor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(concurrentThreads, new ThreadFactoryBuilder().setDaemon(false).setNameFormat("index-executor-%d").setPriority(Thread.MIN_PRIORITY).build()));
    final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(concurrentThreads, new ThreadFactoryBuilder().setDaemon(false).setNameFormat("query-executor-%d").build()));
    final long timestamp = System.currentTimeMillis();
    final Interval queryInterval = Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z");
    final List<ListenableFuture<?>> indexFutures = new ArrayList<>();
    final List<ListenableFuture<?>> queryFutures = new ArrayList<>();
    final Segment incrementalIndexSegment = new IncrementalIndexSegment(incrementalIndex, null);
    final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    final AtomicInteger currentlyRunning = new AtomicInteger(0);
    final AtomicBoolean concurrentlyRan = new AtomicBoolean(false);
    final AtomicBoolean someoneRan = new AtomicBoolean(false);
    for (int j = 0; j < taskCount; j++) {
        indexFutures.add(indexExecutor.submit(new Runnable() {

            @Override
            public void run() {
                currentlyRunning.incrementAndGet();
                try {
                    for (int i = 0; i < elementsPerThread; i++) {
                        incrementalIndex.add(getLongRow(timestamp + i, 1, DIMENSION_COUNT));
                    }
                } catch (IndexSizeExceededException e) {
                    throw new RuntimeException(e);
                }
                currentlyRunning.decrementAndGet();
                someoneRan.set(true);
            }
        }));
        queryFutures.add(queryExecutor.submit(new Runnable() {

            @Override
            public void run() {
                QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
                TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("xxx").granularity(Granularities.ALL).intervals(ImmutableList.of(queryInterval)).aggregators(queryAggregatorFactories).build();
                List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
                for (Result<TimeseriesResultValue> result : results) {
                    if (someoneRan.get()) {
                        Assert.assertTrue(result.getValue().getDoubleMetric("doubleSumResult0") > 0);
                    }
                }
                if (currentlyRunning.get() > 0) {
                    concurrentlyRan.set(true);
                }
            }
        }));
    }
    List<ListenableFuture<?>> allFutures = new ArrayList<>(queryFutures.size() + indexFutures.size());
    allFutures.addAll(queryFutures);
    allFutures.addAll(indexFutures);
    Futures.allAsList(allFutures).get();
    // Assert.assertTrue("Did not hit concurrency, please try again", concurrentlyRan.get());
    queryExecutor.shutdown();
    indexExecutor.shutdown();
    QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(factory.createRunner(incrementalIndexSegment), factory.getToolchest());
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder().dataSource("xxx").granularity(Granularities.ALL).intervals(ImmutableList.of(queryInterval)).aggregators(queryAggregatorFactories).build();
    List<Result<TimeseriesResultValue>> results = runner.run(QueryPlus.wrap(query)).toList();
    final int expectedVal = elementsPerThread * taskCount;
    for (Result<TimeseriesResultValue> result : results) {
        Assert.assertEquals(elementsPerThread, result.getValue().getLongMetric("rows").intValue());
        for (int i = 0; i < DIMENSION_COUNT; ++i) {
            Assert.assertEquals(StringUtils.format("Failed long sum on dimension %d", i), expectedVal, result.getValue().getLongMetric(StringUtils.format("sumResult%s", i)).intValue());
            Assert.assertEquals(StringUtils.format("Failed double sum on dimension %d", i), expectedVal, result.getValue().getDoubleMetric(StringUtils.format("doubleSumResult%s", i)).intValue());
        }
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) IncrementalIndexSegment(org.apache.druid.segment.IncrementalIndexSegment) ArrayList(java.util.ArrayList) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) IncrementalIndexSegment(org.apache.druid.segment.IncrementalIndexSegment) Segment(org.apache.druid.segment.Segment) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) DoubleSumAggregatorFactory(org.apache.druid.query.aggregation.DoubleSumAggregatorFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) QueryRunnerFactory(org.apache.druid.query.QueryRunnerFactory) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) ListeningExecutorService(com.google.common.util.concurrent.ListeningExecutorService) Interval(org.joda.time.Interval) Ignore(org.junit.Ignore) Test(org.junit.Test) BenchmarkOptions(com.carrotsearch.junitbenchmarks.BenchmarkOptions)
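
Stripped of the benchmark scaffolding, the query side of the test above reduces to a small pattern: build a TimeseriesQueryRunnerFactory, wrap its per-segment runner in a FinalizeResultsQueryRunner so the tool chest can finalize the aggregated results, then materialize a Druids-built query. A condensed sketch of that pattern, assuming the caller supplies any Segment; the data source name, interval, and aggregator are placeholders:

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.FinalizeResultsQueryRunner;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.Result;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.timeseries.TimeseriesQueryEngine;
import org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest;
import org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory;
import org.apache.druid.query.timeseries.TimeseriesResultValue;
import org.apache.druid.segment.Segment;

public class TimeseriesRunSketch {

    public static List<Result<TimeseriesResultValue>> countRows(Segment segment) {
        TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
            new TimeseriesQueryQueryToolChest(),
            new TimeseriesQueryEngine(),
            (query, future) -> {} // no-op QueryWatcher, standing in for QueryRunnerTestHelper.NOOP_QUERYWATCHER
        );
        // FinalizeResultsQueryRunner applies the tool chest's result finalization
        // on top of the factory's per-segment runner.
        QueryRunner<Result<TimeseriesResultValue>> runner = new FinalizeResultsQueryRunner<Result<TimeseriesResultValue>>(
            factory.createRunner(segment),
            factory.getToolchest()
        );
        TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
            .dataSource("xxx") // placeholder data source
            .granularity(Granularities.ALL)
            .intervals(ImmutableList.of(Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z")))
            .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
            .build();
        return runner.run(QueryPlus.wrap(query)).toList();
    }
}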

Example 18 with TimeseriesQueryRunnerFactory

Use of org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory in project druid by druid-io.

From the class SpatialFilterBonusTest, method testSpatialQueryMorePoints:

@Test
public void testSpatialQueryMorePoints() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("test")
        .granularity(Granularities.DAY)
        .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
        .filters(new SpatialDimFilter("dim.geo", new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f })))
        .aggregators(Arrays.asList(new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val")))
        .build();
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 17L).build())),
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 29L).build())),
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 13L).build())),
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 91L).build())),
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 47L).build()))
    );
    try {
        TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
        QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
        TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) IOException(java.io.IOException) RectangularBound(org.apache.druid.collections.spatial.search.RectangularBound) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) SpatialDimFilter(org.apache.druid.query.filter.SpatialDimFilter) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) Test(org.junit.Test)

Example 19 with TimeseriesQueryRunnerFactory

Use of org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory in project druid by druid-io.

From the class SpatialFilterTest, method testSpatialQueryMorePoints:

@Test
public void testSpatialQueryMorePoints() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("test")
        .granularity(Granularities.DAY)
        .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
        .filters(new SpatialDimFilter("dim.geo", new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f })))
        .aggregators(Arrays.asList(new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val")))
        .build();
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 17L).build())),
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 29L).build())),
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 13L).build())),
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 91L).build())),
        new Result<TimeseriesResultValue>(DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 47L).build()))
    );
    try {
        TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
        QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
        TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) IOException(java.io.IOException) RectangularBound(org.apache.druid.collections.spatial.search.RectangularBound) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) SpatialDimFilter(org.apache.druid.query.filter.SpatialDimFilter) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)

Example 20 with TimeseriesQueryRunnerFactory

Use of org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory in project druid by druid-io.

From the class IndexMergerV9WithSpatialIndexTest, method testSpatialQueryMorePoints:

@Test
public void testSpatialQueryMorePoints() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource("test")
        .granularity(Granularities.DAY)
        .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
        .filters(new SpatialDimFilter("dim.geo", new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f })))
        .aggregators(Arrays.asList(new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val")))
        .build();
    List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
        new Result<>(DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 17L).build())),
        new Result<>(DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 29L).build())),
        new Result<>(DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 13L).build())),
        new Result<>(DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 91L).build())),
        new Result<>(DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 47L).build()))
    );
    try {
        TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(), new TimeseriesQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER);
        QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
        TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used : TimeseriesResultValue(org.apache.druid.query.timeseries.TimeseriesResultValue) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) TimeseriesQueryQueryToolChest(org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest) QueryRunner(org.apache.druid.query.QueryRunner) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) IOException(java.io.IOException) RectangularBound(org.apache.druid.collections.spatial.search.RectangularBound) Result(org.apache.druid.query.Result) TimeseriesQueryEngine(org.apache.druid.query.timeseries.TimeseriesQueryEngine) SpatialDimFilter(org.apache.druid.query.filter.SpatialDimFilter) TimeseriesQueryRunnerFactory(org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
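
Examples 18, 19, and 20 run the same spatially filtered timeseries query against different index implementations; only the segment under test differs. The part specific to spatial indexing is the filter construction, isolated in the sketch below (dimension name, bounds, and aggregators are copied from the tests; the builder class itself is just illustrative). The Aggregations list that follows also mentions RadiusBound, which other examples in this set use in place of RectangularBound to select points within a radius of a coordinate.

import java.util.Arrays;
import java.util.Collections;
import org.apache.druid.collections.spatial.search.RectangularBound;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.filter.SpatialDimFilter;
import org.apache.druid.query.timeseries.TimeseriesQuery;

public class SpatialTimeseriesQuerySketch {

    public static TimeseriesQuery build() {
        // Rows whose "dim.geo" coordinates fall inside the rectangle (0,0)-(9,9) pass the filter.
        SpatialDimFilter filter = new SpatialDimFilter(
            "dim.geo",
            new RectangularBound(new float[] { 0.0f, 0.0f }, new float[] { 9.0f, 9.0f })
        );
        return Druids.newTimeseriesQueryBuilder()
            .dataSource("test")
            .granularity(Granularities.DAY)
            .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
            .filters(filter)
            .aggregators(Arrays.asList(new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val")))
            .build();
    }
}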

Aggregations

TimeseriesQueryRunnerFactory (org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory): 23 usages
TimeseriesQueryEngine (org.apache.druid.query.timeseries.TimeseriesQueryEngine): 20 usages
TimeseriesQueryQueryToolChest (org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest): 20 usages
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery): 18 usages
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 16 usages
Test (org.junit.Test): 16 usages
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 15 usages
Result (org.apache.druid.query.Result): 13 usages
TimeseriesResultValue (org.apache.druid.query.timeseries.TimeseriesResultValue): 13 usages
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 12 usages
IOException (java.io.IOException): 10 usages
QueryRunner (org.apache.druid.query.QueryRunner): 9 usages
SpatialDimFilter (org.apache.druid.query.filter.SpatialDimFilter): 9 usages
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 9 usages
RadiusBound (org.apache.druid.collections.spatial.search.RadiusBound): 5 usages
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 5 usages
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 5 usages
FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory): 5 usages
ListenableFuture (com.google.common.util.concurrent.ListenableFuture): 3 usages
ArrayList (java.util.ArrayList): 3 usages