Use of io.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class SpatialFilterTest, method testSpatialQueryWithOtherSpatialDim:
@Test
public void testSpatialQueryWithOtherSpatialDim() {
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("test")
      .granularity(Granularities.ALL)
      .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07")))
      .filters(new SpatialDimFilter("spatialIsRad", new RadiusBound(new float[] { 0.0f, 0.0f }, 5)))
      .aggregators(
          Arrays.<AggregatorFactory>asList(
              new CountAggregatorFactory("rows"),
              new LongSumAggregatorFactory("val", "val")))
      .build();
  List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
      new Result<TimeseriesResultValue>(
          new DateTime("2013-01-01T00:00:00.000Z"),
          new TimeseriesResultValue(
              ImmutableMap.<String, Object>builder()
                  .put("rows", 1L)
                  .put("val", 13L)
                  .build())));
  try {
    TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
        new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()),
        new TimeseriesQueryEngine(),
        QueryRunnerTestHelper.NOOP_QUERYWATCHER);
    QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
    TestHelper.assertExpectedResults(expectedResults, runner.run(query, Maps.newHashMap()));
  } catch (Exception e) {
    throw Throwables.propagate(e);
  }
}
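All of the examples on this page share the same decoration pattern: a base QueryRunner is wrapped in a FinalizeResultsQueryRunner together with the matching QueryToolChest, so that aggregator values are finalized (for example, complex intermediate objects rendered as plain numbers) before results are returned or compared. A minimal sketch of the pattern, assuming a QueryRunnerFactory named factory and a Segment named segment are in scope (both are placeholders, not taken verbatim from the Druid sources):

// Sketch only: `factory` and `segment` are assumed to exist in the caller.
QueryRunner baseRunner = factory.createRunner(segment);   // evaluates the query against one segment
QueryRunner runner = new FinalizeResultsQueryRunner(
    baseRunner,
    factory.getToolchest());                              // the toolchest supplies the finalization logic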
Use of io.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class ServerManager, method getQueryRunnerForSegments:
@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(Query<T> query, Iterable<SegmentDescriptor> specs) {
  final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
  if (factory == null) {
    log.makeAlert("Unknown query type, [%s]", query.getClass())
       .addData("dataSource", query.getDataSource())
       .emit();
    return new NoopQueryRunner<T>();
  }
  final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
  String dataSourceName = getDataSourceName(query.getDataSource());
  final VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline = dataSources.get(dataSourceName);
  if (timeline == null) {
    return new NoopQueryRunner<T>();
  }
  final Function<Query<T>, ServiceMetricEvent.Builder> builderFn = getBuilderFn(toolChest);
  final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);
  // Build one QueryRunner per requested segment descriptor; descriptors that no
  // longer resolve in the timeline are answered with a "missing segment" report
  // instead of failing the whole query.
  FunctionalIterable<QueryRunner<T>> queryRunners = FunctionalIterable
      .create(specs)
      .transformCat(
          new Function<SegmentDescriptor, Iterable<QueryRunner<T>>>() {
            @Override
            @SuppressWarnings("unchecked")
            public Iterable<QueryRunner<T>> apply(SegmentDescriptor input) {
              final PartitionHolder<ReferenceCountingSegment> entry =
                  timeline.findEntry(input.getInterval(), input.getVersion());
              if (entry == null) {
                return Arrays.<QueryRunner<T>>asList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
              }
              final PartitionChunk<ReferenceCountingSegment> chunk = entry.getChunk(input.getPartitionNumber());
              if (chunk == null) {
                return Arrays.<QueryRunner<T>>asList(new ReportTimelineMissingSegmentQueryRunner<T>(input));
              }
              final ReferenceCountingSegment adapter = chunk.getObject();
              return Arrays.asList(
                  buildAndDecorateQueryRunner(factory, toolChest, adapter, input, builderFn, cpuTimeAccumulator));
            }
          });
  return CPUTimeMetricQueryRunner.safeBuild(
      new FinalizeResultsQueryRunner<>(toolChest.mergeResults(factory.mergeRunners(exec, queryRunners)), toolChest),
      builderFn,
      emitter,
      cpuTimeAccumulator,
      true);
}
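The return statement above stacks several decorators in a single expression. Unpacked into intermediate variables purely for readability (the names merged, combined, and finalized are invented here; everything else comes from the method body), the layering from innermost to outermost is:

// Readability sketch of the return expression in getQueryRunnerForSegments.
QueryRunner<T> merged = factory.mergeRunners(exec, queryRunners);                   // run and merge the per-segment runners on the executor
QueryRunner<T> combined = toolChest.mergeResults(merged);                           // combine the merged result sequences at the query level
QueryRunner<T> finalized = new FinalizeResultsQueryRunner<>(combined, toolChest);   // finalize aggregator values
return CPUTimeMetricQueryRunner.safeBuild(finalized, builderFn, emitter, cpuTimeAccumulator, true);  // attach CPU-time metrics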
Use of io.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testTopNOnPostAggMetricCaching:
@Test
public void testTopNOnPostAggMetricCaching() {
  final TopNQueryBuilder builder = new TopNQueryBuilder()
      .dataSource(DATA_SOURCE)
      .dimension(TOP_DIM)
      .metric("avg_imps_per_row_double")
      .threshold(3)
      .intervals(SEG_SPEC)
      .filters(DIM_FILTER)
      .granularity(GRANULARITY)
      .aggregators(AGGS)
      .postAggregators(POST_AGGS)
      .context(CONTEXT);
  QueryRunner runner = new FinalizeResultsQueryRunner(
      client,
      new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()));
  testQueryCaching(
      runner,
      builder.build(),
      new Interval("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(),
      new Interval("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(),
      new Interval("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename(
          new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
          new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
          new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983),
      new Interval("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename(
          new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
          new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
          new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983));
  HashMap<String, List> context = new HashMap<String, List>();
  TestHelper.assertExpectedResults(
      makeTopNResultsWithoutRename(
          new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
          new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
          new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
          new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
          new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983,
          new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983),
      runner.run(
          builder.intervals("2011-01-01/2011-01-10")
                 .metric("avg_imps_per_row_double")
                 .aggregators(AGGS)
                 .postAggregators(DIFF_ORDER_POST_AGGS)
                 .build(),
          context));
}
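The long testQueryCaching call above is easier to read once its varargs convention is clear: after the fixed arguments, the list alternates a query Interval with the results expected to be cached for that interval, and makeTopNResultsWithoutRename in turn takes a timestamp followed by repeated (dimension value, metric, metric) groups. A hypothetical minimal call, with made-up values, just to show the pairing (the exact meaning of the numeric columns is defined by the helper, which is not shown on this page):

// Illustrative only; interval and result values are invented.
testQueryCaching(
    runner,
    builder.build(),
    new Interval("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(),   // expect nothing cached for this chunk
    new Interval("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(
        new DateTime("2011-01-02"), "a", 50, 5000));                         // one group for dimension value "a"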
Use of io.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testDisableUseCache:
@Test
public void testDisableUseCache() throws Exception {
  final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
      .dataSource(DATA_SOURCE)
      .intervals(SEG_SPEC)
      .filters(DIM_FILTER)
      .granularity(GRANULARITY)
      .aggregators(AGGS)
      .postAggregators(POST_AGGS)
      .context(CONTEXT);
  QueryRunner runner = new FinalizeResultsQueryRunner(
      client,
      new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()));
  testQueryCaching(
      runner,
      1,
      true,
      builder.context(ImmutableMap.<String, Object>of("useCache", "false", "populateCache", "true")).build(),
      new Interval("2011-01-01/2011-01-02"),
      makeTimeResults(new DateTime("2011-01-01"), 50, 5000));
  Assert.assertEquals(1, cache.getStats().getNumEntries());
  Assert.assertEquals(0, cache.getStats().getNumHits());
  Assert.assertEquals(0, cache.getStats().getNumMisses());
  cache.close("0_0");
  testQueryCaching(
      runner,
      1,
      false,
      builder.context(ImmutableMap.<String, Object>of("useCache", "false", "populateCache", "false")).build(),
      new Interval("2011-01-01/2011-01-02"),
      makeTimeResults(new DateTime("2011-01-01"), 50, 5000));
  Assert.assertEquals(0, cache.getStats().getNumEntries());
  Assert.assertEquals(0, cache.getStats().getNumHits());
  Assert.assertEquals(0, cache.getStats().getNumMisses());
  testQueryCaching(
      client,
      1,
      false,
      builder.context(ImmutableMap.<String, Object>of("useCache", "true", "populateCache", "false")).build(),
      new Interval("2011-01-01/2011-01-02"),
      makeTimeResults(new DateTime("2011-01-01"), 50, 5000));
  Assert.assertEquals(0, cache.getStats().getNumEntries());
  Assert.assertEquals(0, cache.getStats().getNumHits());
  Assert.assertEquals(1, cache.getStats().getNumMisses());
}
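The three testQueryCaching phases above differ only in the cache flags passed through the query context: "useCache" gates reads from the cache and "populateCache" gates writes to it, which is why only the first phase creates an entry and only the third phase (run against the cache emptied by cache.close) records a miss. The three contexts side by side, as a sketch (variable names invented for illustration):

// The context permutations exercised above.
Map<String, Object> populateOnly = ImmutableMap.of("useCache", "false", "populateCache", "true");    // write, never read
Map<String, Object> cacheDisabled = ImmutableMap.of("useCache", "false", "populateCache", "false");  // cache untouched
Map<String, Object> readOnly = ImmutableMap.of("useCache", "true", "populateCache", "false");        // read, never write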
Use of io.druid.query.FinalizeResultsQueryRunner in project druid by druid-io.
From the class CachingClusteredClientTest, method testSelectCaching:
@Test
public void testSelectCaching() throws Exception {
  final Set<String> dimensions = Sets.<String>newHashSet("a");
  final Set<String> metrics = Sets.<String>newHashSet("rows");
  Druids.SelectQueryBuilder builder = Druids.newSelectQueryBuilder()
      .dataSource(DATA_SOURCE)
      .intervals(SEG_SPEC)
      .filters(DIM_FILTER)
      .granularity(GRANULARITY)
      .dimensions(Arrays.asList("a"))
      .metrics(Arrays.asList("rows"))
      .pagingSpec(new PagingSpec(null, 3))
      .context(CONTEXT);
  testQueryCaching(
      client,
      builder.build(),
      new Interval("2011-01-01/2011-01-02"),
      makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)),
      new Interval("2011-01-02/2011-01-03"),
      makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)),
      new Interval("2011-01-05/2011-01-10"),
      makeSelectResults(
          dimensions, metrics,
          new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
          new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
          new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
          new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
          new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9)),
      new Interval("2011-01-05/2011-01-10"),
      makeSelectResults(
          dimensions, metrics,
          new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
          new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
          new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
          new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
          new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)));
  QueryRunner runner = new FinalizeResultsQueryRunner(
      client,
      new SelectQueryQueryToolChest(jsonMapper, QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator(), selectConfigSupplier));
  HashMap<String, Object> context = new HashMap<String, Object>();
  TestHelper.assertExpectedResults(
      makeSelectResults(
          dimensions, metrics,
          new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1),
          new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5),
          new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
          new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
          new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
          new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
          new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
          new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
          new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
          new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
          new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9),
          new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)),
      runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context));
}