Use of io.druid.query.QueryRunner in project druid by druid-io.
From class CachingQueryRunnerTest, method testUseCache.
private void testUseCache(List<Result> expectedResults, Query query, QueryToolChest toolchest) throws Exception {
  DefaultObjectMapper objectMapper = new DefaultObjectMapper();
  String segmentIdentifier = "segment";
  SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
  CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
  Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(
      segmentIdentifier,
      segmentDescriptor,
      cacheStrategy.computeCacheKey(query)
  );
  Cache cache = MapCache.create(1024 * 1024);
  CacheUtil.populate(cache, objectMapper, cacheKey, Iterables.transform(expectedResults, cacheStrategy.prepareForCache()));
  CachingQueryRunner runner = new CachingQueryRunner(
      segmentIdentifier,
      segmentDescriptor,
      objectMapper,
      cache,
      toolchest,
      // return an empty sequence since results should get pulled from cache
      new QueryRunner() {
        @Override
        public Sequence run(Query query, Map responseContext) {
          return Sequences.empty();
        }
      },
      backgroundExecutorService,
      new CacheConfig() {
        @Override
        public boolean isPopulateCache() {
          return true;
        }

        @Override
        public boolean isUseCache() {
          return true;
        }
      }
  );
  HashMap<String, Object> context = new HashMap<String, Object>();
  List<Result> results = Sequences.toList(runner.run(query, context), new ArrayList());
  Assert.assertEquals(expectedResults.toString(), results.toString());
}
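For context, the MapCache above is an in-heap byte store keyed by Cache.NamedKey, and the test depends on its put/get round-trip. A minimal standalone sketch of that round-trip, assuming the io.druid.client.cache package of this Druid version (the namespace and values here are made up for illustration):

import io.druid.client.cache.Cache;
import io.druid.client.cache.MapCache;
import java.nio.charset.StandardCharsets;

public class MapCacheRoundTrip {
  public static void main(String[] args) {
    // in-heap cache capped at 1 MB, matching the test above
    Cache cache = MapCache.create(1024 * 1024);
    // a NamedKey pairs a namespace (e.g. a segment identifier) with raw key bytes
    Cache.NamedKey key = new Cache.NamedKey("segment", "demo-key".getBytes(StandardCharsets.UTF_8));
    cache.put(key, "demo-value".getBytes(StandardCharsets.UTF_8));
    byte[] raw = cache.get(key); // returns null on a cache miss
    System.out.println(raw == null ? "miss" : new String(raw, StandardCharsets.UTF_8));
  }
}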
Use of io.druid.query.QueryRunner in project druid by druid-io.
From class AggregationTestHelper, method makeStringSerdeQueryRunner.
public QueryRunner<Row> makeStringSerdeQueryRunner(final ObjectMapper mapper, final QueryToolChest toolChest, final Query<Row> query, final QueryRunner<Row> baseRunner) {
  return new QueryRunner<Row>() {
    @Override
    public Sequence<Row> run(Query<Row> query, Map<String, Object> map) {
      try {
        Sequence<Row> resultSeq = baseRunner.run(query, Maps.<String, Object>newHashMap());
        // an accumulator that yields after every element, so Jackson can
        // serialize the sequence incrementally through the Yielder
        final Yielder yielder = resultSeq.toYielder(
            null,
            new YieldingAccumulator() {
              @Override
              public Object accumulate(Object accumulated, Object in) {
                yield();
                return in;
              }
            }
        );
        // round-trip the results through a JSON string to exercise the serde path
        String resultStr = mapper.writer().writeValueAsString(yielder);
        TypeFactory typeFactory = mapper.getTypeFactory();
        JavaType baseType = typeFactory.constructType(toolChest.getResultTypeReference());
        List resultRows = Lists.transform(
            readQueryResultArrayFromString(resultStr),
            toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing())
        );
        return Sequences.simple(resultRows);
      } catch (Exception ex) {
        throw Throwables.propagate(ex);
      }
    }
  };
}
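The Yielder trick above, an accumulator that yields after every element, turns a lazy Sequence into an element-at-a-time cursor. A self-contained sketch of the same pattern, assuming the com.metamx.common.guava location of these classes (they moved to io.druid.java.util.common.guava in later versions):

import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.guava.Yielder;
import com.metamx.common.guava.YieldingAccumulator;
import java.util.Arrays;

public class YielderDemo {
  public static void main(String[] args) throws Exception {
    Sequence<Integer> seq = Sequences.simple(Arrays.asList(1, 2, 3));
    Yielder<Integer> yielder = seq.toYielder(
        null,
        new YieldingAccumulator<Integer, Integer>() {
          @Override
          public Integer accumulate(Integer accumulated, Integer in) {
            this.yield(); // pause after each element
            return in;
          }
        }
    );
    // walk the sequence one element at a time
    while (!yielder.isDone()) {
      System.out.println(yielder.get());
      yielder = yielder.next(null);
    }
    yielder.close();
  }
}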
Use of io.druid.query.QueryRunner in project druid by druid-io.
From class RealtimeManagerTest, method testQueryWithMultipleSegmentSpec.
@Test(timeout = 10_000L)
public void testQueryWithMultipleSegmentSpec() throws IOException, InterruptedException {
  List<Row> expectedResults_both_partitions = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "business", "rows", 2L, "idx", 260L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "health", "rows", 2L, "idx", 236L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "mezzanine", "rows", 4L, "idx", 4556L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "news", "rows", 2L, "idx", 284L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "technology", "rows", 2L, "idx", 202L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-27", "alias", "automotive", "rows", 2L, "idx", 288L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-27", "alias", "entertainment", "rows", 2L, "idx", 326L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "automotive", "rows", 2L, "idx", 312L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "business", "rows", 2L, "idx", 248L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "entertainment", "rows", 2L, "idx", 326L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "health", "rows", 2L, "idx", 262L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "mezzanine", "rows", 6L, "idx", 5126L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "news", "rows", 2L, "idx", 254L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "premium", "rows", 6L, "idx", 5276L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "technology", "rows", 2L, "idx", 206L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "travel", "rows", 2L, "idx", 260L)
  );
  List<Row> expectedResults_single_partition_26_28 = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "business", "rows", 1L, "idx", 130L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "health", "rows", 1L, "idx", 118L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "mezzanine", "rows", 2L, "idx", 2278L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "news", "rows", 1L, "idx", 142L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-26", "alias", "technology", "rows", 1L, "idx", 101L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-27", "alias", "automotive", "rows", 1L, "idx", 144L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-27", "alias", "entertainment", "rows", 1L, "idx", 163L)
  );
  List<Row> expectedResults_single_partition_28_29 = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "automotive", "rows", 1L, "idx", 156L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "business", "rows", 1L, "idx", 124L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "entertainment", "rows", 1L, "idx", 163L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "health", "rows", 1L, "idx", 131L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "mezzanine", "rows", 3L, "idx", 2563L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "news", "rows", 1L, "idx", 127L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "premium", "rows", 3L, "idx", 2638L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "technology", "rows", 1L, "idx", 103L),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-03-28", "alias", "travel", "rows", 1L, "idx", 130L)
  );
  chiefStartedLatch.await();
  final Interval interval_26_28 = new Interval("2011-03-26T00:00:00.000Z/2011-03-28T00:00:00.000Z");
  final Interval interval_28_29 = new Interval("2011-03-28T00:00:00.000Z/2011-03-29T00:00:00.000Z");
  final SegmentDescriptor descriptor_26_28_0 = new SegmentDescriptor(interval_26_28, "ver0", 0);
  final SegmentDescriptor descriptor_28_29_0 = new SegmentDescriptor(interval_28_29, "ver1", 0);
  final SegmentDescriptor descriptor_26_28_1 = new SegmentDescriptor(interval_26_28, "ver0", 1);
  final SegmentDescriptor descriptor_28_29_1 = new SegmentDescriptor(interval_28_29, "ver1", 1);
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setQuerySegmentSpec(new MultipleSpecificSegmentSpec(ImmutableList.<SegmentDescriptor>of(descriptor_26_28_0, descriptor_28_29_0, descriptor_26_28_1, descriptor_28_29_1)))
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
      .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
      .setGranularity(QueryRunnerTestHelper.dayGran)
      .build();
  final Map<Interval, QueryRunner> runnerMap = ImmutableMap.<Interval, QueryRunner>of(
      interval_26_28, QueryRunnerTestHelper.makeQueryRunner(factory, "druid.sample.numeric.tsv.top", null),
      interval_28_29, QueryRunnerTestHelper.makeQueryRunner(factory, "druid.sample.numeric.tsv.bottom", null)
  );
  plumber.setRunners(runnerMap);
  plumber2.setRunners(runnerMap);
  Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, query.getQuerySegmentSpec().lookup(query, realtimeManager3), query);
  TestHelper.assertExpectedObjects(expectedResults_both_partitions, results, "");
  results = GroupByQueryRunnerTestHelper.runQuery(factory, realtimeManager3.getQueryRunnerForSegments(query, ImmutableList.<SegmentDescriptor>of(descriptor_26_28_0)), query);
  TestHelper.assertExpectedObjects(expectedResults_single_partition_26_28, results, "");
  results = GroupByQueryRunnerTestHelper.runQuery(factory, realtimeManager3.getQueryRunnerForSegments(query, ImmutableList.<SegmentDescriptor>of(descriptor_28_29_0)), query);
  TestHelper.assertExpectedObjects(expectedResults_single_partition_28_29, results, "");
  results = GroupByQueryRunnerTestHelper.runQuery(factory, realtimeManager3.getQueryRunnerForSegments(query, ImmutableList.<SegmentDescriptor>of(descriptor_26_28_1)), query);
  TestHelper.assertExpectedObjects(expectedResults_single_partition_26_28, results, "");
  results = GroupByQueryRunnerTestHelper.runQuery(factory, realtimeManager3.getQueryRunnerForSegments(query, ImmutableList.<SegmentDescriptor>of(descriptor_28_29_1)), query);
  TestHelper.assertExpectedObjects(expectedResults_single_partition_28_29, results, "");
}
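Each SegmentDescriptor used above pins a query to one (interval, version, partition) triple, which is how the later assertions isolate a single partition. A standalone sketch of building such a spec with the same classes (the values are illustrative):

import com.google.common.collect.ImmutableList;
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.MultipleSpecificSegmentSpec;
import org.joda.time.Interval;

public class SingleSegmentSpecSketch {
  public static void main(String[] args) {
    // target only partition 1 of the 03-28/03-29 interval at version "ver1"
    SegmentDescriptor onePartition = new SegmentDescriptor(new Interval("2011-03-28/2011-03-29"), "ver1", 1);
    MultipleSpecificSegmentSpec spec = new MultipleSpecificSegmentSpec(ImmutableList.of(onePartition));
    System.out.println(spec.getIntervals()); // the intervals the descriptors cover
  }
}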
Use of io.druid.query.QueryRunner in project druid by druid-io.
From class GroupByQueryRunnerTest, method testPostAggMergedHavingSpec.
@Test
public void testPostAggMergedHavingSpec() {
  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "index", 4420L, QueryRunnerTestHelper.addRowsIndexConstantMetric, (double) (6L + 4420L + 1L)),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "index", 4416L, QueryRunnerTestHelper.addRowsIndexConstantMetric, (double) (6L + 4416L + 1L))
  );
  GroupByQuery.Builder builder = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
      .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")))
      .setPostAggregatorSpecs(ImmutableList.<PostAggregator>of(QueryRunnerTestHelper.addRowsIndexConstant))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(new OrHavingSpec(ImmutableList.<HavingSpec>of(new GreaterThanHavingSpec(QueryRunnerTestHelper.addRowsIndexConstantMetric, 1000L))));
  final GroupByQuery fullQuery = builder.build();
  QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<Row>() {
    @Override
    public Sequence<Row> run(Query<Row> query, Map<String, Object> responseContext) {
      // simulate two daily segments
      final Query query1 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))));
      final Query query2 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))));
      return new MergeSequence(
          query.getResultOrdering(),
          Sequences.simple(Arrays.asList(runner.run(query1, responseContext), runner.run(query2, responseContext)))
      );
    }
  });
  Map<String, Object> context = Maps.newHashMap();
  TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, context), "merged");
}
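The having spec above filters the merged rows on a post-aggregated metric. In isolation a GreaterThanHavingSpec is just a predicate over a row; a minimal sketch, assuming this version's HavingSpec exposes an eval(Row) method (the metric name and values here are made up):

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.MapBasedRow;
import io.druid.query.groupby.having.GreaterThanHavingSpec;
import org.joda.time.DateTime;

public class HavingSpecSketch {
  public static void main(String[] args) {
    GreaterThanHavingSpec having = new GreaterThanHavingSpec("addRowsIndexConstant", 1000L);
    MapBasedRow row = new MapBasedRow(
        new DateTime("2011-04-01"),
        ImmutableMap.<String, Object>of("addRowsIndexConstant", 4427.0)
    );
    System.out.println(having.eval(row)); // true: 4427.0 > 1000
  }
}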
Use of io.druid.query.QueryRunner in project druid by druid-io.
From class GroupByQueryRunnerTest, method testMergedPostAggHavingSpec.
@Test
public void testMergedPostAggHavingSpec() {
  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L, "rows_times_10", 20.0),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L, "rows_times_10", 60.0),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L, "rows_times_10", 60.0)
  );
  GroupByQuery.Builder builder = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
      .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
      .setPostAggregatorSpecs(Arrays.<PostAggregator>asList(new ArithmeticPostAggregator(
          "rows_times_10",
          "*",
          Arrays.<PostAggregator>asList(new FieldAccessPostAggregator("rows", "rows"), new ConstantPostAggregator("const", 10L))
      )))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(new OrHavingSpec(ImmutableList.<HavingSpec>of(
          new GreaterThanHavingSpec("rows_times_10", 20L),
          new EqualToHavingSpec("idx", 217L)
      )));
  GroupByQuery fullQuery = builder.build();
  QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<Row>() {
    @Override
    public Sequence<Row> run(Query<Row> query, Map<String, Object> responseContext) {
      // simulate two daily segments
      final Query query1 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))));
      final Query query2 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))));
      return new MergeSequence(
          query.getResultOrdering(),
          Sequences.simple(Arrays.asList(runner.run(query1, responseContext), runner.run(query2, responseContext)))
      );
    }
  });
  Map<String, Object> context = Maps.newHashMap();
  // add an extra layer of merging, simulate broker forwarding query to historical
  TestHelper.assertExpectedObjects(expectedResults, factory.getToolchest().postMergeQueryDecoration(factory.getToolchest().mergeResults(factory.getToolchest().preMergeQueryDecoration(mergedRunner))).run(fullQuery, context), "merged");
  fullQuery = fullQuery.withPostAggregatorSpecs(Arrays.<PostAggregator>asList(new ExpressionPostAggregator("rows_times_10", "rows * 10.0")));
  TestHelper.assertExpectedObjects(expectedResults, factory.getToolchest().postMergeQueryDecoration(factory.getToolchest().mergeResults(factory.getToolchest().preMergeQueryDecoration(mergedRunner))).run(fullQuery, context), "merged");
}
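The decoration chain repeated in the assertions above mirrors how a broker wraps a downstream runner before merging: pre-decorate, merge, then post-decorate. Pulled into a helper it reads as follows (a sketch over the same QueryToolChest methods; the helper name is ours):

import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;

public class BrokerStyleWrapping {
  // the same pre-decorate, merge, post-decorate chain the test builds inline
  static <T> QueryRunner<T> brokerStyle(QueryToolChest<T, ?> toolChest, QueryRunner<T> baseRunner) {
    return toolChest.postMergeQueryDecoration(
        toolChest.mergeResults(
            toolChest.preMergeQueryDecoration(baseRunner)
        )
    );
  }
}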