Use of io.druid.query.QueryRunner in project druid by druid-io.
From class CachingClusteredClientTest, method testGroupByCachingRenamedAggs: the test caches groupBy results per interval, then verifies the cached values can be read back under renamed dimension and aggregator output names.
@Test
public void testGroupByCachingRenamedAggs() throws Exception {
GroupByQuery.Builder builder = new GroupByQuery.Builder()
    .setDataSource(DATA_SOURCE)
    .setQuerySegmentSpec(SEG_SPEC)
    .setDimFilter(DIM_FILTER)
    .setGranularity(GRANULARITY)
    .setDimensions(Arrays.<DimensionSpec>asList(new DefaultDimensionSpec("a", "output")))
    .setAggregatorSpecs(AGGS)
    .setContext(CONTEXT);
testQueryCaching(
    client,
    builder.build(),
    new Interval("2011-01-01/2011-01-02"),
    makeGroupByResults(new DateTime("2011-01-01"), ImmutableMap.of("output", "a", "rows", 1, "imps", 1, "impers", 1)),
    new Interval("2011-01-02/2011-01-03"),
    makeGroupByResults(new DateTime("2011-01-02"), ImmutableMap.of("output", "b", "rows", 2, "imps", 2, "impers", 2)),
    new Interval("2011-01-05/2011-01-10"),
    makeGroupByResults(
        new DateTime("2011-01-05"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        new DateTime("2011-01-06"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        new DateTime("2011-01-07"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        new DateTime("2011-01-08"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        new DateTime("2011-01-09"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    ),
    new Interval("2011-01-05/2011-01-10"),
    makeGroupByResults(
        new DateTime("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        new DateTime("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        new DateTime("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        new DateTime("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        new DateTime("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    )
);
QueryRunner runner = new FinalizeResultsQueryRunner(
    client,
    GroupByQueryRunnerTest.makeQueryRunnerFactory(new GroupByQueryConfig()).getToolchest()
);
HashMap<String, Object> context = new HashMap<String, Object>();
TestHelper.assertExpectedObjects(
    makeGroupByResults(
        new DateTime("2011-01-05T"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        new DateTime("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3),
        new DateTime("2011-01-06T"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        new DateTime("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4),
        new DateTime("2011-01-07T"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        new DateTime("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5),
        new DateTime("2011-01-08T"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        new DateTime("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6),
        new DateTime("2011-01-09T"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7),
        new DateTime("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7)
    ),
    runner.run(builder.setInterval("2011-01-05/2011-01-10").build(), context),
    ""
);
// Re-run the same query with a renamed output dimension and renamed aggregators;
// the values must still come back from the cache under the new names.
TestHelper.assertExpectedObjects(
    makeGroupByResults(
        new DateTime("2011-01-05T"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3),
        new DateTime("2011-01-05T01"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3),
        new DateTime("2011-01-06T"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4),
        new DateTime("2011-01-06T01"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4),
        new DateTime("2011-01-07T"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5),
        new DateTime("2011-01-07T01"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5),
        new DateTime("2011-01-08T"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6),
        new DateTime("2011-01-08T01"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6),
        new DateTime("2011-01-09T"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7),
        new DateTime("2011-01-09T01"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7)
    ),
    runner.run(
        builder.setInterval("2011-01-05/2011-01-10")
               .setDimensions(Arrays.<DimensionSpec>asList(new DefaultDimensionSpec("a", "output2")))
               .setAggregatorSpecs(RENAMED_AGGS)
               .build(),
        context
    ),
    "renamed aggregators test"
);
}
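The makeGroupByResults helper is defined elsewhere in the test class and is not shown on this page. As a minimal sketch of what such a helper could look like, assuming groupBy result rows are io.druid.data.input.MapBasedRow instances built from (timestamp, event) varargs pairs (the name and exact shape here are assumptions, not the test's actual code):

// Hypothetical sketch, not the test's actual helper.
private static Iterable<Row> makeGroupByResultsSketch(Object... pairs) {
  List<Row> rows = new ArrayList<>();
  for (int i = 0; i < pairs.length; i += 2) {
    // each pair is a DateTime followed by the event map for one result row
    rows.add(new MapBasedRow((DateTime) pairs[i], (Map<String, Object>) pairs[i + 1]));
  }
  return rows;
}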
Use of io.druid.query.QueryRunner in project druid by druid-io.
From class CachingClusteredClientTest, method testSearchCaching: the test caches search results for several intervals, then verifies that a single query over 2011-01-01/2011-01-10 is served by merging the cached per-interval results.
@Test
public void testSearchCaching() throws Exception {
final Druids.SearchQueryBuilder builder = Druids.newSearchQueryBuilder()
    .dataSource(DATA_SOURCE)
    .filters(DIM_FILTER)
    .granularity(GRANULARITY)
    .limit(1000)
    .intervals(SEG_SPEC)
    .dimensions(Arrays.asList(TOP_DIM))
    .query("how")
    .context(CONTEXT);
testQueryCaching(
    client,
    builder.build(),
    new Interval("2011-01-01/2011-01-02"),
    makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4),
    new Interval("2011-01-02/2011-01-03"),
    makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4),
    new Interval("2011-01-05/2011-01-10"),
    makeSearchResults(
        TOP_DIM,
        new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
        new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
        new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
        new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
        new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
    ),
    new Interval("2011-01-05/2011-01-10"),
    makeSearchResults(
        TOP_DIM,
        new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
        new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
        new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
        new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
        new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
    )
);
QueryRunner runner = new FinalizeResultsQueryRunner(
    client,
    new SearchQueryQueryToolChest(new SearchQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator())
);
HashMap<String, Object> context = new HashMap<String, Object>();
TestHelper.assertExpectedResults(
    makeSearchResults(
        TOP_DIM,
        new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4,
        new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4,
        new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
        new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
        new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
        new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
        new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
        new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
        new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
        new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
        new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4,
        new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
    ),
    runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context)
);
}
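The makeSearchResults helper is likewise defined elsewhere in the test class. A simplified, single-timestamp sketch of the idea, assuming one Result<SearchResultValue> per timestamp and the (dimension, value, count) SearchHit constructor from this era of Druid; this is an illustration, not the actual multi-timestamp helper:

// Hypothetical single-timestamp sketch of a makeSearchResults-style helper.
private static Iterable<Result<SearchResultValue>> makeSearchResultsSketch(String dim, DateTime timestamp, Object... valueCountPairs) {
  List<SearchHit> hits = new ArrayList<>();
  for (int i = 0; i < valueCountPairs.length; i += 2) {
    // each pair is a matched dimension value and its hit count
    hits.add(new SearchHit(dim, (String) valueCountPairs[i], (Integer) valueCountPairs[i + 1]));
  }
  return Collections.singletonList(new Result<>(timestamp, new SearchResultValue(hits)));
}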
Use of io.druid.query.QueryRunner in project druid by druid-io.
From class CachingClusteredClientTest, method testTopNCaching: the test caches topN results, then verifies the cached values survive a re-run with renamed aggregators and reordered post-aggregators.
@Test
@SuppressWarnings("unchecked")
public void testTopNCaching() throws Exception {
final TopNQueryBuilder builder = new TopNQueryBuilder()
    .dataSource(DATA_SOURCE)
    .dimension(TOP_DIM)
    .metric("imps")
    .threshold(3)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(
    client,
    new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator())
);
testQueryCaching(
    runner,
    builder.build(),
    new Interval("2011-01-01/2011-01-02"),
    makeTopNResultsWithoutRename(new DateTime("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998),
    new Interval("2011-01-02/2011-01-03"),
    makeTopNResultsWithoutRename(new DateTime("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995),
    new Interval("2011-01-05/2011-01-10"),
    makeTopNResultsWithoutRename(
        new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983
    ),
    new Interval("2011-01-05/2011-01-10"),
    makeTopNResultsWithoutRename(
        new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983
    )
);
HashMap<String, List> context = new HashMap<String, List>();
TestHelper.assertExpectedResults(
    makeRenamedTopNResults(
        new DateTime("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998,
        new DateTime("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995,
        new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992,
        new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
        new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
        new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983,
        new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983
    ),
    runner.run(
        builder.intervals("2011-01-01/2011-01-10")
               .metric("imps")
               .aggregators(RENAMED_AGGS)
               .postAggregators(DIFF_ORDER_POST_AGGS)
               .build(),
        context
    )
);
}
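The renamed-aggregator re-run passes because the cached per-segment values are pulled back by position rather than by output name, so aggregators of the same type and input can be re-read under different names. As an illustration only (these concrete factories are assumptions, not the test's AGGS and RENAMED_AGGS constants):

// Hypothetical aggregator lists: same types and input fields, different output names.
List<AggregatorFactory> aggs = Arrays.<AggregatorFactory>asList(
    new CountAggregatorFactory("rows"),
    new LongSumAggregatorFactory("imps", "impressions")
);
List<AggregatorFactory> renamedAggs = Arrays.<AggregatorFactory>asList(
    new CountAggregatorFactory("rows"),
    new LongSumAggregatorFactory("imps2", "impressions")
);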
Use of io.druid.query.QueryRunner in project druid by druid-io.
From class CachingClusteredClientTest, method testSelectCachingRenamedOutputName: the test caches select results, then verifies they can be read back with the dimension renamed through a DefaultDimensionSpec.
@Test
public void testSelectCachingRenamedOutputName() throws Exception {
final Set<String> dimensions = Sets.<String>newHashSet("a");
final Set<String> metrics = Sets.<String>newHashSet("rows");
Druids.SelectQueryBuilder builder = Druids.newSelectQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(GRANULARITY)
    .dimensions(Arrays.asList("a"))
    .metrics(Arrays.asList("rows"))
    .pagingSpec(new PagingSpec(null, 3))
    .context(CONTEXT);
testQueryCaching(
    client,
    builder.build(),
    new Interval("2011-01-01/2011-01-02"),
    makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)),
    new Interval("2011-01-02/2011-01-03"),
    makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)),
    new Interval("2011-01-05/2011-01-10"),
    makeSelectResults(
        dimensions, metrics,
        new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
        new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
        new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
        new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
        new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9)
    ),
    new Interval("2011-01-05/2011-01-10"),
    makeSelectResults(
        dimensions, metrics,
        new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
        new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
        new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
        new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
        new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)
    )
);
QueryRunner runner = new FinalizeResultsQueryRunner(
    client,
    new SelectQueryQueryToolChest(jsonMapper, QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator(), selectConfigSupplier)
);
HashMap<String, Object> context = new HashMap<String, Object>();
TestHelper.assertExpectedResults(
    makeSelectResults(
        dimensions, metrics,
        new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1),
        new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5),
        new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5),
        new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5),
        new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6),
        new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6),
        new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7),
        new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7),
        new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8),
        new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8),
        new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9),
        new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9)
    ),
    runner.run(builder.intervals("2011-01-01/2011-01-10").build(), context)
);
// Re-run with the "a" dimension renamed to "a2" via a DefaultDimensionSpec;
// the cached rows must come back under the new output name.
TestHelper.assertExpectedResults(
    makeSelectResults(
        dimensions, metrics,
        new DateTime("2011-01-01"), ImmutableMap.of("a2", "b", "rows", 1),
        new DateTime("2011-01-02"), ImmutableMap.of("a2", "c", "rows", 5),
        new DateTime("2011-01-05"), ImmutableMap.of("a2", "d", "rows", 5),
        new DateTime("2011-01-05T01"), ImmutableMap.of("a2", "d", "rows", 5),
        new DateTime("2011-01-06"), ImmutableMap.of("a2", "e", "rows", 6),
        new DateTime("2011-01-06T01"), ImmutableMap.of("a2", "e", "rows", 6),
        new DateTime("2011-01-07"), ImmutableMap.of("a2", "f", "rows", 7),
        new DateTime("2011-01-07T01"), ImmutableMap.of("a2", "f", "rows", 7),
        new DateTime("2011-01-08"), ImmutableMap.of("a2", "g", "rows", 8),
        new DateTime("2011-01-08T01"), ImmutableMap.of("a2", "g", "rows", 8),
        new DateTime("2011-01-09"), ImmutableMap.of("a2", "h", "rows", 9),
        new DateTime("2011-01-09T01"), ImmutableMap.of("a2", "h", "rows", 9)
    ),
    runner.run(
        builder.intervals("2011-01-01/2011-01-10")
               .dimensionSpecs(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("a", "a2")))
               .build(),
        context
    )
);
}
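A side note on the paging spec used above: new PagingSpec(null, 3) starts from the beginning (no paging identifiers) with a threshold of 3 rows. A follow-up page would pass the identifiers returned by the previous result; the identifier map below is hypothetical:

// First page: no identifiers, up to 3 rows.
PagingSpec firstPage = new PagingSpec(null, 3);
// Next page: resume from an offset previously returned for a segment (hypothetical id and offset).
PagingSpec nextPage = new PagingSpec(ImmutableMap.of("some_segment_id", 3), 3);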
Use of io.druid.query.QueryRunner in project druid by druid-io.
From class CachingQueryRunnerTest, helper method testCloseAndPopulate: it runs a query through a CachingQueryRunner backed by an in-memory Cache stub, asserting that the underlying sequence is closed once the results are consumed and that the cache is populated asynchronously.
private void testCloseAndPopulate(List<Result> expectedRes, List<Result> expectedCacheRes, Query query, QueryToolChest toolchest) throws Exception {
  final AssertingClosable closable = new AssertingClosable();
  final Sequence resultSeq = Sequences.wrap(Sequences.simple(expectedRes), new SequenceWrapper() {

    @Override
    public void before() {
      Assert.assertFalse(closable.isClosed());
    }

    @Override
    public void after(boolean isDone, Throwable thrown) throws Exception {
      closable.close();
    }
  });
  final CountDownLatch cacheMustBePutOnce = new CountDownLatch(1);
  // In-memory Cache stub; the latch lets the test wait for the asynchronous put.
  Cache cache = new Cache() {

    private final Map<NamedKey, byte[]> baseMap = new ConcurrentHashMap<>();

    @Override
    public byte[] get(NamedKey key) {
      return baseMap.get(key);
    }

    @Override
    public void put(NamedKey key, byte[] value) {
      baseMap.put(key, value);
      cacheMustBePutOnce.countDown();
    }

    @Override
    public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys) {
      return null;
    }

    @Override
    public void close(String namespace) {
    }

    @Override
    public CacheStats getStats() {
      return null;
    }

    @Override
    public boolean isLocal() {
      return true;
    }

    @Override
    public void doMonitor(ServiceEmitter emitter) {
    }
  };
  String segmentIdentifier = "segment";
  SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
  DefaultObjectMapper objectMapper = new DefaultObjectMapper();
  CachingQueryRunner runner = new CachingQueryRunner(
      segmentIdentifier,
      segmentDescriptor,
      objectMapper,
      cache,
      toolchest,
      new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
          return resultSeq;
        }
      },
      backgroundExecutorService,
      new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
          return true;
        }

        @Override
        public boolean isUseCache() {
          return true;
        }
      }
  );
  CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
  Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
  HashMap<String, Object> context = new HashMap<String, Object>();
  Sequence res = runner.run(query, context);
  // The base sequence must not be closed, and nothing cached, until the results are consumed.
  Assert.assertFalse("sequence must not be closed", closable.isClosed());
  Assert.assertNull("cache must be empty", cache.get(cacheKey));
  ArrayList results = Sequences.toList(res, new ArrayList());
  Assert.assertTrue(closable.isClosed());
  Assert.assertEquals(expectedRes.toString(), results.toString());
  // Wait for the background caching to finish; give up after at most 10 seconds
  // so a failure here does not block the overall test run.
  Assert.assertTrue("cache must be populated", cacheMustBePutOnce.await(10, TimeUnit.SECONDS));
  byte[] cacheValue = cache.get(cacheKey);
  Assert.assertNotNull(cacheValue);
  // Deserialize the cached bytes with the strategy's pull function and compare.
  Function<Object, Result> fn = cacheStrategy.pullFromCache();
  List<Result> cacheResults = Lists.newArrayList(
      Iterators.transform(
          objectMapper.readValues(objectMapper.getFactory().createParser(cacheValue), cacheStrategy.getCacheObjectClazz()),
          fn
      )
  );
  Assert.assertEquals(expectedCacheRes.toString(), cacheResults.toString());
}
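The AssertingClosable used above is a small test helper whose definition is not shown on this page. A plausible sketch, assuming it simply tracks closure and fails on a double close (the actual implementation may differ):

// Hypothetical sketch of the AssertingClosable helper.
private static class AssertingClosable implements Closeable {
  private final AtomicBoolean closed = new AtomicBoolean(false);

  @Override
  public void close() throws IOException {
    // fail if close() is called more than once
    Assert.assertTrue("already closed", closed.compareAndSet(false, true));
  }

  public boolean isClosed() {
    return closed.get();
  }
}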