
Example 46 with SegmentDescriptor

Use of io.druid.query.SegmentDescriptor in project druid by druid-io.
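For orientation before the examples: a SegmentDescriptor identifies one partition of one segment version by the triple (interval, version, partitionNumber), and all three tests below construct it directly. A minimal illustration mirroring those constructions (the interval and version values here are arbitrary):

import io.druid.query.SegmentDescriptor;
import org.joda.time.Interval;

// (interval, version, partitionNumber): the triple needed to locate
// exactly one queryable partition of a segment
SegmentDescriptor descriptor =
    new SegmentDescriptor(new Interval("2011-01-06/2011-01-07"), "v", 1);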

From class CachingClusteredClientTest, method testSingleDimensionPruning. The test registers six single-dimension-partitioned segment chunks across three intervals, runs a filtered timeseries query, and asserts that the query sent to the server carries a MultipleSpecificSegmentSpec naming only the partitions the filter could actually match.

@Test
public void testSingleDimensionPruning() throws Exception {
    DimFilter filter = Druids.newAndDimFilterBuilder().fields(Arrays.asList(
        Druids.newOrDimFilterBuilder().fields(Arrays.asList(
            new SelectorDimFilter("dim1", "a", null),
            new BoundDimFilter("dim1", "from", "to", false, false, false, null, StringComparators.LEXICOGRAPHIC)
        )).build(),
        Druids.newAndDimFilterBuilder().fields(Arrays.asList(
            new InDimFilter("dim2", Arrays.asList("a", "c", "e", "g"), null),
            new BoundDimFilter("dim2", "aaa", "hi", false, false, false, null, StringComparators.LEXICOGRAPHIC),
            new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC)
        )).build()
    )).build();
    final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .filters(filter)
        .granularity(GRANULARITY)
        .intervals(SEG_SPEC)
        .context(CONTEXT)
        .intervals("2011-01-05/2011-01-10")
        .aggregators(RENAMED_AGGS)
        .postAggregators(RENAMED_POST_AGGS);
    TimeseriesQuery query = builder.build();
    Map<String, List> context = new HashMap<>();
    final Interval interval1 = new Interval("2011-01-06/2011-01-07");
    final Interval interval2 = new Interval("2011-01-07/2011-01-08");
    final Interval interval3 = new Interval("2011-01-08/2011-01-09");
    QueryRunner runner = new FinalizeResultsQueryRunner(client, new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()));
    final DruidServer lastServer = servers[random.nextInt(servers.length)];
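    // Each mock selector advertises a single-dimension shard spec for its
    // partition: (dimension, start, end, partitionNumber), null meaning unbounded.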
    ServerSelector selector1 = makeMockSingleDimensionSelector(lastServer, "dim1", null, "b", 1);
    ServerSelector selector2 = makeMockSingleDimensionSelector(lastServer, "dim1", "e", "f", 2);
    ServerSelector selector3 = makeMockSingleDimensionSelector(lastServer, "dim1", "hi", "zzz", 3);
    ServerSelector selector4 = makeMockSingleDimensionSelector(lastServer, "dim2", "a", "e", 4);
    ServerSelector selector5 = makeMockSingleDimensionSelector(lastServer, "dim2", null, null, 5);
    ServerSelector selector6 = makeMockSingleDimensionSelector(lastServer, "other", "b", null, 6);
    timeline.add(interval1, "v", new StringPartitionChunk<>(null, "a", 1, selector1));
    timeline.add(interval1, "v", new StringPartitionChunk<>("a", "b", 2, selector2));
    timeline.add(interval1, "v", new StringPartitionChunk<>("b", null, 3, selector3));
    timeline.add(interval2, "v", new StringPartitionChunk<>(null, "d", 4, selector4));
    timeline.add(interval2, "v", new StringPartitionChunk<>("d", null, 5, selector5));
    timeline.add(interval3, "v", new StringPartitionChunk<>(null, null, 6, selector6));
    final Capture<TimeseriesQuery> capture = Capture.newInstance();
    final Capture<Map<String, List>> contextCap = Capture.newInstance();
    QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
    EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap))).andReturn(Sequences.empty()).anyTimes();
    EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
    EasyMock.replay(serverView);
    EasyMock.replay(mockRunner);
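    // The filter cannot match partition 2 (dim1 range [e, f)) or partition 4
    // (dim2 range [a, e)), so only descriptors 1, 3, 5 and 6 should survive.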
    List<SegmentDescriptor> descriptors = new ArrayList<>();
    descriptors.add(new SegmentDescriptor(interval1, "v", 1));
    descriptors.add(new SegmentDescriptor(interval1, "v", 3));
    descriptors.add(new SegmentDescriptor(interval2, "v", 5));
    descriptors.add(new SegmentDescriptor(interval3, "v", 6));
    MultipleSpecificSegmentSpec expected = new MultipleSpecificSegmentSpec(descriptors);
    // consume the sequence so the mock runner is invoked and the capture populated
    Sequences.toList(runner.run(query, context), Lists.newArrayList());
    Assert.assertEquals(expected, capture.getValue().getQuerySegmentSpec());
}
Also used: ArrayList (java.util.ArrayList), BoundDimFilter (io.druid.query.filter.BoundDimFilter), DimFilter (io.druid.query.filter.DimFilter), Druids (io.druid.query.Druids), FinalizeResultsQueryRunner (io.druid.query.FinalizeResultsQueryRunner), GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest), HashMap (java.util.HashMap), ImmutableList (com.google.common.collect.ImmutableList), ImmutableMap (com.google.common.collect.ImmutableMap), InDimFilter (io.druid.query.filter.InDimFilter), Interval (org.joda.time.Interval), List (java.util.List), Map (java.util.Map), MultipleSpecificSegmentSpec (io.druid.query.spec.MultipleSpecificSegmentSpec), QueryableDruidServer (io.druid.client.selector.QueryableDruidServer), QueryRunner (io.druid.query.QueryRunner), SegmentDescriptor (io.druid.query.SegmentDescriptor), SelectorDimFilter (io.druid.query.filter.SelectorDimFilter), ServerSelector (io.druid.client.selector.ServerSelector), Test (org.junit.Test), TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery), TimeseriesQueryQueryToolChest (io.druid.query.timeseries.TimeseriesQueryQueryToolChest), TreeMap (java.util.TreeMap)
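The pruning that the expected descriptors encode reduces to a range-overlap test on the partition dimension. Below is a minimal sketch of that test, not Druid's actual implementation: it assumes lexicographic string ordering, treats null bounds as unbounded (matching the mock selectors above), and every name in it is hypothetical.

public final class ChunkPruningSketch {

    // True when a chunk covering [chunkStart, chunkEnd) on the partition
    // dimension could hold values from the closed filter range
    // [filterStart, filterEnd]; a null bound means "unbounded" on that side.
    static boolean mayOverlap(String chunkStart, String chunkEnd,
                              String filterStart, String filterEnd) {
        boolean startsBeforeFilterEnd =
            chunkStart == null || filterEnd == null || chunkStart.compareTo(filterEnd) <= 0;
        boolean endsAfterFilterStart =
            chunkEnd == null || filterStart == null || chunkEnd.compareTo(filterStart) > 0;
        return startsBeforeFilterEnd && endsAfterFilterStart;
    }

    public static void main(String[] args) {
        // Partition 2 covers dim1 [e, f) while the BoundDimFilter asks for
        // [from, to]; lexicographically "from" > "f", so the chunk is prunable.
        System.out.println(mayOverlap("e", "f", "from", "to"));    // false
        // Partition 3 covers dim1 [hi, zzz), which does intersect [from, to].
        System.out.println(mayOverlap("hi", "zzz", "from", "to")); // true
    }
}

That single comparison ("from" sorting after "f") is exactly why partition 2 is absent from the expected MultipleSpecificSegmentSpec.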

Example 47 with SegmentDescriptor

Use of io.druid.query.SegmentDescriptor in project druid by druid-io.

From class CachingQueryRunnerTest, method testCloseAndPopulate. The helper wraps a base runner in a CachingQueryRunner, checks that the backing sequence stays open until the results are fully consumed, and then verifies that the results were serialized into the cache by the background populate.

private void testCloseAndPopulate(List<Result> expectedRes, List<Result> expectedCacheRes, Query query, QueryToolChest toolchest) throws Exception {
    final AssertingClosable closable = new AssertingClosable();
    final Sequence resultSeq = Sequences.wrap(Sequences.simple(expectedRes), new SequenceWrapper() {

        @Override
        public void before() {
            Assert.assertFalse(closable.isClosed());
        }

        @Override
        public void after(boolean isDone, Throwable thrown) throws Exception {
            closable.close();
        }
    });
    final CountDownLatch cacheMustBePutOnce = new CountDownLatch(1);
    Cache cache = new Cache() {

        private final Map<NamedKey, byte[]> baseMap = new ConcurrentHashMap<>();

        @Override
        public byte[] get(NamedKey key) {
            return baseMap.get(key);
        }

        @Override
        public void put(NamedKey key, byte[] value) {
            baseMap.put(key, value);
            cacheMustBePutOnce.countDown();
        }

        @Override
        public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys) {
            return null;
        }

        @Override
        public void close(String namespace) {
        }

        @Override
        public CacheStats getStats() {
            return null;
        }

        @Override
        public boolean isLocal() {
            return true;
        }

        @Override
        public void doMonitor(ServiceEmitter emitter) {
        }
    };
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest, new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return resultSeq;
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    HashMap<String, Object> context = new HashMap<String, Object>();
    Sequence res = runner.run(query, context);
    // base sequence is not closed yet
    Assert.assertFalse("sequence must not be closed", closable.isClosed());
    Assert.assertNull("cache must be empty", cache.get(cacheKey));
    ArrayList results = Sequences.toList(res, new ArrayList());
    Assert.assertTrue(closable.isClosed());
    Assert.assertEquals(expectedRes.toString(), results.toString());
    // wait for background caching to finish;
    // give up after 10 seconds so a failure here cannot block the whole suite
    Assert.assertTrue("cache must be populated", cacheMustBePutOnce.await(10, TimeUnit.SECONDS));
    byte[] cacheValue = cache.get(cacheKey);
    Assert.assertNotNull(cacheValue);
    Function<Object, Result> fn = cacheStrategy.pullFromCache();
    List<Result> cacheResults = Lists.newArrayList(Iterators.transform(objectMapper.readValues(objectMapper.getFactory().createParser(cacheValue), cacheStrategy.getCacheObjectClazz()), fn));
    Assert.assertEquals(expectedCacheRes.toString(), cacheResults.toString());
}
Also used: ArrayList (java.util.ArrayList), Cache (io.druid.client.cache.Cache), CacheConfig (io.druid.client.cache.CacheConfig), CacheStrategy (io.druid.query.CacheStrategy), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), CountDownLatch (java.util.concurrent.CountDownLatch), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), HashMap (java.util.HashMap), ImmutableMap (com.google.common.collect.ImmutableMap), Interval (org.joda.time.Interval), IOException (java.io.IOException), Map (java.util.Map), MapCache (io.druid.client.cache.MapCache), Query (io.druid.query.Query), QueryRunner (io.druid.query.QueryRunner), Result (io.druid.query.Result), SegmentDescriptor (io.druid.query.SegmentDescriptor), Sequence (io.druid.java.util.common.guava.Sequence), SequenceWrapper (io.druid.java.util.common.guava.SequenceWrapper), ServiceEmitter (com.metamx.emitter.service.ServiceEmitter), TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery)
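Reduced to its essentials, the behavior under test is a decorator that returns the base runner's results unchanged and, only once they have been fully consumed, writes a cache-friendly projection of them under the segment's key. The sketch below uses hypothetical types throughout; the real CachingQueryRunner works on lazy Sequences, keys the cache with CacheUtil.computeSegmentCacheKey, and populates it on a background executor.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;

public final class CachePopulatingRunnerSketch<T, C> {
    private final Map<String, List<C>> cache;     // stands in for Cache + NamedKey
    private final String segmentKey;              // stands in for the computed segment cache key
    private final Function<T, C> prepareForCache; // analogous to CacheStrategy.prepareForCache()

    public CachePopulatingRunnerSketch(Map<String, List<C>> cache, String segmentKey,
                                       Function<T, C> prepareForCache) {
        this.cache = cache;
        this.segmentKey = segmentKey;
        this.prepareForCache = prepareForCache;
    }

    public List<T> run(Supplier<List<T>> baseRunner) {
        List<T> results = baseRunner.get();               // compute from the segment
        List<C> cacheable = new ArrayList<>();
        for (T result : results) {
            cacheable.add(prepareForCache.apply(result)); // project to the serializable form
        }
        cache.put(segmentKey, cacheable);                 // populate only after full consumption
        return results;
    }

    public static void main(String[] args) {
        Map<String, List<String>> cache = new HashMap<>();
        CachePopulatingRunnerSketch<Integer, String> runner =
            new CachePopulatingRunnerSketch<>(cache, "segment_2011/2012_version_0", String::valueOf);
        runner.run(() -> List.of(1, 2, 3));
        System.out.println(cache); // {segment_2011/2012_version_0=[1, 2, 3]}
    }
}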

Example 48 with SegmentDescriptor

Use of io.druid.query.SegmentDescriptor in project druid by druid-io.

From class CachingQueryRunnerTest, method testUseCache. The helper pre-populates the segment cache, gives CachingQueryRunner a base runner that returns an empty sequence, and asserts that the expected results are nevertheless returned, i.e. served entirely from the cache.

private void testUseCache(List<Result> expectedResults, Query query, QueryToolChest toolchest) throws Exception {
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    Cache cache = MapCache.create(1024 * 1024);
    CacheUtil.populate(cache, objectMapper, cacheKey, Iterables.transform(expectedResults, cacheStrategy.prepareForCache()));
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest,
    // return an empty sequence, since results should be pulled from the cache
    new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return Sequences.empty();
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    HashMap<String, Object> context = new HashMap<String, Object>();
    List<Result> results = Sequences.toList(runner.run(query, context), new ArrayList());
    Assert.assertEquals(expectedResults.toString(), results.toString());
}
Also used: ArrayList (java.util.ArrayList), Cache (io.druid.client.cache.Cache), CacheConfig (io.druid.client.cache.CacheConfig), CacheStrategy (io.druid.query.CacheStrategy), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper), HashMap (java.util.HashMap), ImmutableMap (com.google.common.collect.ImmutableMap), Interval (org.joda.time.Interval), Map (java.util.Map), MapCache (io.druid.client.cache.MapCache), Query (io.druid.query.Query), QueryRunner (io.druid.query.QueryRunner), Result (io.druid.query.Result), SegmentDescriptor (io.druid.query.SegmentDescriptor), Sequence (io.druid.java.util.common.guava.Sequence), TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery)
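The complementary read path, again as a hedged sketch rather than Druid's API: consult the cache first, map each cached entry back through a pull function (analogous to cacheStrategy.pullFromCache() in the previous example), and fall back to the base runner only on a miss.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;

public final class CacheReadingRunnerSketch {

    // Serve from cache when the segment key is present; otherwise compute.
    static <T, C> List<T> run(Map<String, List<C>> cache, String segmentKey,
                              Function<C, T> pullFromCache, Supplier<List<T>> baseRunner) {
        List<C> cached = cache.get(segmentKey);
        if (cached != null) {
            List<T> results = new ArrayList<>();
            for (C entry : cached) {
                results.add(pullFromCache.apply(entry)); // deserialize the cached form
            }
            return results;                              // base runner never invoked
        }
        return baseRunner.get();
    }

    public static void main(String[] args) {
        Map<String, List<String>> cache = new HashMap<>();
        cache.put("segment_2011/2012_version_0", List.of("1", "2", "3")); // pre-populated, as in the test
        List<Integer> results = run(cache, "segment_2011/2012_version_0",
            Integer::parseInt,
            () -> { throw new AssertionError("results should come from the cache"); });
        System.out.println(results); // [1, 2, 3]
    }
}

This mirrors testUseCache: the base runner supplied to CachingQueryRunner returns nothing useful, so a non-empty result can only have come from the pre-populated cache.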

Aggregations (usage counts across these examples)

SegmentDescriptor (io.druid.query.SegmentDescriptor): 48
Test (org.junit.Test): 30
Interval (org.joda.time.Interval): 19
TaskStatus (io.druid.indexing.common.TaskStatus): 17
QueryRunner (io.druid.query.QueryRunner): 12
Map (java.util.Map): 11
Query (io.druid.query.Query): 10
ImmutableMap (com.google.common.collect.ImmutableMap): 8
Pair (io.druid.java.util.common.Pair): 8
DataSegment (io.druid.timeline.DataSegment): 8
Executor (java.util.concurrent.Executor): 8
ImmutableSegmentLoadInfo (io.druid.client.ImmutableSegmentLoadInfo): 7
TimeseriesQuery (io.druid.query.timeseries.TimeseriesQuery): 7
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper): 6
IOException (java.io.IOException): 6
List (java.util.List): 6
ListenableFuture (com.google.common.util.concurrent.ListenableFuture): 5
Sequence (io.druid.java.util.common.guava.Sequence): 5
SpecificSegmentQueryRunner (io.druid.query.spec.SpecificSegmentQueryRunner): 5
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 4