Example 31 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class SpecificSegmentQueryRunner, method run.

@Override
public Sequence<T> run(final Query<T> input, final Map<String, Object> responseContext) {
    final Query<T> query = input.withQuerySegmentSpec(specificSpec);
    final Thread currThread = Thread.currentThread();
    final String currThreadName = currThread.getName();
    final String newName = String.format("%s_%s_%s", query.getType(), query.getDataSource(), query.getIntervals());
    final Sequence<T> baseSequence = doNamed(currThread, currThreadName, newName, new Supplier<Sequence<T>>() {

        @Override
        public Sequence<T> get() {
            return base.run(query, responseContext);
        }
    });
    Sequence<T> segmentMissingCatchingSequence = new Sequence<T>() {

        @Override
        public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator) {
            try {
                return baseSequence.accumulate(initValue, accumulator);
            } catch (SegmentMissingException e) {
                appendMissingSegment(responseContext);
                return initValue;
            }
        }

        @Override
        public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator) {
            try {
                return makeYielder(baseSequence.toYielder(initValue, accumulator));
            } catch (SegmentMissingException e) {
                appendMissingSegment(responseContext);
                return Yielders.done(initValue, null);
            }
        }

        private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder) {
            return new Yielder<OutType>() {

                @Override
                public OutType get() {
                    return yielder.get();
                }

                @Override
                public Yielder<OutType> next(final OutType initValue) {
                    try {
                        return yielder.next(initValue);
                    } catch (SegmentMissingException e) {
                        appendMissingSegment(responseContext);
                        return Yielders.done(initValue, null);
                    }
                }

                @Override
                public boolean isDone() {
                    return yielder.isDone();
                }

                @Override
                public void close() throws IOException {
                    yielder.close();
                }
            };
        }
    };
    return Sequences.wrap(segmentMissingCatchingSequence, new SequenceWrapper() {

        @Override
        public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing) {
            return doNamed(currThread, currThreadName, newName, sequenceProcessing);
        }
    });
}
Also used : Accumulator(io.druid.java.util.common.guava.Accumulator) YieldingAccumulator(io.druid.java.util.common.guava.YieldingAccumulator) SequenceWrapper(io.druid.java.util.common.guava.SequenceWrapper) Yielder(io.druid.java.util.common.guava.Yielder) SegmentMissingException(io.druid.segment.SegmentMissingException) Sequence(io.druid.java.util.common.guava.Sequence)
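
The pattern to note here: every pull point of the wrapped sequence (accumulate, toYielder, and each Yielder.next) catches SegmentMissingException, records the missing segment in the response context via appendMissingSegment, and returns whatever was produced so far, so a vanished segment degrades the result instead of failing the whole query. A minimal consumption sketch, assuming a runner built as above and a matching query; the names runner, query, and the Row result type are illustrative, not from the source:

Map<String, Object> responseContext = new HashMap<>();
Sequence<Row> results = runner.run(query, responseContext);
// If a segment disappears mid-scan, the wrapper above swallows
// SegmentMissingException and notes the segment in responseContext,
// so this accumulate() still completes with the rows seen so far.
Integer rowCount = results.accumulate(0, new Accumulator<Integer, Row>() {

    @Override
    public Integer accumulate(Integer accumulated, Row in) {
        return accumulated + 1;
    }
});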

Example 32 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class CachingClusteredClientTest, method testOutOfOrderSequenceMerging.

@Test
public void testOutOfOrderSequenceMerging() throws Exception {
    List<Sequence<Result<TopNResultValue>>> sequences = ImmutableList.of(
        Sequences.simple(makeTopNResultsWithoutRename(
            new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
            new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983)),
        Sequences.simple(makeTopNResultsWithoutRename(
            new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
            new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983)));
    TestHelper.assertExpectedResults(
        makeTopNResultsWithoutRename(
            new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
            new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
            new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
            new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983,
            new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983),
        client.mergeCachedAndUncachedSequences(
            new TopNQueryBuilder()
                .dataSource("test")
                .intervals("2011-01-06/2011-01-10")
                .dimension("a")
                .metric("b")
                .threshold(3)
                .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("b")))
                .build(),
            sequences));
}
Also used : TopNResultValue(io.druid.query.topn.TopNResultValue) TopNQueryBuilder(io.druid.query.topn.TopNQueryBuilder) CountAggregatorFactory(io.druid.query.aggregation.CountAggregatorFactory) Sequence(io.druid.java.util.common.guava.Sequence) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) HyperUniquesAggregatorFactory(io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) DateTime(org.joda.time.DateTime) Test(org.junit.Test) GroupByQueryRunnerTest(io.druid.query.groupby.GroupByQueryRunnerTest)
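
The inputs here are individually time-ordered but mutually out of order, and mergeCachedAndUncachedSequences is expected to emit one globally ordered stream. The same ordered-merge idea is available directly as MergeSequence from the same guava package; a minimal sketch with toy integers (variable names are illustrative, not from the test):

Sequence<Integer> cached = Sequences.simple(Arrays.asList(1, 3, 5));
Sequence<Integer> uncached = Sequences.simple(Arrays.asList(2, 4, 6));
// MergeSequence assumes each input sequence is already sorted by the
// given Ordering and lazily interleaves them into one sorted output.
Sequence<Integer> merged = new MergeSequence<Integer>(Ordering.<Integer>natural(), Sequences.simple(Arrays.asList(cached, uncached)));
// out: [1, 2, 3, 4, 5, 6]
List<Integer> out = Sequences.toList(merged, new ArrayList<Integer>());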

Example 33 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class CachingQueryRunnerTest, method testCloseAndPopulate.

private void testCloseAndPopulate(List<Result> expectedRes, List<Result> expectedCacheRes, Query query, QueryToolChest toolchest) throws Exception {
    final AssertingClosable closable = new AssertingClosable();
    final Sequence resultSeq = Sequences.wrap(Sequences.simple(expectedRes), new SequenceWrapper() {

        @Override
        public void before() {
            Assert.assertFalse(closable.isClosed());
        }

        @Override
        public void after(boolean isDone, Throwable thrown) throws Exception {
            closable.close();
        }
    });
    final CountDownLatch cacheMustBePutOnce = new CountDownLatch(1);
    Cache cache = new Cache() {

        private final Map<NamedKey, byte[]> baseMap = new ConcurrentHashMap<>();

        @Override
        public byte[] get(NamedKey key) {
            return baseMap.get(key);
        }

        @Override
        public void put(NamedKey key, byte[] value) {
            baseMap.put(key, value);
            cacheMustBePutOnce.countDown();
        }

        @Override
        public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys) {
            return null;
        }

        @Override
        public void close(String namespace) {
        }

        @Override
        public CacheStats getStats() {
            return null;
        }

        @Override
        public boolean isLocal() {
            return true;
        }

        @Override
        public void doMonitor(ServiceEmitter emitter) {
        }
    };
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest, new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return resultSeq;
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    HashMap<String, Object> context = new HashMap<String, Object>();
    Sequence res = runner.run(query, context);
    // base sequence is not closed yet
    Assert.assertFalse("sequence must not be closed", closable.isClosed());
    Assert.assertNull("cache must be empty", cache.get(cacheKey));
    ArrayList results = Sequences.toList(res, new ArrayList());
    Assert.assertTrue(closable.isClosed());
    Assert.assertEquals(expectedRes.toString(), results.toString());
    // wait for the background caching to finish
    // fail after at most 10 seconds so a hang does not block the overall test run
    Assert.assertTrue("cache must be populated", cacheMustBePutOnce.await(10, TimeUnit.SECONDS));
    byte[] cacheValue = cache.get(cacheKey);
    Assert.assertNotNull(cacheValue);
    Function<Object, Result> fn = cacheStrategy.pullFromCache();
    List<Result> cacheResults = Lists.newArrayList(
        Iterators.transform(
            objectMapper.readValues(objectMapper.getFactory().createParser(cacheValue), cacheStrategy.getCacheObjectClazz()),
            fn));
    Assert.assertEquals(expectedCacheRes.toString(), cacheResults.toString());
}
Also used : ServiceEmitter(com.metamx.emitter.service.ServiceEmitter) TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Result(io.druid.query.Result) SegmentDescriptor(io.druid.query.SegmentDescriptor) CacheConfig(io.druid.client.cache.CacheConfig) SequenceWrapper(io.druid.java.util.common.guava.SequenceWrapper) Sequence(io.druid.java.util.common.guava.Sequence) CountDownLatch(java.util.concurrent.CountDownLatch) IOException(java.io.IOException) QueryRunner(io.druid.query.QueryRunner) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) CacheStrategy(io.druid.query.CacheStrategy) Cache(io.druid.client.cache.Cache) MapCache(io.druid.client.cache.MapCache) Interval(org.joda.time.Interval)
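
What the test pins down is the SequenceWrapper lifecycle used above: before() runs as consumption of the wrapped sequence begins, and after(isDone, thrown) runs once when consumption completes or fails, which is why the closable must still be open before iteration and closed right after Sequences.toList returns. A standalone sketch of those hooks with toy data (prints standing in for the assertions):

Sequence<Integer> wrapped = Sequences.wrap(Sequences.simple(Arrays.asList(1, 2, 3)), new SequenceWrapper() {

    @Override
    public void before() {
        // runs as iteration over the wrapped sequence starts
        System.out.println("before");
    }

    @Override
    public void after(boolean isDone, Throwable thrown) {
        // runs once iteration completes (isDone == true) or fails
        System.out.println("after: isDone=" + isDone + ", thrown=" + thrown);
    }
});
// Wrapping is lazy: nothing is printed until the sequence is consumed.
Sequences.toList(wrapped, new ArrayList<Integer>());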

Example 34 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class CachingQueryRunnerTest, method testUseCache.

private void testUseCache(List<Result> expectedResults, Query query, QueryToolChest toolchest) throws Exception {
    DefaultObjectMapper objectMapper = new DefaultObjectMapper();
    String segmentIdentifier = "segment";
    SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
    CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
    Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
    Cache cache = MapCache.create(1024 * 1024);
    CacheUtil.populate(cache, objectMapper, cacheKey, Iterables.transform(expectedResults, cacheStrategy.prepareForCache()));
    CachingQueryRunner runner = new CachingQueryRunner(segmentIdentifier, segmentDescriptor, objectMapper, cache, toolchest,
    // return an empty sequence since results should get pulled from cache
    new QueryRunner() {

        @Override
        public Sequence run(Query query, Map responseContext) {
            return Sequences.empty();
        }
    }, backgroundExecutorService, new CacheConfig() {

        @Override
        public boolean isPopulateCache() {
            return true;
        }

        @Override
        public boolean isUseCache() {
            return true;
        }
    });
    HashMap<String, Object> context = new HashMap<String, Object>();
    List<Result> results = Sequences.toList(runner.run(query, context), new ArrayList());
    Assert.assertEquals(expectedResults.toString(), results.toString());
}
Also used : TimeseriesQuery(io.druid.query.timeseries.TimeseriesQuery) Query(io.druid.query.Query) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) Sequence(io.druid.java.util.common.guava.Sequence) QueryRunner(io.druid.query.QueryRunner) Result(io.druid.query.Result) SegmentDescriptor(io.druid.query.SegmentDescriptor) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) CacheConfig(io.druid.client.cache.CacheConfig) CacheStrategy(io.druid.query.CacheStrategy) Interval(org.joda.time.Interval) Cache(io.druid.client.cache.Cache) MapCache(io.druid.client.cache.MapCache)
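
Both cache tests lean on the two halves of CacheStrategy: prepareForCache() maps each Result to a cache-friendly object before serialization, and pullFromCache() maps deserialized objects back into Results. A minimal sketch of that round trip, leaving out the JSON (de)serialization that CacheUtil.populate and objectMapper.readValues perform in the real flow:

// forward: results -> cacheable objects (what gets serialized into the cache)
List<Object> cacheable = Lists.newArrayList(Iterables.transform(expectedResults, cacheStrategy.prepareForCache()));
// backward: cacheable objects -> results (what a cache hit replays)
List<Result> restored = Lists.newArrayList(Iterables.transform(cacheable, cacheStrategy.pullFromCache()));
// a lossless strategy round-trips the results unchanged
Assert.assertEquals(expectedResults.toString(), restored.toString());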

Example 35 with Sequence

Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.

The class DirectDruidClientTest, method testQueryInterruptionExceptionLogMessage.

@Test
public void testQueryInterruptionExceptionLogMessage() throws JsonProcessingException {
    HttpClient httpClient = EasyMock.createMock(HttpClient.class);
    SettableFuture<Object> interruptionFuture = SettableFuture.create();
    Capture<Request> capturedRequest = EasyMock.newCapture();
    String hostName = "localhost:8080";
    EasyMock.expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject())).andReturn(interruptionFuture).anyTimes();
    EasyMock.replay(httpClient);
    DataSegment dataSegment = new DataSegment(
        "test",
        new Interval("2013-01-01/2013-01-02"),
        new DateTime("2013-01-01").toString(),
        Maps.<String, Object>newHashMap(),
        Lists.<String>newArrayList(),
        Lists.<String>newArrayList(),
        NoneShardSpec.instance(),
        0,
        0L);
    final ServerSelector serverSelector = new ServerSelector(dataSegment, new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy()));
    DirectDruidClient client1 = new DirectDruidClient(new ReflectionQueryToolChestWarehouse(), QueryRunnerTestHelper.NOOP_QUERYWATCHER, new DefaultObjectMapper(), httpClient, hostName, new NoopServiceEmitter());
    QueryableDruidServer queryableDruidServer = new QueryableDruidServer(new DruidServer("test1", hostName, 0, "historical", DruidServer.DEFAULT_TIER, 0), client1);
    serverSelector.addServerAndUpdateSegment(queryableDruidServer, dataSegment);
    TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
    HashMap<String, List> context = Maps.newHashMap();
    interruptionFuture.set(new ByteArrayInputStream("{\"error\":\"testing1\",\"errorMessage\":\"testing2\"}".getBytes()));
    Sequence results = client1.run(query, context);
    QueryInterruptedException actualException = null;
    try {
        Sequences.toList(results, Lists.newArrayList());
    } catch (QueryInterruptedException e) {
        actualException = e;
    }
    Assert.assertNotNull(actualException);
    Assert.assertEquals("testing1", actualException.getErrorCode());
    Assert.assertEquals("testing2", actualException.getMessage());
    Assert.assertEquals(hostName, actualException.getHost());
    EasyMock.verify(httpClient);
}
Also used : TimeBoundaryQuery(io.druid.query.timeboundary.TimeBoundaryQuery) DataSegment(io.druid.timeline.DataSegment) DateTime(org.joda.time.DateTime) QueryableDruidServer(io.druid.client.selector.QueryableDruidServer) ServerSelector(io.druid.client.selector.ServerSelector) HighestPriorityTierSelectorStrategy(io.druid.client.selector.HighestPriorityTierSelectorStrategy) List(java.util.List) QueryInterruptedException(io.druid.query.QueryInterruptedException) ConnectionCountServerSelectorStrategy(io.druid.client.selector.ConnectionCountServerSelectorStrategy) Request(com.metamx.http.client.Request) NoopServiceEmitter(io.druid.server.metrics.NoopServiceEmitter) Sequence(io.druid.java.util.common.guava.Sequence) ByteArrayInputStream(java.io.ByteArrayInputStream) HttpClient(com.metamx.http.client.HttpClient) DefaultObjectMapper(io.druid.jackson.DefaultObjectMapper) HttpResponseHandler(com.metamx.http.client.response.HttpResponseHandler) ReflectionQueryToolChestWarehouse(io.druid.query.ReflectionQueryToolChestWarehouse) Interval(org.joda.time.Interval) Test(org.junit.Test)
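
Note where the failure surfaces: client1.run(query, context) returns a Sequence without error, and the error payload in the mocked response is only parsed, and QueryInterruptedException only thrown, inside Sequences.toList, because a Sequence is lazy and does no work until consumed. A minimal sketch of that laziness using BaseSequence from the same guava package (toy RuntimeException, no HTTP involved; assumes java.util.Iterator is imported):

Sequence<String> lazy = new BaseSequence<String, Iterator<String>>(new BaseSequence.IteratorMaker<String, Iterator<String>>() {

    @Override
    public Iterator<String> make() {
        // runs only when somebody starts consuming the sequence
        throw new RuntimeException("surfaces at consumption time");
    }

    @Override
    public void cleanup(Iterator<String> iterFromMake) {
        // nothing to release in this toy example
    }
});
// No exception yet: constructing a Sequence does no work.
try {
    Sequences.toList(lazy, Lists.<String>newArrayList());
} catch (RuntimeException e) {
    // thrown here, when the sequence is actually iterated
}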

Aggregations

Sequence (io.druid.java.util.common.guava.Sequence) 56
Test (org.junit.Test) 35
Interval (org.joda.time.Interval) 26
DateTime (org.joda.time.DateTime) 16
List (java.util.List) 15
Query (io.druid.query.Query) 14
Map (java.util.Map) 14
QueryRunner (io.druid.query.QueryRunner) 13
Result (io.druid.query.Result) 12
GroupByQueryRunnerTest (io.druid.query.groupby.GroupByQueryRunnerTest) 10
MergeSequence (io.druid.java.util.common.guava.MergeSequence) 9
TimeseriesResultValue (io.druid.query.timeseries.TimeseriesResultValue) 9
Row (io.druid.data.input.Row) 8
ImmutableMap (com.google.common.collect.ImmutableMap) 7
DefaultObjectMapper (io.druid.jackson.DefaultObjectMapper) 7
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory) 7
DefaultDimensionSpec (io.druid.query.dimension.DefaultDimensionSpec) 7
MultipleIntervalSegmentSpec (io.druid.query.spec.MultipleIntervalSegmentSpec) 7
ArrayList (java.util.ArrayList) 7
MapMaker (com.google.common.collect.MapMaker) 6