Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From class SpecificSegmentQueryRunner, method run:
@Override
public Sequence<T> run(final Query<T> input, final Map<String, Object> responseContext)
{
  final Query<T> query = input.withQuerySegmentSpec(specificSpec);
  final Thread currThread = Thread.currentThread();
  final String currThreadName = currThread.getName();
  final String newName = String.format("%s_%s_%s", query.getType(), query.getDataSource(), query.getIntervals());
  // Run the base runner under a descriptive thread name, restoring the original name afterwards.
  final Sequence<T> baseSequence = doNamed(
      currThread,
      currThreadName,
      newName,
      new Supplier<Sequence<T>>()
      {
        @Override
        public Sequence<T> get()
        {
          return base.run(query, responseContext);
        }
      }
  );
  // Wrap the base sequence so a SegmentMissingException is recorded in the
  // response context instead of propagating to the caller.
  Sequence<T> segmentMissingCatchingSequence = new Sequence<T>()
  {
    @Override
    public <OutType> OutType accumulate(final OutType initValue, final Accumulator<OutType, T> accumulator)
    {
      try {
        return baseSequence.accumulate(initValue, accumulator);
      }
      catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        return initValue;
      }
    }

    @Override
    public <OutType> Yielder<OutType> toYielder(final OutType initValue, final YieldingAccumulator<OutType, T> accumulator)
    {
      try {
        return makeYielder(baseSequence.toYielder(initValue, accumulator));
      }
      catch (SegmentMissingException e) {
        appendMissingSegment(responseContext);
        return Yielders.done(initValue, null);
      }
    }

    private <OutType> Yielder<OutType> makeYielder(final Yielder<OutType> yielder)
    {
      return new Yielder<OutType>()
      {
        @Override
        public OutType get()
        {
          return yielder.get();
        }

        @Override
        public Yielder<OutType> next(final OutType initValue)
        {
          try {
            return yielder.next(initValue);
          }
          catch (SegmentMissingException e) {
            appendMissingSegment(responseContext);
            return Yielders.done(initValue, null);
          }
        }

        @Override
        public boolean isDone()
        {
          return yielder.isDone();
        }

        @Override
        public void close() throws IOException
        {
          yielder.close();
        }
      };
    }
  };
  // Ensure downstream processing of the sequence also runs under the renamed thread.
  return Sequences.wrap(
      segmentMissingCatchingSequence,
      new SequenceWrapper()
      {
        @Override
        public <RetType> RetType wrap(Supplier<RetType> sequenceProcessing)
        {
          return doNamed(currThread, currThreadName, newName, sequenceProcessing);
        }
      }
  );
}
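The doNamed helper is referenced above but not shown here; it is a private method of SpecificSegmentQueryRunner. A minimal sketch of what it plausibly does (the exact body in the Druid source may differ):

// Plausible sketch, not a verbatim copy: rename the thread for the duration
// of the supplied work, then restore the original name in a finally block.
private <RetType> RetType doNamed(Thread currThread, String currName, String newName, Supplier<RetType> toRun)
{
  try {
    currThread.setName(newName);
    return toRun.get();
  }
  finally {
    currThread.setName(currName);
  }
}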
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From class CachingClusteredClientTest, method testOutOfOrderSequenceMerging:
@Test
public void testOutOfOrderSequenceMerging() throws Exception
{
  List<Sequence<Result<TopNResultValue>>> sequences = ImmutableList.of(
      Sequences.simple(
          makeTopNResultsWithoutRename(
              new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
              new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
              new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
          )
      ),
      Sequences.simple(
          makeTopNResultsWithoutRename(
              new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
              new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
              new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
              new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
          )
      )
  );
  TestHelper.assertExpectedResults(
      makeTopNResultsWithoutRename(
          new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989,
          new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
          new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986,
          new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983,
          new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983
      ),
      client.mergeCachedAndUncachedSequences(
          new TopNQueryBuilder()
              .dataSource("test")
              .intervals("2011-01-06/2011-01-10")
              .dimension("a")
              .metric("b")
              .threshold(3)
              .aggregators(Arrays.<AggregatorFactory>asList(new CountAggregatorFactory("b")))
              .build(),
          sequences
      )
  );
}
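The behavior being verified is a k-way merge of inputs that are each internally time-ordered but interleaved with respect to one another. A standalone, hypothetical illustration of that merge pattern (this is not Druid's mergeCachedAndUncachedSequences implementation, just the underlying idea):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

// Hypothetical demo: merge several already-sorted lists into one sorted list
// using a priority queue keyed on each iterator's next element.
public final class MergeSortedDemo
{
  public static <T> List<T> mergeSorted(List<List<T>> inputs, Comparator<T> ordering)
  {
    PriorityQueue<Entry<T>> queue = new PriorityQueue<>(
        Math.max(1, inputs.size()),
        (a, b) -> ordering.compare(a.head, b.head)
    );
    for (List<T> input : inputs) {
      Iterator<T> it = input.iterator();
      if (it.hasNext()) {
        queue.add(new Entry<>(it.next(), it));
      }
    }
    List<T> merged = new ArrayList<>();
    while (!queue.isEmpty()) {
      Entry<T> entry = queue.poll();
      merged.add(entry.head);
      if (entry.rest.hasNext()) {
        queue.add(new Entry<>(entry.rest.next(), entry.rest));
      }
    }
    return merged;
  }

  private static final class Entry<T>
  {
    final T head;
    final Iterator<T> rest;

    Entry(T head, Iterator<T> rest)
    {
      this.head = head;
      this.rest = rest;
    }
  }
}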
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From class CachingQueryRunnerTest, method testCloseAndPopulate:
private void testCloseAndPopulate(List<Result> expectedRes, List<Result> expectedCacheRes, Query query, QueryToolChest toolchest) throws Exception
{
  final AssertingClosable closable = new AssertingClosable();
  // Wrap the result sequence so the test can observe when the underlying resource is closed.
  final Sequence resultSeq = Sequences.wrap(
      Sequences.simple(expectedRes),
      new SequenceWrapper()
      {
        @Override
        public void before()
        {
          Assert.assertFalse(closable.isClosed());
        }

        @Override
        public void after(boolean isDone, Throwable thrown) throws Exception
        {
          closable.close();
        }
      }
  );
  final CountDownLatch cacheMustBePutOnce = new CountDownLatch(1);
  // Minimal in-memory Cache that counts down the latch on the first put.
  Cache cache = new Cache()
  {
    private final Map<NamedKey, byte[]> baseMap = new ConcurrentHashMap<>();

    @Override
    public byte[] get(NamedKey key)
    {
      return baseMap.get(key);
    }

    @Override
    public void put(NamedKey key, byte[] value)
    {
      baseMap.put(key, value);
      cacheMustBePutOnce.countDown();
    }

    @Override
    public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys)
    {
      return null;
    }

    @Override
    public void close(String namespace)
    {
    }

    @Override
    public CacheStats getStats()
    {
      return null;
    }

    @Override
    public boolean isLocal()
    {
      return true;
    }

    @Override
    public void doMonitor(ServiceEmitter emitter)
    {
    }
  };
  String segmentIdentifier = "segment";
  SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
  DefaultObjectMapper objectMapper = new DefaultObjectMapper();
  CachingQueryRunner runner = new CachingQueryRunner(
      segmentIdentifier,
      segmentDescriptor,
      objectMapper,
      cache,
      toolchest,
      new QueryRunner()
      {
        @Override
        public Sequence run(Query query, Map responseContext)
        {
          return resultSeq;
        }
      },
      backgroundExecutorService,
      new CacheConfig()
      {
        @Override
        public boolean isPopulateCache()
        {
          return true;
        }

        @Override
        public boolean isUseCache()
        {
          return true;
        }
      }
  );
  CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
  Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
  HashMap<String, Object> context = new HashMap<String, Object>();
  Sequence res = runner.run(query, context);
  // The base sequence is not closed and the cache is not populated until the results are consumed.
  Assert.assertFalse("sequence must not be closed", closable.isClosed());
  Assert.assertNull("cache must be empty", cache.get(cacheKey));
  ArrayList results = Sequences.toList(res, new ArrayList());
  Assert.assertTrue(closable.isClosed());
  Assert.assertEquals(expectedRes.toString(), results.toString());
  // Cache population happens on a background executor; wait at most 10 seconds
  // so a failure does not block the overall test run.
  Assert.assertTrue("cache must be populated", cacheMustBePutOnce.await(10, TimeUnit.SECONDS));
  byte[] cacheValue = cache.get(cacheKey);
  Assert.assertNotNull(cacheValue);
  Function<Object, Result> fn = cacheStrategy.pullFromCache();
  List<Result> cacheResults = Lists.newArrayList(
      Iterators.transform(
          objectMapper.readValues(objectMapper.getFactory().createParser(cacheValue), cacheStrategy.getCacheObjectClazz()),
          fn
      )
  );
  Assert.assertEquals(expectedCacheRes.toString(), cacheResults.toString());
}
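AssertingClosable is a test helper defined elsewhere in CachingQueryRunnerTest and not shown on this page. A plausible minimal sketch, assuming it only needs to fail on double-close and report its state (the actual definition may differ):

// Hypothetical sketch of the AssertingClosable helper: a Closeable that
// asserts it is closed at most once and exposes whether close() has run.
private static class AssertingClosable implements Closeable
{
  private final AtomicBoolean closed = new AtomicBoolean(false);

  @Override
  public void close() throws IOException
  {
    Assert.assertFalse(closed.get());
    Assert.assertTrue(closed.compareAndSet(false, true));
  }

  public boolean isClosed()
  {
    return closed.get();
  }
}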
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From class CachingQueryRunnerTest, method testUseCache:
private void testUseCache(List<Result> expectedResults, Query query, QueryToolChest toolchest) throws Exception
{
  DefaultObjectMapper objectMapper = new DefaultObjectMapper();
  String segmentIdentifier = "segment";
  SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0);
  CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
  Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(segmentIdentifier, segmentDescriptor, cacheStrategy.computeCacheKey(query));
  Cache cache = MapCache.create(1024 * 1024);
  CacheUtil.populate(cache, objectMapper, cacheKey, Iterables.transform(expectedResults, cacheStrategy.prepareForCache()));
  CachingQueryRunner runner = new CachingQueryRunner(
      segmentIdentifier,
      segmentDescriptor,
      objectMapper,
      cache,
      toolchest,
      // Return an empty sequence, since results should get pulled from cache.
      new QueryRunner()
      {
        @Override
        public Sequence run(Query query, Map responseContext)
        {
          return Sequences.empty();
        }
      },
      backgroundExecutorService,
      new CacheConfig()
      {
        @Override
        public boolean isPopulateCache()
        {
          return true;
        }

        @Override
        public boolean isUseCache()
        {
          return true;
        }
      }
  );
  HashMap<String, Object> context = new HashMap<String, Object>();
  List<Result> results = Sequences.toList(runner.run(query, context), new ArrayList());
  Assert.assertEquals(expectedResults.toString(), results.toString());
}
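The test leans on prepareForCache() and pullFromCache() being inverse transforms: values serialized by the first must come back unchanged through the second. A hypothetical round-trip fragment making that symmetry explicit (cacheStrategy, objectMapper, and expectedResults as in the test; assumes checked Jackson exceptions are declared by the enclosing method):

// Hypothetical round-trip check, not part of the original test: serialize each
// result via prepareForCache(), write it as JSON, read it back as the cache
// object class, and restore it via pullFromCache().
for (Result expected : expectedResults) {
  Object cacheable = cacheStrategy.prepareForCache().apply(expected);
  byte[] bytes = objectMapper.writeValueAsBytes(cacheable);
  Object fromCache = objectMapper.readValue(bytes, cacheStrategy.getCacheObjectClazz());
  Result restored = (Result) cacheStrategy.pullFromCache().apply(fromCache);
  Assert.assertEquals(expected.toString(), restored.toString());
}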
Use of io.druid.java.util.common.guava.Sequence in project druid by druid-io.
From class DirectDruidClientTest, method testQueryInterruptionExceptionLogMessage:
@Test
public void testQueryInterruptionExceptionLogMessage() throws JsonProcessingException
{
  HttpClient httpClient = EasyMock.createMock(HttpClient.class);
  SettableFuture<Object> interruptionFuture = SettableFuture.create();
  Capture<Request> capturedRequest = EasyMock.newCapture();
  String hostName = "localhost:8080";
  EasyMock.expect(
      httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject())
  ).andReturn(interruptionFuture).anyTimes();
  EasyMock.replay(httpClient);
  DataSegment dataSegment = new DataSegment(
      "test",
      new Interval("2013-01-01/2013-01-02"),
      new DateTime("2013-01-01").toString(),
      Maps.<String, Object>newHashMap(),
      Lists.<String>newArrayList(),
      Lists.<String>newArrayList(),
      NoneShardSpec.instance(),
      0,
      0L
  );
  final ServerSelector serverSelector = new ServerSelector(
      dataSegment,
      new HighestPriorityTierSelectorStrategy(new ConnectionCountServerSelectorStrategy())
  );
  DirectDruidClient client1 = new DirectDruidClient(
      new ReflectionQueryToolChestWarehouse(),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER,
      new DefaultObjectMapper(),
      httpClient,
      hostName,
      new NoopServiceEmitter()
  );
  QueryableDruidServer queryableDruidServer = new QueryableDruidServer(
      new DruidServer("test1", hostName, 0, "historical", DruidServer.DEFAULT_TIER, 0),
      client1
  );
  serverSelector.addServerAndUpdateSegment(queryableDruidServer, dataSegment);
  TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
  HashMap<String, List> context = Maps.newHashMap();
  // Complete the mocked HTTP future with an error body before running the query.
  interruptionFuture.set(new ByteArrayInputStream("{\"error\":\"testing1\",\"errorMessage\":\"testing2\"}".getBytes()));
  Sequence results = client1.run(query, context);
  QueryInterruptedException actualException = null;
  try {
    Sequences.toList(results, Lists.newArrayList());
  }
  catch (QueryInterruptedException e) {
    actualException = e;
  }
  Assert.assertNotNull(actualException);
  Assert.assertEquals("testing1", actualException.getErrorCode());
  Assert.assertEquals("testing2", actualException.getMessage());
  Assert.assertEquals(hostName, actualException.getHost());
  EasyMock.verify(httpClient);
}
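The assertions rely on the client deserializing the JSON error body into a QueryInterruptedException, with "error" surfacing as the error code and "errorMessage" as the message. A minimal standalone sketch of that mapping (hypothetical demo class; not Druid's actual deserialization path, which also carries fields such as the host):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

// Hypothetical demo: parse an error body of the shape used in the test above
// and show which JSON fields back the exception's accessors.
public final class ErrorBodyDemo
{
  public static void main(String[] args) throws Exception
  {
    String body = "{\"error\":\"testing1\",\"errorMessage\":\"testing2\"}";
    Map<String, Object> parsed = new ObjectMapper().readValue(body, Map.class);
    String errorCode = (String) parsed.get("error");       // backs getErrorCode()
    String message = (String) parsed.get("errorMessage");  // backs getMessage()
    System.out.println(errorCode + " / " + message);       // prints: testing1 / testing2
  }
}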