Use of org.apache.druid.query.Result in project druid by druid-io.
The class CachingClusteredClientTest, method testQueryCaching.
@SuppressWarnings("unchecked")
public void testQueryCaching(
    final QueryRunner runner,
    final int numTimesToQuery,
    boolean expectBySegment,
    final Query query,
    Object... args // does this assume query intervals must be ordered?
)
{
  final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
  final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
  parseResults(queryIntervals, expectedResults, args);
  for (int i = 0; i < queryIntervals.size(); ++i) {
    List<Object> mocks = new ArrayList<>();
    mocks.add(serverView);

    final Interval actualQueryInterval = new Interval(
        queryIntervals.get(0).getStart(),
        queryIntervals.get(i).getEnd()
    );
    final List<Map<DruidServer, ServerExpectations>> serverExpectationList =
        populateTimeline(queryIntervals, expectedResults, i, mocks);

    List<Capture> queryCaptures = new ArrayList<>();
    final Map<DruidServer, ServerExpectations> finalExpectation =
        serverExpectationList.get(serverExpectationList.size() - 1);
    for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
      DruidServer server = entry.getKey();
      ServerExpectations expectations = entry.getValue();

      EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).once();

      final Capture<? extends QueryPlus> capture = Capture.newInstance();
      final Capture<? extends ResponseContext> context = Capture.newInstance();
      queryCaptures.add(capture);
      QueryRunner queryable = expectations.getQueryRunner();
      if (query instanceof TimeseriesQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<Result<TimeseriesResultValue>>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock
            .expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
            .andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results))
            .once();
      } else if (query instanceof TopNQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<Result<TopNResultValue>>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock
            .expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
            .andReturn(toQueryableTopNResults(segmentIds, intervals, results))
            .once();
      } else if (query instanceof SearchQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<Result<SearchResultValue>>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock
            .expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
            .andReturn(toQueryableSearchResults(segmentIds, intervals, results))
            .once();
      } else if (query instanceof GroupByQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<ResultRow>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock
            .expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
            .andReturn(toQueryableGroupByResults((GroupByQuery) query, segmentIds, intervals, results))
            .once();
      } else if (query instanceof TimeBoundaryQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<Result<TimeBoundaryResultValue>>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock
            .expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
            .andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results))
            .once();
      } else {
        throw new ISE("Unknown query type[%s]", query.getClass());
      }
    }
    // TimeBoundary queries are checked against only the current interval's
    // expectations; every other query type accumulates expected results from
    // all intervals seen so far.
    final int expectedResultsRangeStart;
    final int expectedResultsRangeEnd;
    if (query instanceof TimeBoundaryQuery) {
      expectedResultsRangeStart = i;
      expectedResultsRangeEnd = i + 1;
    } else {
      expectedResultsRangeStart = 0;
      expectedResultsRangeEnd = i + 1;
    }
    runWithMocks(
        new Runnable()
        {
          @Override
          public void run()
          {
            for (int i = 0; i < numTimesToQuery; ++i) {
              TestHelper.assertExpectedResults(
                  new MergeIterable(
                      query instanceof GroupByQuery
                          ? ((GroupByQuery) query).getResultOrdering()
                          : Comparators.naturalNullsFirst(),
                      FunctionalIterable
                          .create(new RangeIterable(expectedResultsRangeStart, expectedResultsRangeEnd))
                          .transformCat(
                              new Function<Integer, Iterable<Iterable<Result<Object>>>>()
                              {
                                @Override
                                public Iterable<Iterable<Result<Object>>> apply(@Nullable Integer input)
                                {
                                  List<Iterable<Result<Object>>> retVal = new ArrayList<>();
                                  final Map<DruidServer, ServerExpectations> exps = serverExpectationList.get(input);
                                  for (ServerExpectations expectations : exps.values()) {
                                    for (ServerExpectation expectation : expectations) {
                                      retVal.add(expectation.getResults());
                                    }
                                  }
                                  return retVal;
                                }
                              }
                          )
                  ),
                  runner.run(
                      QueryPlus.wrap(
                          query.withQuerySegmentSpec(
                              new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval))
                          )
                      ),
                      initializeResponseContext()
                  )
              );
              if (queryCompletedCallback != null) {
                queryCompletedCallback.run();
              }
            }
          }
        },
        mocks.toArray()
    );
    // verify whether the queries were sent down with the 'bySegment' context flag
    for (Capture queryCapture : queryCaptures) {
      QueryPlus capturedQueryPlus = (QueryPlus) queryCapture.getValue();
      Query capturedQuery = capturedQueryPlus.getQuery();
      if (expectBySegment) {
        Assert.assertEquals(true, capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY));
      } else {
        Assert.assertTrue(
            capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY) == null
            || capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY).equals(false)
        );
      }
    }
  }
}
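The assertions at the end of testQueryCaching rely on EasyMock's Capture to inspect the QueryPlus that was actually handed to each mocked QueryRunner. The standalone sketch below shows the same capture-then-assert pattern in isolation; the Downstream interface and all names are illustrative, not part of the Druid test.

import org.easymock.Capture;
import org.easymock.EasyMock;

// A hypothetical downstream collaborator, standing in for QueryRunner.
interface Downstream
{
  String run(String query);
}

public class CaptureSketch
{
  public static void main(String[] args)
  {
    Downstream mock = EasyMock.createMock(Downstream.class);
    Capture<String> captured = Capture.newInstance();

    // Record whatever argument the mock is invoked with.
    EasyMock.expect(mock.run(EasyMock.capture(captured))).andReturn("ok").once();
    EasyMock.replay(mock);

    mock.run("select *");

    // After the call, the capture holds the argument for inspection.
    EasyMock.verify(mock);
    System.out.println(captured.getValue()); // prints: select *
  }
}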
Use of org.apache.druid.query.Result in project druid by druid-io.
The class CachingClusteredClientTest, method parseResults.
public void parseResults(
    final List<Interval> queryIntervals,
    final List<List<Iterable<Result<Object>>>> expectedResults,
    Object... args
)
{
  if (args.length % 2 != 0) {
    throw new ISE("args.length must be divisible by two, was %d", args.length);
  }
  for (int i = 0; i < args.length; i += 2) {
    final Interval interval = (Interval) args[i];
    final Iterable<Result<Object>> results = (Iterable<Result<Object>>) args[i + 1];
    if (queryIntervals.size() > 0 && interval.equals(queryIntervals.get(queryIntervals.size() - 1))) {
      expectedResults.get(expectedResults.size() - 1).add(results);
    } else {
      queryIntervals.add(interval);
      expectedResults.add(Lists.<Iterable<Result<Object>>>newArrayList(results));
    }
  }
}
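parseResults treats its varargs as (interval, results) pairs and merges consecutive pairs that repeat the same interval into one result group. Below is a minimal self-contained sketch of that pairing contract, using plain strings in place of Interval and the Result iterables.

import java.util.ArrayList;
import java.util.List;

public class ParseResultsSketch
{
  public static void main(String[] args)
  {
    Object[] pairs = {
        "2011-01-01/2011-01-02", "resultsA",
        "2011-01-02/2011-01-03", "resultsB",
        "2011-01-02/2011-01-03", "resultsC" // repeats the previous interval
    };
    List<String> intervals = new ArrayList<>();
    List<List<String>> grouped = new ArrayList<>();
    for (int i = 0; i < pairs.length; i += 2) {
      String interval = (String) pairs[i];
      String results = (String) pairs[i + 1];
      if (!intervals.isEmpty() && interval.equals(intervals.get(intervals.size() - 1))) {
        // same interval as the previous pair: append to its existing group
        grouped.get(grouped.size() - 1).add(results);
      } else {
        intervals.add(interval);
        List<String> group = new ArrayList<>();
        group.add(results);
        grouped.add(group);
      }
    }
    System.out.println(intervals); // [2011-01-01/2011-01-02, 2011-01-02/2011-01-03]
    System.out.println(grouped);   // [[resultsA], [resultsB, resultsC]]
  }
}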
Use of org.apache.druid.query.Result in project druid by druid-io.
The class CachingQueryRunnerTest, method testCloseAndPopulate.
private void testCloseAndPopulate(
    List<Result> expectedRes,
    List<Result> expectedCacheRes,
    Query query,
    QueryToolChest toolchest
) throws Exception
{
  final AssertingClosable closable = new AssertingClosable();
  final Sequence resultSeq = Sequences.wrap(
      Sequences.simple(expectedRes),
      new SequenceWrapper()
      {
        @Override
        public void before()
        {
          Assert.assertFalse(closable.isClosed());
        }

        @Override
        public void after(boolean isDone, Throwable thrown)
        {
          closable.close();
        }
      }
  );

  final CountDownLatch cacheMustBePutOnce = new CountDownLatch(1);
  Cache cache = new Cache()
  {
    private final ConcurrentMap<NamedKey, byte[]> baseMap = new ConcurrentHashMap<>();

    @Override
    public byte[] get(NamedKey key)
    {
      return baseMap.get(key);
    }

    @Override
    public void put(NamedKey key, byte[] value)
    {
      baseMap.put(key, value);
      cacheMustBePutOnce.countDown();
    }

    @Override
    public Map<NamedKey, byte[]> getBulk(Iterable<NamedKey> keys)
    {
      return null;
    }

    @Override
    public void close(String namespace)
    {
    }

    @Override
    public void close()
    {
    }

    @Override
    public CacheStats getStats()
    {
      return null;
    }

    @Override
    public boolean isLocal()
    {
      return true;
    }

    @Override
    public void doMonitor(ServiceEmitter emitter)
    {
    }
  };

  byte[] keyPrefix = RandomUtils.nextBytes(10);
  CachingQueryRunner runner = makeCachingQueryRunner(keyPrefix, cache, toolchest, resultSeq);
  CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query);
  Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey(
      CACHE_ID,
      SEGMENT_DESCRIPTOR,
      Bytes.concat(keyPrefix, cacheStrategy.computeCacheKey(query))
  );
  Assert.assertTrue(runner.canPopulateCache(query, cacheStrategy));

  Sequence res = runner.run(QueryPlus.wrap(query));
  // the base sequence is not closed yet
  Assert.assertFalse("sequence must not be closed", closable.isClosed());
  Assert.assertNull("cache must be empty", cache.get(cacheKey));

  List results = res.toList();
  Assert.assertTrue(closable.isClosed());
  Assert.assertEquals(expectedRes.toString(), results.toString());

  // wait for background caching to finish; give up after 10 seconds so a
  // failure here does not block the overall test run
  Assert.assertTrue("cache must be populated", cacheMustBePutOnce.await(10, TimeUnit.SECONDS));

  byte[] cacheValue = cache.get(cacheKey);
  Assert.assertNotNull(cacheValue);

  Function<Object, Result> fn = cacheStrategy.pullFromSegmentLevelCache();
  List<Result> cacheResults = Lists.newArrayList(
      Iterators.transform(
          objectMapper.readValues(
              objectMapper.getFactory().createParser(cacheValue),
              cacheStrategy.getCacheObjectClazz()
          ),
          fn
      )
  );
  Assert.assertEquals(expectedCacheRes.toString(), cacheResults.toString());
}
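AssertingClosable is a private helper of CachingQueryRunnerTest that is not shown in this snippet. Here is a plausible sketch of it, assuming it only needs to track a closed flag and fail if closed twice (the two behaviors the test above exercises via isClosed() and close()); it nests inside the test class, with java.io.Closeable, java.util.concurrent.atomic.AtomicBoolean, and org.junit.Assert imported at file level.

private static class AssertingClosable implements Closeable
{
  private final AtomicBoolean closed = new AtomicBoolean(false);

  @Override
  public void close()
  {
    // fail if the wrapped sequence tries to close its resources more than once
    Assert.assertFalse(closed.get());
    Assert.assertTrue(closed.compareAndSet(false, true));
  }

  public boolean isClosed()
  {
    return closed.get();
  }
}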
Use of org.apache.druid.query.Result in project druid by druid-io.
The class CachingQueryRunnerTest, method testTimeseries.
@Test
public void testTimeseries() throws Exception
{
  for (boolean descending : new boolean[]{false, true}) {
    TimeseriesQuery query = Druids
        .newTimeseriesQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.DAY_GRAN)
        .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .aggregators(
            Arrays.asList(
                QueryRunnerTestHelper.ROWS_COUNT,
                new LongSumAggregatorFactory("idx", "index"),
                QueryRunnerTestHelper.QUALITY_UNIQUES
            )
        )
        .descending(descending)
        .build();

    Result row1 = new Result<>(
        DateTimes.of("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9))
    );
    Result row2 = new Result<>(
        DateTimes.of("2011-04-02"),
        new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9))
    );

    List<Result> expectedResults;
    if (descending) {
      expectedResults = Lists.newArrayList(row2, row1);
    } else {
      expectedResults = Lists.newArrayList(row1, row2);
    }

    QueryToolChest toolChest = new TimeseriesQueryQueryToolChest();
    testCloseAndPopulate(expectedResults, expectedResults, query, toolChest);
    testUseCache(expectedResults, query, toolChest);
  }
}
Use of org.apache.druid.query.Result in project druid by druid-io.
The class DirectDruidClientTest, method testRun.
@Test
public void testRun() throws Exception
{
  final URL url = new URL(StringUtils.format("http://%s/druid/v2/", hostName));

  SettableFuture<InputStream> futureResult = SettableFuture.create();
  Capture<Request> capturedRequest = EasyMock.newCapture();
  EasyMock
      .expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class)))
      .andReturn(futureResult)
      .times(1);

  SettableFuture futureException = SettableFuture.create();
  EasyMock
      .expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class)))
      .andReturn(futureException)
      .times(1);

  EasyMock
      .expect(httpClient.go(EasyMock.capture(capturedRequest), EasyMock.<HttpResponseHandler>anyObject(), EasyMock.anyObject(Duration.class)))
      .andReturn(SettableFuture.create())
      .atLeastOnce();
  EasyMock.replay(httpClient);

  DirectDruidClient client2 = new DirectDruidClient(
      new ReflectionQueryToolChestWarehouse(),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER,
      new DefaultObjectMapper(),
      httpClient,
      "http",
      "foo2",
      new NoopServiceEmitter()
  );
  QueryableDruidServer queryableDruidServer2 = new QueryableDruidServer(
      new DruidServer("test1", "localhost", null, 0, ServerType.HISTORICAL, DruidServer.DEFAULT_TIER, 0),
      client2
  );
  serverSelector.addServerAndUpdateSegment(queryableDruidServer2, serverSelector.getSegment());

  TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build();
  query = query.withOverriddenContext(ImmutableMap.of(DirectDruidClient.QUERY_FAIL_TIME, Long.MAX_VALUE));

  Sequence s1 = client.run(QueryPlus.wrap(query));
  Assert.assertTrue(capturedRequest.hasCaptured());
  Assert.assertEquals(url, capturedRequest.getValue().getUrl());
  Assert.assertEquals(HttpMethod.POST, capturedRequest.getValue().getMethod());
  Assert.assertEquals(1, client.getNumOpenConnections());

  // simulate a read timeout
  client.run(QueryPlus.wrap(query));
  Assert.assertEquals(2, client.getNumOpenConnections());
  futureException.setException(new ReadTimeoutException());
  Assert.assertEquals(1, client.getNumOpenConnections());

  // subsequent connections should work
  client.run(QueryPlus.wrap(query));
  client.run(QueryPlus.wrap(query));
  client.run(QueryPlus.wrap(query));
  Assert.assertEquals(4, client.getNumOpenConnections());

  // produce a result for the first connection
  futureResult.set(new ByteArrayInputStream(StringUtils.toUtf8("[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]")));
  List<Result> results = s1.toList();
  Assert.assertEquals(1, results.size());
  Assert.assertEquals(DateTimes.of("2014-01-01T01:02:03Z"), results.get(0).getTimestamp());
  Assert.assertEquals(3, client.getNumOpenConnections());

  client2.run(QueryPlus.wrap(query));
  client2.run(QueryPlus.wrap(query));
  Assert.assertEquals(2, client2.getNumOpenConnections());
  Assert.assertEquals(serverSelector.pick(null), queryableDruidServer2);

  EasyMock.verify(httpClient);
}
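The connections in testRun are driven entirely by Guava SettableFutures: the mocked HttpClient hands one back per call, and the test later completes it with either a response body or an exception. A minimal standalone sketch of that pattern follows; the class and variable names are illustrative, not from the Druid test.

import com.google.common.util.concurrent.SettableFuture;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class SettableFutureSketch
{
  public static void main(String[] args) throws Exception
  {
    // The "successful" connection: complete the future with a response body,
    // as testRun does for futureResult.
    SettableFuture<InputStream> futureResult = SettableFuture.create();
    futureResult.set(new ByteArrayInputStream(
        "[{\"timestamp\":\"2014-01-01T01:02:03Z\", \"result\": 42.0}]"
            .getBytes(StandardCharsets.UTF_8)
    ));
    System.out.println(futureResult.get().available()); // bytes ready to read

    // The "failed" connection: complete the future exceptionally, as testRun
    // does with a ReadTimeoutException; calling get() now would throw an
    // ExecutionException wrapping this cause.
    SettableFuture<InputStream> futureException = SettableFuture.create();
    futureException.setException(new RuntimeException("simulated read timeout"));
    System.out.println(futureException.isDone()); // true
  }
}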