use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.
the class CachingClusteredClientTest method testNoSegmentPruningForHashPartitionedSegments.
private void testNoSegmentPruningForHashPartitionedSegments(
    boolean enableSegmentPruning,
    @Nullable HashPartitionFunction partitionFunction,
    boolean useEmptyPartitionDimensions
)
{
  DimFilter filter = new AndDimFilter(
      new SelectorDimFilter("dim1", "a", null),
      new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC),
      // Equivalent filter of dim3 below is InDimFilter("dim3", Arrays.asList("c"), null)
      new AndDimFilter(
          new InDimFilter("dim3", Arrays.asList("a", "c", "e", "g"), null),
          new BoundDimFilter("dim3", "aaa", "ddd", false, false, false, null, StringComparators.LEXICOGRAPHIC)
      )
  );
  final Map<String, Object> context = new HashMap<>(CONTEXT);
  context.put(QueryContexts.SECONDARY_PARTITION_PRUNING_KEY, enableSegmentPruning);
  final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
      .dataSource(DATA_SOURCE)
      .filters(filter)
      .granularity(GRANULARITY)
      .intervals(SEG_SPEC)
      .intervals("2011-01-05/2011-01-10")
      .aggregators(RENAMED_AGGS)
      .postAggregators(RENAMED_POST_AGGS)
      .context(context)
      .randomQueryId();
  TimeseriesQuery query = builder.build();
  QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());

  final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
  final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
  final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
  final DruidServer lastServer = servers[random.nextInt(servers.length)];

  List<String> partitionDimensions = useEmptyPartitionDimensions ? ImmutableList.of() : ImmutableList.of("dim1");
  final int numPartitions1 = 6;
  for (int i = 0; i < numPartitions1; i++) {
    ServerSelector selector = makeMockHashBasedSelector(lastServer, partitionDimensions, partitionFunction, i, numPartitions1);
    timeline.add(interval1, "v", new NumberedPartitionChunk<>(i, numPartitions1, selector));
  }

  partitionDimensions = useEmptyPartitionDimensions ? ImmutableList.of() : ImmutableList.of("dim2");
  final int numPartitions2 = 3;
  for (int i = 0; i < numPartitions2; i++) {
    ServerSelector selector = makeMockHashBasedSelector(lastServer, partitionDimensions, partitionFunction, i, numPartitions2);
    timeline.add(interval2, "v", new NumberedPartitionChunk<>(i, numPartitions2, selector));
  }

  partitionDimensions = useEmptyPartitionDimensions ? ImmutableList.of() : ImmutableList.of("dim1", "dim3");
  final int numPartitions3 = 4;
  for (int i = 0; i < numPartitions3; i++) {
    ServerSelector selector = makeMockHashBasedSelector(lastServer, partitionDimensions, partitionFunction, i, numPartitions3);
    timeline.add(interval3, "v", new NumberedPartitionChunk<>(i, numPartitions3, selector));
  }

  final Capture<QueryPlus> capture = Capture.newInstance();
  final Capture<ResponseContext> contextCap = Capture.newInstance();
  QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
  EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap)))
          .andReturn(Sequences.empty())
          .anyTimes();
  EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
  EasyMock.replay(serverView);
  EasyMock.replay(mockRunner);

  // Expected to read all segments
  Set<SegmentDescriptor> expectedDescriptors = new HashSet<>();
  IntStream.range(0, numPartitions1).forEach(i -> expectedDescriptors.add(new SegmentDescriptor(interval1, "v", i)));
  IntStream.range(0, numPartitions2).forEach(i -> expectedDescriptors.add(new SegmentDescriptor(interval2, "v", i)));
  IntStream.range(0, numPartitions3).forEach(i -> expectedDescriptors.add(new SegmentDescriptor(interval3, "v", i)));

  runner.run(QueryPlus.wrap(query)).toList();

  QuerySegmentSpec querySegmentSpec = ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec();
  Assert.assertSame(MultipleSpecificSegmentSpec.class, querySegmentSpec.getClass());
  final Set<SegmentDescriptor> actualDescriptors = new HashSet<>(((MultipleSpecificSegmentSpec) querySegmentSpec).getDescriptors());
  Assert.assertEquals(expectedDescriptors, actualDescriptors);
}
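The method above is a parameterized helper rather than a @Test itself. A hedged sketch of how callers might exercise it; the test-method names below are illustrative, while the argument combinations follow directly from the helper's signature:

// Hypothetical callers (illustrative names); MURMUR3_32_ABS is Druid's standard hash partition function.
@Test
public void testNoSegmentPruningWhenSecondaryPartitionPruningIsDisabled()
{
  testNoSegmentPruningForHashPartitionedSegments(false, HashPartitionFunction.MURMUR3_32_ABS, false);
}

@Test
public void testNoSegmentPruningWhenPartitionDimensionsAreEmpty()
{
  testNoSegmentPruningForHashPartitionedSegments(true, HashPartitionFunction.MURMUR3_32_ABS, true);
}

@Test
public void testNoSegmentPruningWhenPartitionFunctionIsUnknown()
{
  // partitionFunction is @Nullable, so null models a segment without a recorded hash function
  testNoSegmentPruningForHashPartitionedSegments(true, null, false);
}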
use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.
the class TestHttpClient method go.
@Override
public <Intermediate, Final> ListenableFuture<Final> go(
    Request request,
    HttpResponseHandler<Intermediate, Final> handler,
    Duration readTimeout
)
{
  try {
    final Query query = objectMapper.readValue(request.getContent().array(), Query.class);
    final QueryRunner queryRunner = servers.get(request.getUrl()).getQueryRunner();
    if (queryRunner == null) {
      throw new ISE("Can't find queryRunner for url[%s]", request.getUrl());
    }
    final ResponseContext responseContext = ResponseContext.createEmpty();
    final Sequence sequence = queryRunner.run(QueryPlus.wrap(query), responseContext);
    final byte[] serializedContent;
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
      objectMapper.writeValue(baos, sequence);
      serializedContent = baos.toByteArray();
    }
    final ResponseContext.SerializationResult serializationResult =
        responseContext.serializeWith(objectMapper, RESPONSE_CTX_HEADER_LEN_LIMIT);
    final HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
    response.headers().add(QueryResource.HEADER_RESPONSE_CONTEXT, serializationResult.getResult());
    response.setContent(HeapChannelBufferFactory.getInstance().getBuffer(serializedContent, 0, serializedContent.length));
    final ClientResponse<Intermediate> intermClientResponse = handler.handleResponse(response, NOOP_TRAFFIC_COP);
    final ClientResponse<Final> finalClientResponse = handler.done(intermClientResponse);
    return Futures.immediateFuture(finalClientResponse.getObj());
  }
  catch (IOException e) {
    throw new RuntimeException(e);
  }
}
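The go() method above ships the serialized ResponseContext back in the QueryResource.HEADER_RESPONSE_CONTEXT response header. A minimal sketch of the receiving side, assuming ResponseContext.deserialize(String, ObjectMapper) is available as it is for Druid's DirectDruidClient; the variable names are illustrative:

// Sketch only: read the header written above and rebuild the ResponseContext on the caller's side.
String headerValue = response.headers().get(QueryResource.HEADER_RESPONSE_CONTEXT);
if (headerValue != null) {
  ResponseContext receivedContext = ResponseContext.deserialize(headerValue, objectMapper);
  // receivedContext now carries whatever the query runner recorded, e.g. missing segments.
}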
use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.
the class UnionQueryRunnerTest method testUnionQueryRunner.
@Test
public void testUnionQueryRunner()
{
  AtomicBoolean ds1 = new AtomicBoolean(false);
  AtomicBoolean ds2 = new AtomicBoolean(false);
  QueryRunner baseRunner = new QueryRunner()
  {
    @Override
    public Sequence run(QueryPlus queryPlus, ResponseContext responseContext)
    {
      // verify that table datasource is passed to baseQueryRunner
      Assert.assertTrue(queryPlus.getQuery().getDataSource() instanceof TableDataSource);
      String dsName = Iterables.getOnlyElement(queryPlus.getQuery().getDataSource().getTableNames());
      if ("ds1".equals(dsName)) {
        ds1.compareAndSet(false, true);
        return Sequences.simple(Arrays.asList(1, 2, 3));
      } else if ("ds2".equals(dsName)) {
        ds2.compareAndSet(false, true);
        return Sequences.simple(Arrays.asList(4, 5, 6));
      } else {
        throw new AssertionError("Unexpected DataSource");
      }
    }
  };
  UnionQueryRunner runner = new UnionQueryRunner(baseRunner);
  // Make a dummy query with Union datasource
  Query q = Druids.newTimeseriesQueryBuilder()
                  .dataSource(new UnionDataSource(Arrays.asList(new TableDataSource("ds1"), new TableDataSource("ds2"))))
                  .intervals("2014-01-01T00:00:00Z/2015-01-01T00:00:00Z")
                  .aggregators(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS)
                  .build();
  ResponseContext responseContext = ResponseContext.createEmpty();
  Sequence<?> result = runner.run(QueryPlus.wrap(q), responseContext);
  List res = result.toList();
  Assert.assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6), res);
  Assert.assertEquals(true, ds1.get());
  Assert.assertEquals(true, ds2.get());
}
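For contrast, a hedged sketch (not part of the original test) of the pass-through case: when the datasource is a plain TableDataSource rather than a union, UnionQueryRunner should simply delegate the query to the base runner unchanged:

// Sketch: a non-union query reaches the base runner as-is, so only ds1's rows come back.
Query singleQuery = Druids.newTimeseriesQueryBuilder()
                          .dataSource(new TableDataSource("ds1"))
                          .intervals("2014-01-01T00:00:00Z/2015-01-01T00:00:00Z")
                          .aggregators(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS)
                          .build();
Assert.assertEquals(
    Arrays.asList(1, 2, 3),
    runner.run(QueryPlus.wrap(singleQuery), ResponseContext.createEmpty()).toList()
);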
use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.
the class ClientQuerySegmentWalkerTest method initWalker.
/**
 * Initialize (or reinitialize) our {@link #walker} and {@link #closer}.
 */
private void initWalker(final Map<String, String> serverProperties, QueryScheduler schedulerForTest)
{
  final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
  final ServerConfig serverConfig = jsonMapper.convertValue(serverProperties, ServerConfig.class);
  final SegmentWrangler segmentWrangler = new MapSegmentWrangler(
      ImmutableMap.<Class<? extends DataSource>, SegmentWrangler>builder()
          .put(InlineDataSource.class, new InlineSegmentWrangler())
          .build()
  );
  final JoinableFactory globalFactory = new JoinableFactory()
  {
    @Override
    public boolean isDirectlyJoinable(DataSource dataSource)
    {
      return ((GlobalTableDataSource) dataSource).getName().equals(GLOBAL);
    }

    @Override
    public Optional<Joinable> build(DataSource dataSource, JoinConditionAnalysis condition)
    {
      return Optional.empty();
    }
  };
  final JoinableFactory joinableFactory = new MapJoinableFactory(
      ImmutableSet.of(globalFactory, new InlineJoinableFactory()),
      ImmutableMap.<Class<? extends JoinableFactory>, Class<? extends DataSource>>builder()
          .put(InlineJoinableFactory.class, InlineDataSource.class)
          .put(globalFactory.getClass(), GlobalTableDataSource.class)
          .build()
  );

  class CapturingWalker implements QuerySegmentWalker
  {
    private QuerySegmentWalker baseWalker;
    private ClusterOrLocal how;

    CapturingWalker(QuerySegmentWalker baseWalker, ClusterOrLocal how)
    {
      this.baseWalker = baseWalker;
      this.how = how;
    }

    @Override
    public <T> QueryRunner<T> getQueryRunnerForIntervals(Query<T> query, Iterable<Interval> intervals)
    {
      final QueryRunner<T> baseRunner = baseWalker.getQueryRunnerForIntervals(query, intervals);
      return (queryPlus, responseContext) -> {
        log.info("Query (%s): %s", how, queryPlus.getQuery());
        issuedQueries.add(new ExpectedQuery(queryPlus.getQuery(), how));
        return baseRunner.run(queryPlus, responseContext);
      };
    }

    @Override
    public <T> QueryRunner<T> getQueryRunnerForSegments(Query<T> query, Iterable<SegmentDescriptor> specs)
    {
      final QueryRunner<T> baseRunner = baseWalker.getQueryRunnerForSegments(query, specs);
      return (queryPlus, responseContext) -> {
        log.info("Query (%s): %s", how, queryPlus.getQuery());
        issuedQueries.add(new ExpectedQuery(queryPlus.getQuery(), how));
        return baseRunner.run(queryPlus, responseContext);
      };
    }
  }

  walker = QueryStackTests.createClientQuerySegmentWalker(
      new CapturingWalker(
          QueryStackTests.createClusterQuerySegmentWalker(
              ImmutableMap.<String, VersionedIntervalTimeline<String, ReferenceCountingSegment>>builder()
                  .put(FOO, makeTimeline(FOO, FOO_INLINE))
                  .put(BAR, makeTimeline(BAR, BAR_INLINE))
                  .put(MULTI, makeTimeline(MULTI, MULTI_VALUE_INLINE))
                  .put(GLOBAL, makeTimeline(GLOBAL, FOO_INLINE))
                  .put(ARRAY, makeTimeline(ARRAY, ARRAY_INLINE))
                  .put(ARRAY_UNKNOWN, makeTimeline(ARRAY_UNKNOWN, ARRAY_INLINE_UNKNOWN))
                  .build(),
              joinableFactory,
              conglomerate,
              schedulerForTest
          ),
          ClusterOrLocal.CLUSTER
      ),
      new CapturingWalker(
          QueryStackTests.createLocalQuerySegmentWalker(conglomerate, segmentWrangler, joinableFactory, schedulerForTest),
          ClusterOrLocal.LOCAL
      ),
      conglomerate,
      joinableFactory,
      serverConfig
  );
}
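A hedged sketch of how the test might invoke initWalker from its setup; the single-argument overload and the null scheduler are assumptions for illustration, not shown in the excerpt above:

// Assumption: a null schedulerForTest means "no scheduler"; the one-argument
// overload is a convenience wrapper that is not part of the excerpt above.
private void initWalker(final Map<String, String> serverProperties)
{
  initWalker(serverProperties, null);
}

@Before
public void setUp()
{
  initWalker(ImmutableMap.of());
}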
use of org.apache.druid.query.context.ResponseContext in project druid by druid-io.
the class ReportTimelineMissingSegmentQueryRunnerTest method testRunWithOneSegment.
@Test
public void testRunWithOneSegment()
{
  final Interval interval = Intervals.of("2020-01-01/P1D");
  final SegmentDescriptor missingSegment = new SegmentDescriptor(interval, "version", 0);
  final ReportTimelineMissingSegmentQueryRunner<Object> runner =
      new ReportTimelineMissingSegmentQueryRunner<>(missingSegment);
  final ResponseContext responseContext = DefaultResponseContext.createEmpty();
  runner.run(QueryPlus.wrap(new TestQuery()), responseContext);
  Assert.assertNotNull(responseContext.getMissingSegments());
  Assert.assertEquals(Collections.singletonList(missingSegment), responseContext.getMissingSegments());
}
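Beyond the assertions above, the accumulated missing-segment list is what callers typically act on. A minimal sketch; the retry decision is an assumption for illustration, not part of this test:

// Sketch: inspect the missing segments recorded in the ResponseContext and
// decide whether to query them again.
List<SegmentDescriptor> missing = responseContext.getMissingSegments();
if (missing != null && !missing.isEmpty()) {
  // e.g. build a new MultipleSpecificSegmentSpec from "missing" and re-run,
  // which is roughly what Druid's retrying query runner does.
}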