use of org.apache.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
the class CachingClusteredClientTest method testTimeseriesCachingTimeZone.
@Test
@SuppressWarnings("unchecked")
public void testTimeseriesCachingTimeZone() {
final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(PT1H_TZ_GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
testQueryCaching(
    runner,
    builder.randomQueryId().build(),
    Intervals.of("2011-11-04/2011-11-08"),
    makeTimeResults(
        new DateTime("2011-11-04", TIMEZONE), 50, 5000,
        new DateTime("2011-11-05", TIMEZONE), 30, 6000,
        new DateTime("2011-11-06", TIMEZONE), 23, 85312,
        new DateTime("2011-11-07", TIMEZONE), 85, 102
    )
);
TimeseriesQuery query = builder
    .intervals("2011-11-04/2011-11-08")
    .aggregators(RENAMED_AGGS)
    .postAggregators(RENAMED_POST_AGGS)
    .randomQueryId()
    .build();
TestHelper.assertExpectedResults(
    makeRenamedTimeResults(
        new DateTime("2011-11-04", TIMEZONE), 50, 5000,
        new DateTime("2011-11-05", TIMEZONE), 30, 6000,
        new DateTime("2011-11-06", TIMEZONE), 23, 85312,
        new DateTime("2011-11-07", TIMEZONE), 85, 102
    ),
    runner.run(QueryPlus.wrap(query))
);
}
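The test above leans on the class-level TIMEZONE and PT1H_TZ_GRANULARITY constants. A minimal sketch of how such constants could be defined, assuming a Joda DateTimeZone and Druid's PeriodGranularity (the actual zone and period used in CachingClusteredClientTest may differ):
private static final DateTimeZone TIMEZONE = DateTimes.inferTzFromString("America/Los_Angeles"); // assumed zone
private static final Granularity PT1H_TZ_GRANULARITY =
    new PeriodGranularity(new Period("PT1H"), null, TIMEZONE); // hourly buckets evaluated in TIMEZONE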
use of org.apache.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
the class CachingClusteredClientTest method testQueryCaching.
@SuppressWarnings("unchecked")
public void testQueryCaching(
    final QueryRunner runner,
    final int numTimesToQuery,
    boolean expectBySegment,
    final Query query,
    // does this assume query intervals must be ordered?
    Object... args
) {
final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
parseResults(queryIntervals, expectedResults, args);
for (int i = 0; i < queryIntervals.size(); ++i) {
List<Object> mocks = new ArrayList<>();
mocks.add(serverView);
final Interval actualQueryInterval = new Interval(queryIntervals.get(0).getStart(), queryIntervals.get(i).getEnd());
final List<Map<DruidServer, ServerExpectations>> serverExpectationList = populateTimeline(queryIntervals, expectedResults, i, mocks);
List<Capture> queryCaptures = new ArrayList<>();
final Map<DruidServer, ServerExpectations> finalExpectation = serverExpectationList.get(serverExpectationList.size() - 1);
for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
DruidServer server = entry.getKey();
ServerExpectations expectations = entry.getValue();
EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).once();
final Capture<? extends QueryPlus> capture = Capture.newInstance();
final Capture<? extends ResponseContext> context = Capture.newInstance();
queryCaptures.add(capture);
QueryRunner queryable = expectations.getQueryRunner();
if (query instanceof TimeseriesQuery) {
List<SegmentId> segmentIds = new ArrayList<>();
List<Interval> intervals = new ArrayList<>();
List<Iterable<Result<TimeseriesResultValue>>> results = new ArrayList<>();
for (ServerExpectation expectation : expectations) {
segmentIds.add(expectation.getSegmentId());
intervals.add(expectation.getInterval());
results.add(expectation.getResults());
}
EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results)).once();
} else if (query instanceof TopNQuery) {
List<SegmentId> segmentIds = new ArrayList<>();
List<Interval> intervals = new ArrayList<>();
List<Iterable<Result<TopNResultValue>>> results = new ArrayList<>();
for (ServerExpectation expectation : expectations) {
segmentIds.add(expectation.getSegmentId());
intervals.add(expectation.getInterval());
results.add(expectation.getResults());
}
EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTopNResults(segmentIds, intervals, results)).once();
} else if (query instanceof SearchQuery) {
List<SegmentId> segmentIds = new ArrayList<>();
List<Interval> intervals = new ArrayList<>();
List<Iterable<Result<SearchResultValue>>> results = new ArrayList<>();
for (ServerExpectation expectation : expectations) {
segmentIds.add(expectation.getSegmentId());
intervals.add(expectation.getInterval());
results.add(expectation.getResults());
}
EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableSearchResults(segmentIds, intervals, results)).once();
} else if (query instanceof GroupByQuery) {
List<SegmentId> segmentIds = new ArrayList<>();
List<Interval> intervals = new ArrayList<>();
List<Iterable<ResultRow>> results = new ArrayList<>();
for (ServerExpectation expectation : expectations) {
segmentIds.add(expectation.getSegmentId());
intervals.add(expectation.getInterval());
results.add(expectation.getResults());
}
EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableGroupByResults((GroupByQuery) query, segmentIds, intervals, results)).once();
} else if (query instanceof TimeBoundaryQuery) {
List<SegmentId> segmentIds = new ArrayList<>();
List<Interval> intervals = new ArrayList<>();
List<Iterable<Result<TimeBoundaryResultValue>>> results = new ArrayList<>();
for (ServerExpectation expectation : expectations) {
segmentIds.add(expectation.getSegmentId());
intervals.add(expectation.getInterval());
results.add(expectation.getResults());
}
EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))).andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results)).once();
} else {
throw new ISE("Unknown query type[%s]", query.getClass());
}
}
final int expectedResultsRangeStart;
final int expectedResultsRangeEnd;
if (query instanceof TimeBoundaryQuery) {
expectedResultsRangeStart = i;
expectedResultsRangeEnd = i + 1;
} else {
expectedResultsRangeStart = 0;
expectedResultsRangeEnd = i + 1;
}
runWithMocks(new Runnable() {
@Override
public void run() {
for (int i = 0; i < numTimesToQuery; ++i) {
TestHelper.assertExpectedResults(
    new MergeIterable(
        query instanceof GroupByQuery ? ((GroupByQuery) query).getResultOrdering() : Comparators.naturalNullsFirst(),
        FunctionalIterable
            .create(new RangeIterable(expectedResultsRangeStart, expectedResultsRangeEnd))
            .transformCat(new Function<Integer, Iterable<Iterable<Result<Object>>>>() {
              @Override
              public Iterable<Iterable<Result<Object>>> apply(@Nullable Integer input) {
                List<Iterable<Result<Object>>> retVal = new ArrayList<>();
                final Map<DruidServer, ServerExpectations> exps = serverExpectationList.get(input);
                for (ServerExpectations expectations : exps.values()) {
                  for (ServerExpectation expectation : expectations) {
                    retVal.add(expectation.getResults());
                  }
                }
                return retVal;
              }
            })
    ),
    runner.run(
        QueryPlus.wrap(query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval)))),
        initializeResponseContext()
    )
);
if (queryCompletedCallback != null) {
queryCompletedCallback.run();
}
}
}
}, mocks.toArray());
// make sure all the queries were sent down as 'bySegment'
for (Capture queryCapture : queryCaptures) {
QueryPlus capturedQueryPlus = (QueryPlus) queryCapture.getValue();
Query capturedQuery = capturedQueryPlus.getQuery();
if (expectBySegment) {
Assert.assertEquals(true, capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY));
} else {
Assert.assertTrue(capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY) == null || capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY).equals(false));
}
}
}
}
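For reference, the trailing varargs appear to follow an alternating (query Interval, expected results) convention that parseResults splits into queryIntervals and expectedResults; the shorter testQueryCaching(runner, query, args...) overload used by the tests in this class presumably delegates here with default values. A hedged sketch of a direct call, with illustrative intervals and values only:
testQueryCaching(
    runner,
    3,     // numTimesToQuery: repeat the query so later runs can hit the populated cache (assumed default)
    true,  // expectBySegment: downstream queries are expected to carry bySegment=true (assumed default)
    builder.randomQueryId().build(),
    Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000),
    Intervals.of("2011-01-02/2011-01-03"), makeTimeResults(DateTimes.of("2011-01-02"), 30, 6000)
);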
use of org.apache.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
the class CachingClusteredClientTest method testTimeseriesMergingOutOfOrderPartitions.
@Test
public void testTimeseriesMergingOutOfOrderPartitions() {
final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .intervals(SEG_SPEC)
    .filters(DIM_FILTER)
    .granularity(GRANULARITY)
    .aggregators(AGGS)
    .postAggregators(POST_AGGS)
    .context(CONTEXT);
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
testQueryCaching(
    runner,
    builder.randomQueryId().build(),
    Intervals.of("2011-01-05/2011-01-10"),
    makeTimeResults(
        DateTimes.of("2011-01-05T02"), 80, 100,
        DateTimes.of("2011-01-06T02"), 420, 520,
        DateTimes.of("2011-01-07T02"), 12, 2194,
        DateTimes.of("2011-01-08T02"), 59, 201,
        DateTimes.of("2011-01-09T02"), 181, 52
    ),
    Intervals.of("2011-01-05/2011-01-10"),
    makeTimeResults(
        DateTimes.of("2011-01-05T00"), 85, 102,
        DateTimes.of("2011-01-06T00"), 412, 521,
        DateTimes.of("2011-01-07T00"), 122, 21894,
        DateTimes.of("2011-01-08T00"), 5, 20,
        DateTimes.of("2011-01-09T00"), 18, 521
    )
);
TimeseriesQuery query = builder
    .intervals("2011-01-05/2011-01-10")
    .aggregators(RENAMED_AGGS)
    .postAggregators(RENAMED_POST_AGGS)
    .randomQueryId()
    .build();
TestHelper.assertExpectedResults(
    makeRenamedTimeResults(
        DateTimes.of("2011-01-05T00"), 85, 102,
        DateTimes.of("2011-01-05T02"), 80, 100,
        DateTimes.of("2011-01-06T00"), 412, 521,
        DateTimes.of("2011-01-06T02"), 420, 520,
        DateTimes.of("2011-01-07T00"), 122, 21894,
        DateTimes.of("2011-01-07T02"), 12, 2194,
        DateTimes.of("2011-01-08T00"), 5, 20,
        DateTimes.of("2011-01-08T02"), 59, 201,
        DateTimes.of("2011-01-09T00"), 18, 521,
        DateTimes.of("2011-01-09T02"), 181, 52
    ),
    runner.run(QueryPlus.wrap(query))
);
}
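The assertion above checks that the two per-partition result sets, each internally time-ordered but offset from one another, come back interleaved into a single time-ordered sequence. A small illustration of that merge using the same utilities as testQueryCaching, assuming makeTimeResults returns Iterable<Result<TimeseriesResultValue>> as its usage suggests:
// Hypothetical per-partition result sets, built with the test's own helper.
Iterable<Result<TimeseriesResultValue>> partition1Results =
    makeTimeResults(DateTimes.of("2011-01-05T02"), 80, 100, DateTimes.of("2011-01-06T02"), 420, 520);
Iterable<Result<TimeseriesResultValue>> partition2Results =
    makeTimeResults(DateTimes.of("2011-01-05T00"), 85, 102, DateTimes.of("2011-01-06T00"), 412, 521);
// Result compares by timestamp, so a natural-order merge interleaves the partitions chronologically.
Iterable<Result<TimeseriesResultValue>> merged = new MergeIterable<Result<TimeseriesResultValue>>(
    Comparators.naturalNullsFirst(),
    ImmutableList.of(partition1Results, partition2Results)
);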
use of org.apache.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
the class CachingQueryRunnerTest method testTimeseries.
@Test
public void testTimeseries() throws Exception {
for (boolean descending : new boolean[] { false, true }) {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.DAY_GRAN)
    .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .aggregators(Arrays.asList(
        QueryRunnerTestHelper.ROWS_COUNT,
        new LongSumAggregatorFactory("idx", "index"),
        QueryRunnerTestHelper.QUALITY_UNIQUES
    ))
    .descending(descending)
    .build();
Result row1 = new Result<>(
    DateTimes.of("2011-04-01"),
    new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9))
);
Result row2 = new Result<>(
    DateTimes.of("2011-04-02"),
    new TimeseriesResultValue(ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9))
);
List<Result> expectedResults;
if (descending) {
expectedResults = Lists.newArrayList(row2, row1);
} else {
expectedResults = Lists.newArrayList(row1, row2);
}
QueryToolChest toolChest = new TimeseriesQueryQueryToolChest();
testCloseAndPopulate(expectedResults, expectedResults, query, toolChest);
testUseCache(expectedResults, query, toolChest);
}
}
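The cache population and reuse exercised by testCloseAndPopulate and testUseCache are driven by the tool chest's CacheStrategy. A brief sketch of how the segment-level cache key for this query could be inspected, assuming getCacheStrategy and computeCacheKey keep the signatures of the Druid version this test targets:
TimeseriesQueryQueryToolChest timeseriesToolChest = new TimeseriesQueryQueryToolChest();
CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> strategy =
    timeseriesToolChest.getCacheStrategy(query);
byte[] segmentCacheKey = strategy.computeCacheKey(query); // bytes identifying this query for the segment-level cache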
use of org.apache.druid.query.timeseries.TimeseriesQuery in project druid by druid-io.
the class CachingClusteredClientTest method testNoSegmentPruningForHashPartitionedSegments.
private void testNoSegmentPruningForHashPartitionedSegments(
    boolean enableSegmentPruning,
    @Nullable HashPartitionFunction partitionFunction,
    boolean useEmptyPartitionDimensions
) {
DimFilter filter = new AndDimFilter(
    new SelectorDimFilter("dim1", "a", null),
    new BoundDimFilter("dim2", "e", "zzz", true, true, false, null, StringComparators.LEXICOGRAPHIC),
    // Equivalent filter of dim3 below is InDimFilter("dim3", Arrays.asList("c"), null)
    new AndDimFilter(
        new InDimFilter("dim3", Arrays.asList("a", "c", "e", "g"), null),
        new BoundDimFilter("dim3", "aaa", "ddd", false, false, false, null, StringComparators.LEXICOGRAPHIC)
    )
);
final Map<String, Object> context = new HashMap<>(CONTEXT);
context.put(QueryContexts.SECONDARY_PARTITION_PRUNING_KEY, enableSegmentPruning);
final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder()
    .dataSource(DATA_SOURCE)
    .filters(filter)
    .granularity(GRANULARITY)
    .intervals(SEG_SPEC)
    .intervals("2011-01-05/2011-01-10")
    .aggregators(RENAMED_AGGS)
    .postAggregators(RENAMED_POST_AGGS)
    .context(context)
    .randomQueryId();
TimeseriesQuery query = builder.build();
QueryRunner runner = new FinalizeResultsQueryRunner(getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest());
final Interval interval1 = Intervals.of("2011-01-06/2011-01-07");
final Interval interval2 = Intervals.of("2011-01-07/2011-01-08");
final Interval interval3 = Intervals.of("2011-01-08/2011-01-09");
final DruidServer lastServer = servers[random.nextInt(servers.length)];
List<String> partitionDimensions = useEmptyPartitionDimensions ? ImmutableList.of() : ImmutableList.of("dim1");
final int numPartitions1 = 6;
for (int i = 0; i < numPartitions1; i++) {
ServerSelector selector = makeMockHashBasedSelector(lastServer, partitionDimensions, partitionFunction, i, numPartitions1);
timeline.add(interval1, "v", new NumberedPartitionChunk<>(i, numPartitions1, selector));
}
partitionDimensions = useEmptyPartitionDimensions ? ImmutableList.of() : ImmutableList.of("dim2");
final int numPartitions2 = 3;
for (int i = 0; i < numPartitions2; i++) {
ServerSelector selector = makeMockHashBasedSelector(lastServer, partitionDimensions, partitionFunction, i, numPartitions2);
timeline.add(interval2, "v", new NumberedPartitionChunk<>(i, numPartitions2, selector));
}
partitionDimensions = useEmptyPartitionDimensions ? ImmutableList.of() : ImmutableList.of("dim1", "dim3");
final int numPartitions3 = 4;
for (int i = 0; i < numPartitions3; i++) {
ServerSelector selector = makeMockHashBasedSelector(lastServer, partitionDimensions, partitionFunction, i, numPartitions3);
timeline.add(interval3, "v", new NumberedPartitionChunk<>(i, numPartitions3, selector));
}
final Capture<QueryPlus> capture = Capture.newInstance();
final Capture<ResponseContext> contextCap = Capture.newInstance();
QueryRunner mockRunner = EasyMock.createNiceMock(QueryRunner.class);
EasyMock.expect(mockRunner.run(EasyMock.capture(capture), EasyMock.capture(contextCap))).andReturn(Sequences.empty()).anyTimes();
EasyMock.expect(serverView.getQueryRunner(lastServer)).andReturn(mockRunner).anyTimes();
EasyMock.replay(serverView);
EasyMock.replay(mockRunner);
// Expected to read all segments
Set<SegmentDescriptor> expectedDescriptors = new HashSet<>();
IntStream.range(0, numPartitions1).forEach(i -> expectedDescriptors.add(new SegmentDescriptor(interval1, "v", i)));
IntStream.range(0, numPartitions2).forEach(i -> expectedDescriptors.add(new SegmentDescriptor(interval2, "v", i)));
IntStream.range(0, numPartitions3).forEach(i -> expectedDescriptors.add(new SegmentDescriptor(interval3, "v", i)));
runner.run(QueryPlus.wrap(query)).toList();
QuerySegmentSpec querySegmentSpec = ((TimeseriesQuery) capture.getValue().getQuery()).getQuerySegmentSpec();
Assert.assertSame(MultipleSpecificSegmentSpec.class, querySegmentSpec.getClass());
final Set<SegmentDescriptor> actualDescriptors = new HashSet<>(((MultipleSpecificSegmentSpec) querySegmentSpec).getDescriptors());
Assert.assertEquals(expectedDescriptors, actualDescriptors);
}
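This helper is private, so its assertions only run when it is invoked from @Test methods. A hedged sketch of what such callers might look like; the actual test names and argument combinations in CachingClusteredClientTest may differ, and HashPartitionFunction.MURMUR3_32_ABS is assumed to be an available partition function:
@Test
public void testNoSegmentPruningWhenSecondaryPartitionPruningIsDisabled() {
  testNoSegmentPruningForHashPartitionedSegments(false, HashPartitionFunction.MURMUR3_32_ABS, false);
}
@Test
public void testNoSegmentPruningWhenPartitionFunctionIsUnknown() {
  // segments persisted without a recorded hash partition function cannot be safely pruned
  testNoSegmentPruningForHashPartitionedSegments(true, null, false);
}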