Use of org.apache.druid.query.timeboundary.TimeBoundaryResultValue in project druid by druid-io.
From the class CachingClusteredClientTest, method testQueryCaching:
@SuppressWarnings("unchecked")
public void testQueryCaching(
    final QueryRunner runner,
    final int numTimesToQuery,
    boolean expectBySegment,
    final Query query, // does this assume query intervals must be ordered?
    Object... args
)
{
  final List<Interval> queryIntervals = Lists.newArrayListWithCapacity(args.length / 2);
  final List<List<Iterable<Result<Object>>>> expectedResults = Lists.newArrayListWithCapacity(queryIntervals.size());
  parseResults(queryIntervals, expectedResults, args);

  for (int i = 0; i < queryIntervals.size(); ++i) {
    List<Object> mocks = new ArrayList<>();
    mocks.add(serverView);

    final Interval actualQueryInterval = new Interval(queryIntervals.get(0).getStart(), queryIntervals.get(i).getEnd());
    final List<Map<DruidServer, ServerExpectations>> serverExpectationList =
        populateTimeline(queryIntervals, expectedResults, i, mocks);

    List<Capture> queryCaptures = new ArrayList<>();
    final Map<DruidServer, ServerExpectations> finalExpectation = serverExpectationList.get(serverExpectationList.size() - 1);
    for (Map.Entry<DruidServer, ServerExpectations> entry : finalExpectation.entrySet()) {
      DruidServer server = entry.getKey();
      ServerExpectations expectations = entry.getValue();

      EasyMock.expect(serverView.getQueryRunner(server)).andReturn(expectations.getQueryRunner()).once();

      final Capture<? extends QueryPlus> capture = Capture.newInstance();
      final Capture<? extends ResponseContext> context = Capture.newInstance();
      queryCaptures.add(capture);
      QueryRunner queryable = expectations.getQueryRunner();

      if (query instanceof TimeseriesQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<Result<TimeseriesResultValue>>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                .andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results))
                .once();
      } else if (query instanceof TopNQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<Result<TopNResultValue>>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                .andReturn(toQueryableTopNResults(segmentIds, intervals, results))
                .once();
      } else if (query instanceof SearchQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<Result<SearchResultValue>>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                .andReturn(toQueryableSearchResults(segmentIds, intervals, results))
                .once();
      } else if (query instanceof GroupByQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<ResultRow>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                .andReturn(toQueryableGroupByResults((GroupByQuery) query, segmentIds, intervals, results))
                .once();
      } else if (query instanceof TimeBoundaryQuery) {
        List<SegmentId> segmentIds = new ArrayList<>();
        List<Interval> intervals = new ArrayList<>();
        List<Iterable<Result<TimeBoundaryResultValue>>> results = new ArrayList<>();
        for (ServerExpectation expectation : expectations) {
          segmentIds.add(expectation.getSegmentId());
          intervals.add(expectation.getInterval());
          results.add(expectation.getResults());
        }
        EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context)))
                .andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results))
                .once();
      } else {
        throw new ISE("Unknown query type[%s]", query.getClass());
      }
    }

    // TimeBoundary expectations only cover the latest interval batch [i, i + 1);
    // every other query type merges the expectations from all intervals seen so far, [0, i + 1).
    final int expectedResultsRangeStart;
    final int expectedResultsRangeEnd;
    if (query instanceof TimeBoundaryQuery) {
      expectedResultsRangeStart = i;
      expectedResultsRangeEnd = i + 1;
    } else {
      expectedResultsRangeStart = 0;
      expectedResultsRangeEnd = i + 1;
    }

    runWithMocks(
        new Runnable()
        {
          @Override
          public void run()
          {
            for (int i = 0; i < numTimesToQuery; ++i) {
              TestHelper.assertExpectedResults(
                  new MergeIterable(
                      query instanceof GroupByQuery
                          ? ((GroupByQuery) query).getResultOrdering()
                          : Comparators.naturalNullsFirst(),
                      FunctionalIterable
                          .create(new RangeIterable(expectedResultsRangeStart, expectedResultsRangeEnd))
                          .transformCat(
                              new Function<Integer, Iterable<Iterable<Result<Object>>>>()
                              {
                                @Override
                                public Iterable<Iterable<Result<Object>>> apply(@Nullable Integer input)
                                {
                                  List<Iterable<Result<Object>>> retVal = new ArrayList<>();
                                  final Map<DruidServer, ServerExpectations> exps = serverExpectationList.get(input);
                                  for (ServerExpectations expectations : exps.values()) {
                                    for (ServerExpectation expectation : expectations) {
                                      retVal.add(expectation.getResults());
                                    }
                                  }
                                  return retVal;
                                }
                              }
                          )
                  ),
                  runner.run(
                      QueryPlus.wrap(query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(ImmutableList.of(actualQueryInterval)))),
                      initializeResponseContext()
                  )
              );
              if (queryCompletedCallback != null) {
                queryCompletedCallback.run();
              }
            }
          }
        },
        mocks.toArray()
    );

    // make sure all the queries were sent down as 'bySegment'
    for (Capture queryCapture : queryCaptures) {
      QueryPlus capturedQueryPlus = (QueryPlus) queryCapture.getValue();
      Query capturedQuery = capturedQueryPlus.getQuery();
      if (expectBySegment) {
        Assert.assertEquals(true, capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY));
      } else {
        Assert.assertTrue(
            capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY) == null
            || capturedQuery.getContextValue(QueryContexts.BY_SEGMENT_KEY).equals(false)
        );
      }
    }
  }
}
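A note on the varargs: the capacity hint args.length / 2 implies that parseResults consumes the trailing arguments as alternating (Interval, expected results) pairs. A minimal sketch of that assumed pairing, for illustration only and not the actual parseResults implementation:

// Hypothetical stand-in for parseResults; the pairwise (Interval, results)
// contract is an assumption inferred from the args.length / 2 capacity hint.
static void parsePairsSketch(List<Interval> intervals, List<Object> expectedResults, Object... args)
{
  for (int i = 0; i < args.length; i += 2) {
    intervals.add((Interval) args[i]); // even positions: query intervals
    expectedResults.add(args[i + 1]);  // odd positions: expected results for that interval
  }
}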
Use of org.apache.druid.query.timeboundary.TimeBoundaryResultValue in project druid by druid-io.
From the class SchemalessTestSimpleTest, method testTimeBoundary:
@Test
public void testTimeBoundary()
{
  TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("testing").build();
  List<Result<TimeBoundaryResultValue>> expectedResults = Collections.singletonList(
      new Result<TimeBoundaryResultValue>(
          DateTimes.of("2011-01-12T00:00:00.000Z"),
          new TimeBoundaryResultValue(
              ImmutableMap.of(
                  TimeBoundaryQuery.MIN_TIME, DateTimes.of("2011-01-12T00:00:00.000Z"),
                  TimeBoundaryQuery.MAX_TIME, DateTimes.of("2011-01-13T00:00:00.000Z")
              )
          )
      )
  );
  QueryRunner runner = TestQueryRunners.makeTimeBoundaryQueryRunner(segment);
  TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
}
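For reference, the map-backed TimeBoundaryResultValue can be unpacked with its typed accessors, the same ones TimewarpOperator uses further down this page. A short continuation of the test above:

// Unpack the expected boundary; the map keys are TimeBoundaryQuery.MIN_TIME / MAX_TIME.
TimeBoundaryResultValue boundary = expectedResults.get(0).getValue();
DateTime minTime = boundary.getMinTime(); // 2011-01-12T00:00:00.000Z
DateTime maxTime = boundary.getMaxTime(); // 2011-01-13T00:00:00.000Z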
Use of org.apache.druid.query.timeboundary.TimeBoundaryResultValue in project druid by druid-io.
From the class TimewarpOperatorTest, method testPostProcess:
@Test
public void testPostProcess()
{
  QueryRunner<Result<TimeseriesResultValue>> queryRunner = testOperator.postProcess(
      new QueryRunner<Result<TimeseriesResultValue>>()
      {
        @Override
        public Sequence<Result<TimeseriesResultValue>> run(
            QueryPlus<Result<TimeseriesResultValue>> queryPlus,
            ResponseContext responseContext
        )
        {
          return Sequences.simple(
              ImmutableList.of(
                  new Result<>(DateTimes.of("2014-01-09"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
                  new Result<>(DateTimes.of("2014-01-11"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
                  new Result<>(
                      queryPlus.getQuery().getIntervals().get(0).getEnd(),
                      new TimeseriesResultValue(ImmutableMap.of("metric", 5))
                  )
              )
          );
        }
      },
      DateTimes.of("2014-08-02").getMillis()
  );
  final Query<Result<TimeseriesResultValue>> query = Druids.newTimeseriesQueryBuilder()
      .dataSource("dummy")
      .intervals("2014-07-31/2014-08-05")
      .aggregators(Collections.singletonList(new CountAggregatorFactory("count")))
      .build();
  Assert.assertEquals(
      Lists.newArrayList(
          new Result<>(DateTimes.of("2014-07-31"), new TimeseriesResultValue(ImmutableMap.of("metric", 2))),
          new Result<>(DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 3))),
          new Result<>(DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 5)))
      ),
      queryRunner.run(QueryPlus.wrap(query)).toList()
  );

  TimewarpOperator<Result<TimeBoundaryResultValue>> timeBoundaryOperator = new TimewarpOperator<>(
      new Interval(DateTimes.of("2014-01-01"), DateTimes.of("2014-01-15")),
      new Period("P1W"),
      DateTimes.of("2014-01-06") // align on Monday
  );
  QueryRunner<Result<TimeBoundaryResultValue>> timeBoundaryRunner = timeBoundaryOperator.postProcess(
      new QueryRunner<Result<TimeBoundaryResultValue>>()
      {
        @Override
        public Sequence<Result<TimeBoundaryResultValue>> run(
            QueryPlus<Result<TimeBoundaryResultValue>> queryPlus,
            ResponseContext responseContext
        )
        {
          return Sequences.simple(
              ImmutableList.of(
                  new Result<>(
                      DateTimes.of("2014-01-12"),
                      new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", DateTimes.of("2014-01-12")))
                  )
              )
          );
        }
      },
      DateTimes.of("2014-08-02").getMillis()
  );
  final Query<Result<TimeBoundaryResultValue>> timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder()
      .dataSource("dummy")
      .build();
  Assert.assertEquals(
      Collections.singletonList(
          new Result<>(
              DateTimes.of("2014-08-02"),
              new TimeBoundaryResultValue(ImmutableMap.<String, Object>of("maxTime", DateTimes.of("2014-08-02")))
          )
      ),
      timeBoundaryRunner.run(QueryPlus.wrap(timeBoundaryQuery)).toList()
  );
}
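The assertions encode the warp arithmetic of postProcess (shown next): result timestamps are shifted back by the warp offset, and anything landing past now is clamped to now. With these fixtures the offset works out to 203 days; the sketch below infers it from the fixtures rather than reading it from the private computeOffset:

// 2014-01-09 in the warped frame corresponds to 2014-07-31 in the real frame.
int days = Days.daysBetween(DateTimes.of("2014-01-09"), DateTimes.of("2014-07-31")).getDays(); // 203
DateTime restored = DateTimes.of("2014-01-09").plusDays(days); // 2014-07-31, as asserted
// The third fixture sits on the warped interval end and comes back clamped to
// now (2014-08-02) rather than the nominal interval end (2014-08-05).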
Use of org.apache.druid.query.timeboundary.TimeBoundaryResultValue in project druid by druid-io.
From the class TimewarpOperator, method postProcess:
public QueryRunner<T> postProcess(final QueryRunner<T> baseRunner, final long now)
{
  return new QueryRunner<T>()
  {
    @Override
    public Sequence<T> run(final QueryPlus<T> queryPlus, final ResponseContext responseContext)
    {
      final DateTimeZone tz = queryPlus.getQuery().getTimezone();
      final long offset = computeOffset(now, tz);

      final Interval interval = queryPlus.getQuery().getIntervals().get(0);
      // Warp the query interval, capping both endpoints at "now" so the base
      // runner is never asked for data from the future.
      final Interval modifiedInterval = new Interval(
          Math.min(interval.getStartMillis() + offset, now + offset),
          Math.min(interval.getEndMillis() + offset, now + offset),
          interval.getChronology()
      );
      return Sequences.map(
          baseRunner.run(
              queryPlus.withQuery(
                  queryPlus.getQuery().withQuerySegmentSpec(
                      new MultipleIntervalSegmentSpec(Collections.singletonList(modifiedInterval))
                  )
              ),
              responseContext
          ),
          new Function<T, T>()
          {
            @Override
            public T apply(T input)
            {
              if (input instanceof Result) {
                Result res = (Result) input;
                Object value = res.getValue();
                if (value instanceof TimeBoundaryResultValue) {
                  TimeBoundaryResultValue boundary = (TimeBoundaryResultValue) value;
                  DateTime minTime;
                  try {
                    minTime = boundary.getMinTime();
                  }
                  catch (IllegalArgumentException e) {
                    // getMinTime() throws when the result carries no minTime
                    // (e.g. a maxTime-only boundary); treat it as absent.
                    minTime = null;
                  }
                  final DateTime maxTime = boundary.getMaxTime();
                  return (T) ((TimeBoundaryQuery) queryPlus.getQuery())
                      .buildResult(
                          DateTimes.utc(Math.min(res.getTimestamp().getMillis() - offset, now)),
                          minTime != null ? minTime.minus(offset) : null,
                          maxTime != null ? DateTimes.utc(Math.min(maxTime.getMillis() - offset, now)) : null
                      )
                      .iterator()
                      .next();
                }
                return (T) new Result(res.getTimestamp().minus(offset), value);
              } else if (input instanceof MapBasedRow) {
                MapBasedRow row = (MapBasedRow) input;
                return (T) new MapBasedRow(row.getTimestamp().minus(offset), row.getEvent());
              }
              // default to noop for unknown result types
              return input;
            }
          }
      );
    }
  };
}
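Typical wiring mirrors testPostProcess above; a hedged sketch, where baseRunner is a placeholder for whatever runner serves the warped interval:

// Hypothetical usage; the constructor arguments mirror the TimeBoundary case in the test.
TimewarpOperator<Result<TimeseriesResultValue>> operator = new TimewarpOperator<>(
    new Interval(DateTimes.of("2014-01-01"), DateTimes.of("2014-01-15")), // interval queries are warped into
    new Period("P1W"),                                                    // warp period
    DateTimes.of("2014-01-06")                                            // origin, aligning periods on a Monday
);
QueryRunner<Result<TimeseriesResultValue>> warped = operator.postProcess(baseRunner, System.currentTimeMillis());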
Use of org.apache.druid.query.timeboundary.TimeBoundaryResultValue in project druid by druid-io.
From the class QueryResourceTest, method testGoodQueryWithQueryConfigDoesNotOverrideQueryContext:
@Test
public void testGoodQueryWithQueryConfigDoesNotOverrideQueryContext() throws IOException
{
  String overrideConfigKey = "priority";
  String overrideConfigValue = "678";
  DefaultQueryConfig overrideConfig = new DefaultQueryConfig(ImmutableMap.of(overrideConfigKey, overrideConfigValue));
  queryResource = new QueryResource(
      new QueryLifecycleFactory(
          WAREHOUSE,
          TEST_SEGMENT_WALKER,
          new DefaultGenericQueryMetricsFactory(),
          new NoopServiceEmitter(),
          testRequestLogger,
          new AuthConfig(),
          AuthTestUtils.TEST_AUTHORIZER_MAPPER,
          Suppliers.ofInstance(overrideConfig)
      ),
      jsonMapper,
      smileMapper,
      queryScheduler,
      new AuthConfig(),
      null,
      ResponseContextConfig.newConfig(true),
      DRUID_NODE
  );
  expectPermissiveHappyPathAuth();

  // SIMPLE_TIMESERIES_QUERY_LOW_PRIORITY's context sets overrideConfigKey ("priority") to -1
  Response response = queryResource.doPost(
      new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY_LOW_PRIORITY.getBytes(StandardCharsets.UTF_8)),
      null, /* pretty */
      testServletRequest
  );
  Assert.assertNotNull(response);

  final ByteArrayOutputStream baos = new ByteArrayOutputStream();
  ((StreamingOutput) response.getEntity()).write(baos);
  final List<Result<TimeBoundaryResultValue>> responses = jsonMapper.readValue(
      baos.toByteArray(),
      new TypeReference<List<Result<TimeBoundaryResultValue>>>() {}
  );

  Assert.assertNotNull(response);
  Assert.assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
  Assert.assertEquals(0, responses.size());
  Assert.assertEquals(1, testRequestLogger.getNativeQuerylogs().size());
  Assert.assertNotNull(testRequestLogger.getNativeQuerylogs().get(0).getQuery());
  Assert.assertNotNull(testRequestLogger.getNativeQuerylogs().get(0).getQuery().getContext());
  Assert.assertTrue(testRequestLogger.getNativeQuerylogs().get(0).getQuery().getContext().containsKey(overrideConfigKey));
  // the query's own context value (-1) wins over the configured default ("678")
  Assert.assertEquals(-1, testRequestLogger.getNativeQuerylogs().get(0).getQuery().getContext().get(overrideConfigKey));
}
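The final assertion pins down the precedence the test is named for: DefaultQueryConfig supplies defaults, and a key the query's own context already sets (priority = -1) is not overridden by the configured "678". A minimal sketch of that assumed merge order:

// Assumed merge semantics, consistent with the assertions above:
// config-supplied defaults first, then the query's own context on top.
Map<String, Object> merged = new HashMap<>();
merged.putAll(ImmutableMap.of("priority", "678")); // from DefaultQueryConfig
merged.putAll(ImmutableMap.of("priority", -1));    // from the query's JSON context
// merged.get("priority") -> -1, matching the last assertEquals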