Use of org.apache.druid.query.Result in project druid by druid-io.
In the class TimeseriesQueryRunnerBonusTest, the method testOneRowAtATime:
@Test
public void testOneRowAtATime() throws Exception {
final IncrementalIndex oneRowIndex = new OnheapIncrementalIndex.Builder()
    .setIndexSchema(
        new IncrementalIndexSchema.Builder()
            .withMinTimestamp(DateTimes.of("2012-01-01T00:00:00Z").getMillis())
            .build()
    )
    .setMaxRowCount(1000)
    .build();
List<Result<TimeseriesResultValue>> results;
oneRowIndex.add(
    new MapBasedInputRow(
        DateTimes.of("2012-01-01T00:00:00Z").getMillis(),
        ImmutableList.of("dim1"),
        ImmutableMap.of("dim1", "x")
    )
);
results = runTimeseriesCount(oneRowIndex);
Assert.assertEquals("index size", 1, oneRowIndex.size());
Assert.assertEquals("result size", 1, results.size());
Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp());
Assert.assertEquals("result count metric", 1, (long) results.get(0).getValue().getLongMetric("rows"));
oneRowIndex.add(
    new MapBasedInputRow(
        DateTimes.of("2012-01-01T00:00:00Z").getMillis(),
        ImmutableList.of("dim1"),
        ImmutableMap.of("dim1", "y")
    )
);
results = runTimeseriesCount(oneRowIndex);
Assert.assertEquals("index size", 2, oneRowIndex.size());
Assert.assertEquals("result size", 1, results.size());
Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp());
Assert.assertEquals("result count metric", 2, (long) results.get(0).getValue().getLongMetric("rows"));
}
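The runTimeseriesCount helper the test calls is not part of this excerpt. A minimal sketch of what such a helper might look like, assuming a count aggregator named "rows" (matching the assertions above) and a makeRunner helper that wraps the index in a single-segment QueryRunner; both the dataSource name and makeRunner are assumptions, not taken from the original:

private static List<Result<TimeseriesResultValue>> runTimeseriesCount(IncrementalIndex index)
{
  // Count-only timeseries query over the whole test interval; "rows" matches the metric asserted above.
  final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("xxx")
      .granularity(Granularities.ALL)
      .intervals("2012-01-01T00:00:00Z/P1D")
      .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
      .build();
  // makeRunner(...) is a hypothetical helper; the original test builds its runner elsewhere.
  final QueryRunner<Result<TimeseriesResultValue>> runner = makeRunner(index);
  return runner.run(QueryPlus.wrap(query), ConcurrentResponseContext.createEmpty()).toList();
}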
Use of org.apache.druid.query.Result in project druid by druid-io.
In the class TimeseriesQueryEngine, the method process:
/**
* Run a single-segment, single-interval timeseries query on a particular adapter. The query must have been
* scoped down to a single interval before calling this method.
*/
public Sequence<Result<TimeseriesResultValue>> process(final TimeseriesQuery query, final StorageAdapter adapter) {
if (adapter == null) {
throw new SegmentMissingException("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
}
final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getFilter()));
final Interval interval = Iterables.getOnlyElement(query.getIntervals());
final Granularity gran = query.getGranularity();
final boolean descending = query.isDescending();
final ColumnInspector inspector = query.getVirtualColumns().wrapInspector(adapter);
final boolean doVectorize = QueryContexts.getVectorize(query).shouldVectorize(
    adapter.canVectorize(filter, query.getVirtualColumns(), descending)
    && VirtualColumns.shouldVectorize(query, query.getVirtualColumns(), adapter)
    && query.getAggregatorSpecs().stream().allMatch(aggregatorFactory -> aggregatorFactory.canVectorize(inspector))
);
final Sequence<Result<TimeseriesResultValue>> result;
if (doVectorize) {
result = processVectorized(query, adapter, filter, interval, gran, descending);
} else {
result = processNonVectorized(query, adapter, filter, interval, gran, descending);
}
final int limit = query.getLimit();
if (limit < Integer.MAX_VALUE) {
return result.limit(limit);
} else {
return result;
}
}
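The limit branch at the end only wraps the sequence when the query carries an explicit limit, since getLimit() defaults to Integer.MAX_VALUE. A hedged sketch of a query that would exercise it; the dataSource, interval, and the builder's limit(...) call are illustrative assumptions, not taken from the original:

// Sketch: a single-interval timeseries query whose limit triggers result.limit(...) above.
final TimeseriesQuery limited = Druids.newTimeseriesQueryBuilder()
    .dataSource("testing")                              // placeholder dataSource
    .intervals("2012-01-01/2012-01-02")                 // exactly one interval, as process() requires
    .granularity(Granularities.HOUR)
    .aggregators(ImmutableList.of(new CountAggregatorFactory("rows")))
    .limit(5)                                           // caps the number of per-granularity buckets returned
    .build();
// engine and adapter are assumed in scope (a TimeseriesQueryEngine and a segment's StorageAdapter).
final Sequence<Result<TimeseriesResultValue>> capped = engine.process(limited, adapter);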
Use of org.apache.druid.query.Result in project druid by druid-io.
In the class TimeBoundaryQueryRunnerTest, the method testTimeBoundaryMax:
@Test
@SuppressWarnings("unchecked")
public void testTimeBoundaryMax() {
TimeBoundaryQuery timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder()
    .dataSource("testing")
    .bound(TimeBoundaryQuery.MAX_TIME)
    .build();
ResponseContext context = ConcurrentResponseContext.createEmpty();
context.initializeMissingSegments();
Iterable<Result<TimeBoundaryResultValue>> results =
    runner.run(QueryPlus.wrap(timeBoundaryQuery), context).toList();
TimeBoundaryResultValue val = results.iterator().next().getValue();
DateTime minTime = val.getMinTime();
DateTime maxTime = val.getMaxTime();
Assert.assertNull(minTime);
Assert.assertEquals(DateTimes.of("2011-04-15T00:00:00.000Z"), maxTime);
}
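The MIN_TIME bound is symmetric: only getMinTime() is populated and getMaxTime() comes back null. A minimal sketch following the same pattern; the assertions are illustrative, since the fixture's actual earliest timestamp is not shown in this excerpt:

TimeBoundaryQuery minQuery = Druids.newTimeBoundaryQueryBuilder()
    .dataSource("testing")
    .bound(TimeBoundaryQuery.MIN_TIME)
    .build();
Iterable<Result<TimeBoundaryResultValue>> minResults =
    runner.run(QueryPlus.wrap(minQuery), context).toList();
TimeBoundaryResultValue minVal = minResults.iterator().next().getValue();
Assert.assertNotNull(minVal.getMinTime()); // the dataset's earliest timestamp
Assert.assertNull(minVal.getMaxTime());    // the max side is absent for a MIN_TIME-bounded query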
Use of org.apache.druid.query.Result in project druid by druid-io.
In the class TimeBoundaryQueryRunnerTest, the method testMergeResults:
@Test
public void testMergeResults() {
List<Result<TimeBoundaryResultValue>> results = Arrays.asList(
    new Result<>(
        DateTimes.nowUtc(),
        new TimeBoundaryResultValue(ImmutableMap.of("maxTime", "2012-01-01", "minTime", "2011-01-01"))
    ),
    new Result<>(
        DateTimes.nowUtc(),
        new TimeBoundaryResultValue(ImmutableMap.of("maxTime", "2012-02-01", "minTime", "2011-01-01"))
    )
);
TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null, null);
Iterable<Result<TimeBoundaryResultValue>> actual = query.mergeResults(results);
Assert.assertEquals(DateTimes.of("2012-02-01"), actual.iterator().next().getValue().getMaxTime());
}
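mergeResults collapses the per-segment boundaries into a single result, keeping the earliest minTime and the latest maxTime. A follow-up assertion for the min side of the same fixture; both inputs carry minTime 2011-01-01, so the merged value must match:

Assert.assertEquals(DateTimes.of("2011-01-01"), actual.iterator().next().getValue().getMinTime());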
Use of org.apache.druid.query.Result in project druid by druid-io.
In the class TimeBoundaryQueryRunnerTest, the method testMergeResultsEmptyResults:
@Test
public void testMergeResultsEmptyResults() {
List<Result<TimeBoundaryResultValue>> results = new ArrayList<>();
TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null, null);
Iterable<Result<TimeBoundaryResultValue>> actual = query.mergeResults(results);
Assert.assertFalse(actual.iterator().hasNext());
}