Use of io.druid.query.Result in project druid by druid-io.
From class RealtimeIndexTaskTest, method sumMetric:
public long sumMetric(final Task task, final String metric) throws Exception {
  // Do a query.
  TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
      .dataSource("test_ds")
      .aggregators(ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory(metric, metric)))
      .granularity(Granularities.ALL)
      .intervals("2000/3000")
      .build();
  ArrayList<Result<TimeseriesResultValue>> results = Sequences.toList(
      task.getQueryRunner(query).run(query, ImmutableMap.<String, Object>of()),
      Lists.<Result<TimeseriesResultValue>>newArrayList()
  );
  return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric(metric);
}
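For context, a minimal self-contained sketch of what sumMetric extracts: io.druid.query.Result pairs a bucket timestamp with a query-type-specific value object. The class name ResultSketch and the metric name "rows" are hypothetical; the API calls are the same ones the snippet above uses.

import com.google.common.collect.ImmutableMap;
import io.druid.query.Result;
import io.druid.query.timeseries.TimeseriesResultValue;
import org.joda.time.DateTime;

public class ResultSketch {
  public static void main(String[] args) {
    // A Result pairs a bucket timestamp with the value for that bucket.
    Result<TimeseriesResultValue> result = new Result<>(
        new DateTime("2000-01-01"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>of("rows", 10L))
    );
    // getLongMetric looks up the named metric in the wrapped value map.
    long rows = result.getValue().getLongMetric("rows");
    System.out.println(rows); // prints 10
  }
}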
Use of io.druid.query.Result in project druid by druid-io.
From class SearchBinaryFn, method apply:
@Override
public Result<SearchResultValue> apply(Result<SearchResultValue> arg1, Result<SearchResultValue> arg2) {
  if (arg1 == null) {
    return arg2;
  }
  if (arg2 == null) {
    return arg1;
  }
  final int limit = gran instanceof AllGranularity ? this.limit : -1;
  SearchResultValue arg1Vals = arg1.getValue();
  SearchResultValue arg2Vals = arg2.getValue();
  Iterable<SearchHit> merged = Iterables.mergeSorted(
      Arrays.asList(arg1Vals, arg2Vals),
      searchSortSpec.getComparator()
  );
  int maxSize = arg1Vals.getValue().size() + arg2Vals.getValue().size();
  if (limit > 0) {
    maxSize = Math.min(limit, maxSize);
  }
  List<SearchHit> results = Lists.newArrayListWithExpectedSize(maxSize);
  SearchHit prev = null;
  for (SearchHit searchHit : merged) {
    if (prev == null) {
      prev = searchHit;
      continue;
    }
    if (prev.equals(searchHit)) {
      if (prev.getCount() != null && searchHit.getCount() != null) {
        prev = new SearchHit(prev.getDimension(), prev.getValue(), prev.getCount() + searchHit.getCount());
      } else {
        prev = new SearchHit(prev.getDimension(), prev.getValue());
      }
    } else {
      results.add(prev);
      prev = searchHit;
      if (limit > 0 && results.size() >= limit) {
        break;
      }
    }
  }
  if (prev != null && (limit < 0 || results.size() < limit)) {
    results.add(prev);
  }
  final DateTime timestamp = gran instanceof AllGranularity
      ? arg1.getTimestamp()
      : gran.bucketStart(arg1.getTimestamp());
  return new Result<SearchResultValue>(timestamp, new SearchResultValue(results));
}
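A hedged sketch of the merge in action, assuming a SearchBinaryFn(SearchSortSpec, Granularity, int) constructor with a SearchSortSpec built from StringComparators.LEXICOGRAPHIC; the hit values are hypothetical. Because the two sides contain equal hits, they collapse into one hit whose counts are summed:

SearchBinaryFn fn = new SearchBinaryFn(
    new SearchSortSpec(StringComparators.LEXICOGRAPHIC),  // assumed constructor
    Granularities.ALL,
    1000
);
DateTime time = new DateTime("2000-01-01");
Result<SearchResultValue> left = new Result<>(
    time, new SearchResultValue(Arrays.asList(new SearchHit("dim", "a", 2)))
);
Result<SearchResultValue> right = new Result<>(
    time, new SearchResultValue(Arrays.asList(new SearchHit("dim", "a", 3)))
);
// Equal hits merge into a single SearchHit("dim", "a") with count 5.
Result<SearchResultValue> merged = fn.apply(left, right);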
Use of io.druid.query.Result in project druid by druid-io.
From class TopNQueryEngine, method query:
public Sequence<Result<TopNResultValue>> query(final TopNQuery query, final StorageAdapter adapter) {
  if (adapter == null) {
    throw new SegmentMissingException(
        "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
    );
  }
  final List<Interval> queryIntervals = query.getQuerySegmentSpec().getIntervals();
  final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
  final Granularity granularity = query.getGranularity();
  final Function<Cursor, Result<TopNResultValue>> mapFn = getMapFn(query, adapter);
  Preconditions.checkArgument(queryIntervals.size() == 1, "Can only handle a single interval, got[%s]", queryIntervals);
  return Sequences.filter(
      Sequences.map(
          adapter.makeCursors(filter, queryIntervals.get(0), query.getVirtualColumns(), granularity, query.isDescending()),
          new Function<Cursor, Result<TopNResultValue>>() {
            @Override
            public Result<TopNResultValue> apply(Cursor input) {
              log.debug("Running over cursor[%s] at time[%s]", adapter.getInterval(), input.getTime());
              return mapFn.apply(input);
            }
          }
      ),
      Predicates.<Result<TopNResultValue>>notNull()
  );
}
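A hedged sketch of driving the engine directly. The TopNQueryBuilder calls below mirror how TopN queries are assembled elsewhere in the codebase; engine (a TopNQueryEngine) and segment (a Segment) are assumed to be in scope, and the dimension and metric names are hypothetical:

TopNQuery topNQuery = new TopNQueryBuilder()
    .dataSource("test_ds")
    .granularity(Granularities.ALL)
    .dimension("dim")    // hypothetical dimension
    .metric("rows")      // rank by this metric
    .threshold(10)
    .intervals("2000/3000")
    .aggregators(ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("rows", "rows")))
    .build();
Sequence<Result<TopNResultValue>> results = engine.query(topNQuery, segment.asStorageAdapter());
List<Result<TopNResultValue>> materialized =
    Sequences.toList(results, Lists.<Result<TopNResultValue>>newArrayList());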
Use of io.druid.query.Result in project druid by druid-io.
From class TopNQueryQueryToolChest, method makePostComputeManipulatorFn:
@Override
public Function<Result<TopNResultValue>, Result<TopNResultValue>> makePostComputeManipulatorFn(final TopNQuery query, final MetricManipulationFn fn) {
  return new Function<Result<TopNResultValue>, Result<TopNResultValue>>() {
    private final String dimension = query.getDimensionSpec().getOutputName();
    private final AggregatorFactory[] aggregatorFactories = query.getAggregatorSpecs().toArray(new AggregatorFactory[0]);
    private final String[] aggFactoryNames = extractFactoryName(query.getAggregatorSpecs());
    private final PostAggregator[] postAggregators = query.getPostAggregatorSpecs().toArray(new PostAggregator[0]);

    @Override
    public Result<TopNResultValue> apply(Result<TopNResultValue> result) {
      List<Map<String, Object>> serializedValues = Lists.newArrayList(
          Iterables.transform(
              result.getValue(),
              new Function<DimensionAndMetricValueExtractor, Map<String, Object>>() {
                @Override
                public Map<String, Object> apply(DimensionAndMetricValueExtractor input) {
                  final Map<String, Object> values = Maps.newHashMapWithExpectedSize(
                      aggregatorFactories.length + query.getPostAggregatorSpecs().size() + 1
                  );
                  for (int i = 0; i < aggFactoryNames.length; ++i) {
                    final String name = aggFactoryNames[i];
                    values.put(name, input.getMetric(name));
                  }
                  for (PostAggregator postAgg : postAggregators) {
                    Object calculatedPostAgg = input.getMetric(postAgg.getName());
                    if (calculatedPostAgg != null) {
                      values.put(postAgg.getName(), calculatedPostAgg);
                    } else {
                      values.put(postAgg.getName(), postAgg.compute(values));
                    }
                  }
                  for (int i = 0; i < aggFactoryNames.length; ++i) {
                    final String name = aggFactoryNames[i];
                    values.put(name, fn.manipulate(aggregatorFactories[i], input.getMetric(name)));
                  }
                  values.put(dimension, input.getDimensionValue(dimension));
                  return values;
                }
              }
          )
      );
      return new Result<>(result.getTimestamp(), new TopNResultValue(serializedValues));
    }
  };
}
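A hedged sketch of applying the manipulator. A MetricManipulationFn such as MetricManipulatorFns.finalizing() finalizes each aggregator's value (for example, converting an intermediate sketch into a numeric estimate); toolChest, query, and rawResult are assumed to be in scope:

Function<Result<TopNResultValue>, Result<TopNResultValue>> postFn =
    toolChest.makePostComputeManipulatorFn(query, MetricManipulatorFns.finalizing());
// Each per-bucket Result is rewritten with finalized metric values.
Result<TopNResultValue> finalized = postFn.apply(rawResult);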
Use of io.druid.query.Result in project druid by druid-io.
From class SelectQueryEngine, method process:
public Sequence<Result<SelectResultValue>> process(final SelectQuery query, final Segment segment) {
  final StorageAdapter adapter = segment.asStorageAdapter();
  if (adapter == null) {
    throw new ISE("Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
  }
  // At the point where this code is called, only one data source should exist.
  String dataSource = Iterables.getOnlyElement(query.getDataSource().getNames());
  final Iterable<DimensionSpec> dims;
  if (query.getDimensions() == null || query.getDimensions().isEmpty()) {
    dims = DefaultDimensionSpec.toSpec(adapter.getAvailableDimensions());
  } else {
    dims = query.getDimensions();
  }
  final Iterable<String> metrics;
  if (query.getMetrics() == null || query.getMetrics().isEmpty()) {
    metrics = adapter.getAvailableMetrics();
  } else {
    metrics = query.getMetrics();
  }
  List<Interval> intervals = query.getQuerySegmentSpec().getIntervals();
  Preconditions.checkArgument(intervals.size() == 1, "Can only handle a single interval, got[%s]", intervals);
  // The segment identifier should be rewritten with the given interval.
  final String segmentId = DataSegmentUtils.withInterval(dataSource, segment.getIdentifier(), intervals.get(0));
  final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
  return QueryRunnerHelper.makeCursorBasedQuery(
      adapter,
      query.getQuerySegmentSpec().getIntervals(),
      filter,
      query.getVirtualColumns(),
      query.isDescending(),
      query.getGranularity(),
      new Function<Cursor, Result<SelectResultValue>>() {
        @Override
        public Result<SelectResultValue> apply(Cursor cursor) {
          final SelectResultValueBuilder builder = new SelectResultValueBuilder(cursor.getTime(), query.getPagingSpec(), query.isDescending());
          final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME);
          final List<ColumnSelectorPlus<SelectColumnSelectorStrategy>> selectorPlusList = Arrays.asList(
              DimensionHandlerUtils.createColumnSelectorPluses(STRATEGY_FACTORY, Lists.newArrayList(dims), cursor)
          );
          for (DimensionSpec dimSpec : dims) {
            builder.addDimension(dimSpec.getOutputName());
          }
          final Map<String, ObjectColumnSelector> metSelectors = Maps.newHashMap();
          for (String metric : metrics) {
            final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric);
            metSelectors.put(metric, metricSelector);
            builder.addMetric(metric);
          }
          final PagingOffset offset = query.getPagingOffset(segmentId);
          cursor.advanceTo(offset.startDelta());
          int lastOffset = offset.startOffset();
          for (; !cursor.isDone() && offset.hasNext(); cursor.advance(), offset.next()) {
            final Map<String, Object> theEvent = singleEvent(EventHolder.timestampKey, timestampColumnSelector, selectorPlusList, metSelectors);
            builder.addEntry(new EventHolder(segmentId, lastOffset = offset.current(), theEvent));
          }
          builder.finished(segmentId, lastOffset);
          return builder.build();
        }
      }
  );
}
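A hedged sketch of consuming the sequence this method returns. Each Result wraps a SelectResultValue whose EventHolder entries carry the per-segment offsets, and getPagingIdentifiers() feeds the paging spec of the next request; engine, selectQuery, and segment are assumed to be in scope:

Sequence<Result<SelectResultValue>> seq = engine.process(selectQuery, segment);
for (Result<SelectResultValue> result : Sequences.toList(seq, Lists.<Result<SelectResultValue>>newArrayList())) {
  SelectResultValue value = result.getValue();
  for (EventHolder holder : value.getEvents()) {
    // Each holder pairs the segment id and offset with the raw event map.
    System.out.println(holder.getTimestamp() + " " + holder.getEvent());
  }
  // value.getPagingIdentifiers() maps segment id -> last read offset.
}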