Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class TimeseriesBinaryFn, method apply:
@Override
public Result<TimeseriesResultValue> apply(Result<TimeseriesResultValue> arg1, Result<TimeseriesResultValue> arg2) {
  if (arg1 == null) {
    return arg2;
  }
  if (arg2 == null) {
    return arg1;
  }
  TimeseriesResultValue arg1Val = arg1.getValue();
  TimeseriesResultValue arg2Val = arg2.getValue();
  Map<String, Object> retVal = new LinkedHashMap<String, Object>();
  for (AggregatorFactory factory : aggregations) {
    final String metricName = factory.getName();
    retVal.put(metricName, factory.combine(arg1Val.getMetric(metricName), arg2Val.getMetric(metricName)));
  }
  return (gran instanceof AllGranularity)
         ? new Result<TimeseriesResultValue>(arg1.getTimestamp(), new TimeseriesResultValue(retVal))
         : new Result<TimeseriesResultValue>(gran.bucketStart(arg1.getTimestamp()), new TimeseriesResultValue(retVal));
}
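As a quick illustration of the loop above, the following sketch (not from the Druid source; the metric name "rows" and the values are hypothetical) shows how AggregatorFactory.combine() merges two partial values for the same metric and time bucket:

    import io.druid.query.aggregation.AggregatorFactory;
    import io.druid.query.aggregation.LongSumAggregatorFactory;

    public class CombineSketch {
      public static void main(String[] args) {
        // Hypothetical long-sum metric; two partial values for the same bucket,
        // e.g. produced by two different segments or historical nodes.
        AggregatorFactory rows = new LongSumAggregatorFactory("rows", "rows");
        Object merged = rows.combine(10L, 32L);
        System.out.println(rows.getName() + " => " + merged);  // rows => 42
      }
    }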
Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class TimeseriesQueryQueryToolChest, method getCacheStrategy:
@Override
public CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> getCacheStrategy(final TimeseriesQuery query) {
  return new CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery>() {

    private final List<AggregatorFactory> aggs = query.getAggregatorSpecs();

    @Override
    public boolean isCacheable(TimeseriesQuery query, boolean willMergeRunners) {
      return true;
    }

    @Override
    public byte[] computeCacheKey(TimeseriesQuery query) {
      return new CacheKeyBuilder(TIMESERIES_QUERY)
          .appendBoolean(query.isDescending())
          .appendBoolean(query.isSkipEmptyBuckets())
          .appendCacheable(query.getGranularity())
          .appendCacheable(query.getDimensionsFilter())
          .appendCacheablesIgnoringOrder(query.getAggregatorSpecs())
          .appendCacheable(query.getVirtualColumns())
          .build();
    }

    @Override
    public TypeReference<Object> getCacheObjectClazz() {
      return OBJECT_TYPE_REFERENCE;
    }

    @Override
    public Function<Result<TimeseriesResultValue>, Object> prepareForCache() {
      return new Function<Result<TimeseriesResultValue>, Object>() {
        @Override
        public Object apply(final Result<TimeseriesResultValue> input) {
          TimeseriesResultValue results = input.getValue();
          final List<Object> retVal = Lists.newArrayListWithCapacity(1 + aggs.size());
          retVal.add(input.getTimestamp().getMillis());
          for (AggregatorFactory agg : aggs) {
            retVal.add(results.getMetric(agg.getName()));
          }
          return retVal;
        }
      };
    }

    @Override
    public Function<Object, Result<TimeseriesResultValue>> pullFromCache() {
      return new Function<Object, Result<TimeseriesResultValue>>() {

        private final Granularity granularity = query.getGranularity();

        @Override
        public Result<TimeseriesResultValue> apply(@Nullable Object input) {
          List<Object> results = (List<Object>) input;
          Map<String, Object> retVal = Maps.newLinkedHashMap();
          Iterator<AggregatorFactory> aggsIter = aggs.iterator();
          Iterator<Object> resultIter = results.iterator();
          DateTime timestamp = granularity.toDateTime(((Number) resultIter.next()).longValue());
          while (aggsIter.hasNext() && resultIter.hasNext()) {
            final AggregatorFactory factory = aggsIter.next();
            retVal.put(factory.getName(), factory.deserialize(resultIter.next()));
          }
          return new Result<TimeseriesResultValue>(timestamp, new TimeseriesResultValue(retVal));
        }
      };
    }
  };
}
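The cache entry produced by prepareForCache() is just a flat list: the bucket timestamp in milliseconds followed by one value per aggregator, in aggregator order; pullFromCache() walks the same order and re-hydrates each value with AggregatorFactory.deserialize(). A minimal sketch of that round trip, with made-up aggregator names, timestamp, and values (this is not the tool chest itself):

    import com.google.common.collect.ImmutableList;
    import com.google.common.collect.Lists;
    import io.druid.query.aggregation.AggregatorFactory;
    import io.druid.query.aggregation.CountAggregatorFactory;
    import io.druid.query.aggregation.LongSumAggregatorFactory;
    import org.joda.time.DateTime;

    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class CacheLayoutSketch {
      public static void main(String[] args) {
        List<AggregatorFactory> aggs = ImmutableList.<AggregatorFactory>of(
            new CountAggregatorFactory("count"),
            new LongSumAggregatorFactory("rows", "rows"));

        // Shape of one cached entry: [timestampMillis, value of "count", value of "rows"].
        List<Object> cached = Lists.newArrayList(
            (Object) new DateTime("2017-01-01T00:00:00Z").getMillis(), 3L, 42L);

        // Rebuild the per-metric map the way pullFromCache() does: timestamp first,
        // then one deserialized value per factory, in the same order.
        long timestampMillis = ((Number) cached.get(0)).longValue();
        Map<String, Object> metrics = new LinkedHashMap<>();
        for (int i = 0; i < aggs.size(); i++) {
          AggregatorFactory factory = aggs.get(i);
          metrics.put(factory.getName(), factory.deserialize(cached.get(i + 1)));
        }
        System.out.println(new DateTime(timestampMillis) + " " + metrics);
      }
    }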
Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class BaseTopNAlgorithm, method makeAggregators:
public static Aggregator[] makeAggregators(Cursor cursor, List<AggregatorFactory> aggregatorSpecs) {
  Aggregator[] aggregators = new Aggregator[aggregatorSpecs.size()];
  int aggregatorIndex = 0;
  for (AggregatorFactory spec : aggregatorSpecs) {
    aggregators[aggregatorIndex] = spec.factorize(cursor);
    ++aggregatorIndex;
  }
  return aggregators;
}
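makeAggregators() simply asks each factory for a heap Aggregator bound to the given cursor. The sketch below shows how a caller typically drives that array; it is not the TopN algorithm itself, and the cursor and aggregatorSpecs are assumed to come from a real segment scan:

    import io.druid.query.aggregation.Aggregator;
    import io.druid.query.aggregation.AggregatorFactory;
    import io.druid.query.topn.BaseTopNAlgorithm;
    import io.druid.segment.Cursor;

    import java.util.List;

    public class HeapAggregationSketch {
      // Advances every aggregator once per row the cursor exposes, then reads the totals.
      static void scan(Cursor cursor, List<AggregatorFactory> aggregatorSpecs) {
        Aggregator[] aggregators = BaseTopNAlgorithm.makeAggregators(cursor, aggregatorSpecs);
        while (!cursor.isDone()) {
          for (Aggregator aggregator : aggregators) {
            aggregator.aggregate();
          }
          cursor.advance();
        }
        for (int i = 0; i < aggregators.length; i++) {
          System.out.println(aggregatorSpecs.get(i).getName() + " = " + aggregators[i].get());
          aggregators[i].close();
        }
      }
    }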
Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class BaseTopNAlgorithm, method makeBufferAggregators:
protected static BufferAggregator[] makeBufferAggregators(Cursor cursor, List<AggregatorFactory> aggregatorSpecs) {
  BufferAggregator[] aggregators = new BufferAggregator[aggregatorSpecs.size()];
  int aggregatorIndex = 0;
  for (AggregatorFactory spec : aggregatorSpecs) {
    aggregators[aggregatorIndex] = spec.factorizeBuffered(cursor);
    ++aggregatorIndex;
  }
  return aggregators;
}
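makeBufferAggregators() is the off-heap twin: each BufferAggregator writes its intermediate state into a caller-supplied ByteBuffer rather than onto the Java heap, so the caller reserves getMaxIntermediateSize() bytes per factory and passes the same positions to init(), aggregate(), and get(). A hedged sketch under those assumptions (again not the TopN code; it calls factorizeBuffered() directly because makeBufferAggregators() is protected):

    import io.druid.query.aggregation.AggregatorFactory;
    import io.druid.query.aggregation.BufferAggregator;
    import io.druid.segment.Cursor;

    import java.nio.ByteBuffer;
    import java.util.List;

    public class BufferAggregationSketch {
      static void scan(Cursor cursor, List<AggregatorFactory> specs, ByteBuffer buf) {
        // Mirror makeBufferAggregators(): one BufferAggregator per factory, bound to the cursor.
        BufferAggregator[] aggregators = new BufferAggregator[specs.size()];
        int[] positions = new int[specs.size()];
        int offset = 0;
        for (int i = 0; i < specs.size(); i++) {
          aggregators[i] = specs.get(i).factorizeBuffered(cursor);
          positions[i] = offset;
          aggregators[i].init(buf, positions[i]);
          // Each factory declares how many bytes of scratch space it needs.
          offset += specs.get(i).getMaxIntermediateSize();
        }
        while (!cursor.isDone()) {
          for (int i = 0; i < aggregators.length; i++) {
            aggregators[i].aggregate(buf, positions[i]);
          }
          cursor.advance();
        }
        for (int i = 0; i < aggregators.length; i++) {
          System.out.println(specs.get(i).getName() + " = " + aggregators[i].get(buf, positions[i]));
          aggregators[i].close();
        }
      }
    }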
Use of io.druid.query.aggregation.AggregatorFactory in project druid by druid-io.
The class NumericTopNMetricSpec, method verifyPreconditions:
@Override
public void verifyPreconditions(List<AggregatorFactory> aggregatorSpecs, List<PostAggregator> postAggregatorSpecs) {
  Preconditions.checkNotNull(metric, "metric can't be null");
  Preconditions.checkNotNull(aggregatorSpecs, "aggregations cannot be null");
  Preconditions.checkArgument(aggregatorSpecs.size() > 0, "Must have at least one AggregatorFactory");
  final AggregatorFactory aggregator = Iterables.tryFind(aggregatorSpecs, new Predicate<AggregatorFactory>() {
    @Override
    public boolean apply(AggregatorFactory input) {
      return input.getName().equals(metric);
    }
  }).orNull();
  final PostAggregator postAggregator = Iterables.tryFind(postAggregatorSpecs, new Predicate<PostAggregator>() {
    @Override
    public boolean apply(PostAggregator input) {
      return input.getName().equals(metric);
    }
  }).orNull();
  Preconditions.checkArgument(
      aggregator != null || postAggregator != null,
      "Must have an AggregatorFactory or PostAggregator for metric[%s], gave[%s] and [%s]",
      metric, aggregatorSpecs, postAggregatorSpecs
  );
}
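In other words, the metric a NumericTopNMetricSpec sorts on must be the name of some aggregator or post-aggregator in the query. A small hedged sketch of the check passing and failing, with hypothetical metric names:

    import com.google.common.collect.ImmutableList;
    import io.druid.query.aggregation.AggregatorFactory;
    import io.druid.query.aggregation.CountAggregatorFactory;
    import io.druid.query.aggregation.PostAggregator;
    import io.druid.query.topn.NumericTopNMetricSpec;

    import java.util.List;

    public class VerifyPreconditionsSketch {
      public static void main(String[] args) {
        List<AggregatorFactory> aggs =
            ImmutableList.<AggregatorFactory>of(new CountAggregatorFactory("count"));
        List<PostAggregator> postAggs = ImmutableList.<PostAggregator>of();

        // Passes: "count" names one of the aggregators.
        new NumericTopNMetricSpec("count").verifyPreconditions(aggs, postAggs);

        // Throws IllegalArgumentException: nothing is named "missing".
        new NumericTopNMetricSpec("missing").verifyPreconditions(aggs, postAggs);
      }
    }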