Use of org.apache.druid.query.aggregation.post.ArithmeticPostAggregator in project druid by druid-io.
The class TimeseriesQueryQueryToolChestTest, method testResultLevelCacheKey.
@Test
public void testResultLevelCacheKey() {
final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")))
    .postAggregators(ImmutableList.of(
        new ArithmeticPostAggregator(
            "post",
            "+",
            ImmutableList.of(
                new FieldAccessPostAggregator(null, "metric1"),
                new FieldAccessPostAggregator(null, "metric0")))))
    .build();
final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")))
    .postAggregators(ImmutableList.of(
        new ArithmeticPostAggregator(
            "post",
            "/",
            ImmutableList.of(
                new FieldAccessPostAggregator(null, "metric1"),
                new FieldAccessPostAggregator(null, "metric0")))))
    .build();
// The two queries differ only in the post-aggregation operator ("+" vs "/"), which does not
// change per-segment aggregates, so the segment-level cache keys match...
Assert.assertTrue(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)));
// ...but the result-level cache keys must differ, because the final results do.
Assert.assertFalse(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query2).computeResultLevelCacheKey(query2)));
}
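For context, the following is a minimal sketch (not part of the test) of what an ArithmeticPostAggregator computes at result time, assuming the standard PostAggregator.compute(Map) contract and the same imports as the surrounding test; the field values are made up for illustration.

// Illustrative only: "post" divides metric1 by metric0 over already-aggregated values.
PostAggregator ratio = new ArithmeticPostAggregator(
    "post",
    "/",
    ImmutableList.of(
        new FieldAccessPostAggregator(null, "metric1"),
        new FieldAccessPostAggregator(null, "metric0")));
Map<String, Object> aggregated = ImmutableMap.of("metric0", 4L, "metric1", 2L);
Object value = ratio.compute(aggregated); // 0.5 (a Double); the "+" variant would yield 6.0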
Use of org.apache.druid.query.aggregation.post.ArithmeticPostAggregator in project druid by druid-io.
The class TimeseriesQueryQueryToolChestTest, method testResultLevelCacheKeyWithGrandTotal.
@Test
public void testResultLevelCacheKeyWithGrandTotal() {
final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")))
    .postAggregators(ImmutableList.of(
        new ArithmeticPostAggregator(
            "post",
            "+",
            ImmutableList.of(
                new FieldAccessPostAggregator(null, "metric1"),
                new FieldAccessPostAggregator(null, "metric0")))))
    .context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true))
    .build();
final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")))
    .postAggregators(ImmutableList.of(
        new ArithmeticPostAggregator(
            "post",
            "/",
            ImmutableList.of(
                new FieldAccessPostAggregator(null, "metric1"),
                new FieldAccessPostAggregator(null, "metric0")))))
    .context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true))
    .build();
Assert.assertTrue(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)));
Assert.assertFalse(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query2).computeResultLevelCacheKey(query2)));
}
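As a side note, the grand-total flag travels in the ordinary query context set via .context(...) above. A minimal sketch of reading it back from the built query, assuming the long-standing context accessor on Druid's BaseQuery (method name is an assumption here):

// Illustrative only: CTX_GRAND_TOTAL is just a boolean context entry on the query.
boolean grandTotal = query1.getContextBoolean(TimeseriesQuery.CTX_GRAND_TOTAL, false); // true for these queries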
Use of org.apache.druid.query.aggregation.post.ArithmeticPostAggregator in project druid by druid-io.
The class TopNBinaryFnBenchmark, method setUp.
@Override
protected void setUp() {
final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L);
final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");
final List<AggregatorFactory> aggregatorFactories = new ArrayList<>();
aggregatorFactories.add(new CountAggregatorFactory("rows"));
aggregatorFactories.add(new LongSumAggregatorFactory("index", "index"));
for (int i = 1; i < aggCount; i++) {
aggregatorFactories.add(new CountAggregatorFactory("rows" + i));
}
final List<PostAggregator> postAggregators = new ArrayList<>();
for (int i = 0; i < postAggCount; i++) {
postAggregators.add(new ArithmeticPostAggregator(
    "addrowsindexconstant" + i,
    "+",
    Lists.newArrayList(constant, rowsPostAgg, indexPostAgg)));
}
final DateTime currTime = DateTimes.nowUtc();
List<Map<String, Object>> list = new ArrayList<>();
for (int i = 0; i < threshold; i++) {
Map<String, Object> res = new HashMap<>();
res.put("testdim", "" + i);
res.put("rows", 1L);
for (int j = 0; j < aggCount; j++) {
res.put("rows" + j, 1L);
}
res.put("index", 1L);
list.add(res);
}
result1 = new Result<>(currTime, new TopNResultValue(list));
List<Map<String, Object>> list2 = new ArrayList<>();
for (int i = 0; i < threshold; i++) {
Map<String, Object> res = new HashMap<>();
res.put("testdim", "" + i);
res.put("rows", 2L);
for (int j = 0; j < aggCount; j++) {
res.put("rows" + j, 2L);
}
res.put("index", 2L);
list2.add(res);
}
result2 = new Result<>(currTime, new TopNResultValue(list2));
fn = new TopNBinaryFn(
    Granularities.ALL,
    new DefaultDimensionSpec("testdim", null),
    new NumericTopNMetricSpec("index"),
    100,
    aggregatorFactories,
    postAggregators);
}
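The benchmark body itself is not shown here; presumably it measures the merge step. A minimal sketch of applying the binary function built in setUp(), assuming TopNBinaryFn's usual apply(Result, Result) signature:

// Illustrative only: merge the two partial TopN results prepared above.
Result<TopNResultValue> merged = fn.apply(result1, result2);
// Each "testdim" value now carries the combined aggregates (e.g. rows = 1 + 2 = 3) plus the
// "addrowsindexconstant*" post-aggregations computed over the merged values.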
Use of org.apache.druid.query.aggregation.post.ArithmeticPostAggregator in project druid by druid-io.
The class TopNQueryQueryToolChestTest, method testComputeCacheKeyWithDifferentPostAgg.
@Test
public void testComputeCacheKeyWithDifferentPostAgg() {
final TopNQuery query1 = new TopNQuery(
    new TableDataSource("dummy"),
    VirtualColumns.EMPTY,
    new DefaultDimensionSpec("test", "test"),
    new NumericTopNMetricSpec("post"),
    3,
    new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
    null,
    Granularities.ALL,
    ImmutableList.of(new CountAggregatorFactory("metric1")),
    ImmutableList.of(new ConstantPostAggregator("post", 10)),
    null);
final TopNQuery query2 = new TopNQuery(
    new TableDataSource("dummy"),
    VirtualColumns.EMPTY,
    new DefaultDimensionSpec("test", "test"),
    new NumericTopNMetricSpec("post"),
    3,
    new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
    null,
    Granularities.ALL,
    ImmutableList.of(new CountAggregatorFactory("metric1")),
    ImmutableList.of(new ArithmeticPostAggregator(
        "post",
        "+",
        ImmutableList.of(
            new FieldAccessPostAggregator(null, "metric1"),
            new FieldAccessPostAggregator(null, "metric1")))),
    null);
final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy1 = new TopNQueryQueryToolChest(null, null).getCacheStrategy(query1);
final CacheStrategy<Result<TopNResultValue>, Object, TopNQuery> strategy2 = new TopNQueryQueryToolChest(null, null).getCacheStrategy(query2);
Assert.assertFalse(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2)));
Assert.assertFalse(Arrays.equals(strategy1.computeResultLevelCacheKey(query1), strategy2.computeResultLevelCacheKey(query2)));
}
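Unlike the timeseries tests above, both assertions expect different keys here: the topN metric spec orders on "post", so the post-aggregator already influences per-segment results. A minimal sketch (not part of the test) of how the two "post" definitions disagree even on the same aggregated row, assuming the standard PostAggregator.compute(Map) contract:

// Illustrative only: same aggregated row, different "post" values.
Map<String, Object> row = ImmutableMap.of("metric1", 7L);
Object constantPost = new ConstantPostAggregator("post", 10).compute(row);    // 10
Object arithmeticPost = new ArithmeticPostAggregator(
    "post",
    "+",
    ImmutableList.of(
        new FieldAccessPostAggregator(null, "metric1"),
        new FieldAccessPostAggregator(null, "metric1"))).compute(row);        // 14.0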
Use of org.apache.druid.query.aggregation.post.ArithmeticPostAggregator in project druid by druid-io.
The class GroupByQueryRunnerTest, method testSubqueryWithMultiColumnAggregators.
@Test
public void testSubqueryWithMultiColumnAggregators() {
// Cannot vectorize due to javascript functionality.
cannotVectorize();
final GroupByQuery subquery = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setDimFilter(new JavaScriptDimFilter(
        "market",
        "function(dim){ return true; }",
        null,
        JavaScriptConfig.getEnabledInstance()))
    .setAggregatorSpecs(
        QueryRunnerTestHelper.ROWS_COUNT,
        new DoubleSumAggregatorFactory("idx_subagg", "index"),
        new JavaScriptAggregatorFactory(
            "js_agg",
            Arrays.asList("index", "market"),
            "function(current, index, dim){return current + index + dim.length;}",
            "function(){return 0;}",
            "function(a,b){return a + b;}",
            JavaScriptConfig.getEnabledInstance()))
    .setPostAggregatorSpecs(Collections.singletonList(
        new ArithmeticPostAggregator(
            "idx_subpostagg",
            "+",
            Arrays.asList(
                new FieldAccessPostAggregator("the_idx_subagg", "idx_subagg"),
                new ConstantPostAggregator("thousand", 1000)))))
    .setHavingSpec(new HavingSpec()
    {
      private GroupByQuery query;

      @Override
      public byte[] getCacheKey()
      {
        return new byte[0];
      }

      @Override
      public void setQuery(GroupByQuery query)
      {
        this.query = query;
      }

      @Override
      public boolean eval(ResultRow row)
      {
        final String field = "idx_subpostagg";
        final int p = query.getResultRowSignature().indexOf(field);
        return (Rows.objectToNumber(field, row.get(p), true).floatValue() < 3800);
      }
    })
    .addOrderByColumn("alias")
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .build();
final GroupByQuery query = makeQueryBuilder()
    .setDataSource(subquery)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(new DefaultDimensionSpec("alias", "alias"))
    .setAggregatorSpecs(
        new LongSumAggregatorFactory("rows", "rows"),
        new LongSumAggregatorFactory("idx", "idx_subpostagg"),
        new DoubleSumAggregatorFactory("js_outer_agg", "js_agg"))
    .setPostAggregatorSpecs(Collections.singletonList(
        new ArithmeticPostAggregator(
            "idx_post",
            "+",
            Arrays.asList(
                new FieldAccessPostAggregator("the_idx_agg", "idx"),
                new ConstantPostAggregator("ten_thousand", 10000)))))
    .setLimitSpec(new DefaultLimitSpec(
        Collections.singletonList(new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.DESCENDING)),
        5))
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .build();
List<ResultRow> expectedResults = Arrays.asList(
    makeRow(query, "2011-04-01", "alias", "travel", "rows", 1L, "idx_post", 11119.0, "idx", 1119L, "js_outer_agg", 123.92274475097656),
    makeRow(query, "2011-04-01", "alias", "technology", "rows", 1L, "idx_post", 11078.0, "idx", 1078L, "js_outer_agg", 82.62254333496094),
    makeRow(query, "2011-04-01", "alias", "news", "rows", 1L, "idx_post", 11121.0, "idx", 1121L, "js_outer_agg", 125.58358001708984),
    makeRow(query, "2011-04-01", "alias", "health", "rows", 1L, "idx_post", 11120.0, "idx", 1120L, "js_outer_agg", 124.13470458984375),
    makeRow(query, "2011-04-01", "alias", "entertainment", "rows", 1L, "idx_post", 11158.0, "idx", 1158L, "js_outer_agg", 162.74722290039062));
// Subqueries are handled by the ToolChest
Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "subquery-multi-aggs");
}
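The expected values encode two layers of post-aggregation: the inner "idx_subpostagg" adds 1000 to "idx_subagg", and the outer "idx_post" adds 10000 to "idx". A minimal sketch (illustrative, not part of the test) checking the outer arithmetic against the first expected row:

// Illustrative only: the outer post-aggregator maps idx 1119 -> idx_post 11119.0 (the "travel" row).
PostAggregator idxPost = new ArithmeticPostAggregator(
    "idx_post",
    "+",
    Arrays.asList(
        new FieldAccessPostAggregator("the_idx_agg", "idx"),
        new ConstantPostAggregator("ten_thousand", 10000)));
Object value = idxPost.compute(ImmutableMap.of("idx", 1119L)); // 11119.0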