Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
From the class GroupByQueryRunnerTest, method testSubqueryWithPostAggregatorsAndHaving:
@Test
public void testSubqueryWithPostAggregatorsAndHaving()
{
  final GroupByQuery subquery = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx_subagg", "index"))
      .setPostAggregatorSpecs(
          Collections.singletonList(
              new ArithmeticPostAggregator(
                  "idx_subpostagg",
                  "+",
                  Arrays.asList(
                      new FieldAccessPostAggregator("the_idx_subagg", "idx_subagg"),
                      new ConstantPostAggregator("thousand", 1000)
                  )
              )
          )
      )
      .setHavingSpec(
          new HavingSpec()
          {
            private GroupByQuery query;

            @Override
            public void setQuery(GroupByQuery query)
            {
              this.query = query;
            }

            @Override
            public byte[] getCacheKey()
            {
              return new byte[0];
            }

            @Override
            public boolean eval(ResultRow row)
            {
              final String field = "idx_subpostagg";
              final int p = query.getResultRowSignature().indexOf(field);
              return (Rows.objectToNumber(field, row.get(p), true).floatValue() < 3800);
            }
          }
      )
      .addOrderByColumn("alias")
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();

  final GroupByQuery query = makeQueryBuilder()
      .setDataSource(subquery)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(new DefaultDimensionSpec("alias", "alias"))
      .setAggregatorSpecs(
          new LongSumAggregatorFactory("rows", "rows"),
          new LongSumAggregatorFactory("idx", "idx_subpostagg")
      )
      .setPostAggregatorSpecs(
          Collections.singletonList(
              new ArithmeticPostAggregator(
                  "idx_post",
                  "+",
                  Arrays.asList(
                      new FieldAccessPostAggregator("the_idx_agg", "idx"),
                      new ConstantPostAggregator("ten_thousand", 10000)
                  )
              )
          )
      )
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();

  List<ResultRow> expectedResults = Arrays.asList(
      makeRow(query, "2011-04-01", "alias", "automotive", "rows", 1L, "idx_post", 11135.0, "idx", 1135L),
      makeRow(query, "2011-04-01", "alias", "business", "rows", 1L, "idx_post", 11118.0, "idx", 1118L),
      makeRow(query, "2011-04-01", "alias", "entertainment", "rows", 1L, "idx_post", 11158.0, "idx", 1158L),
      makeRow(query, "2011-04-01", "alias", "health", "rows", 1L, "idx_post", 11120.0, "idx", 1120L),
      makeRow(query, "2011-04-01", "alias", "news", "rows", 1L, "idx_post", 11121.0, "idx", 1121L),
      makeRow(query, "2011-04-01", "alias", "technology", "rows", 1L, "idx_post", 11078.0, "idx", 1078L),
      makeRow(query, "2011-04-01", "alias", "travel", "rows", 1L, "idx_post", 11119.0, "idx", 1119L),
      makeRow(query, "2011-04-02", "alias", "automotive", "rows", 1L, "idx_post", 11147.0, "idx", 1147L),
      makeRow(query, "2011-04-02", "alias", "business", "rows", 1L, "idx_post", 11112.0, "idx", 1112L),
      makeRow(query, "2011-04-02", "alias", "entertainment", "rows", 1L, "idx_post", 11166.0, "idx", 1166L),
      makeRow(query, "2011-04-02", "alias", "health", "rows", 1L, "idx_post", 11113.0, "idx", 1113L),
      makeRow(query, "2011-04-02", "alias", "mezzanine", "rows", 3L, "idx_post", 13447.0, "idx", 3447L),
      makeRow(query, "2011-04-02", "alias", "news", "rows", 1L, "idx_post", 11114.0, "idx", 1114L),
      makeRow(query, "2011-04-02", "alias", "premium", "rows", 3L, "idx_post", 13505.0, "idx", 3505L),
      makeRow(query, "2011-04-02", "alias", "technology", "rows", 1L, "idx_post", 11097.0, "idx", 1097L),
      makeRow(query, "2011-04-02", "alias", "travel", "rows", 1L, "idx_post", 11126.0, "idx", 1126L)
  );

  // Subqueries are handled by the ToolChest
  Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "subquery-postaggs");
}
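For reference, a minimal sketch of evaluating the post-aggregator composition used in the subquery above; the Map-based compute() call and the literal row value are illustrative assumptions, not part of the test:

PostAggregator plusThousand = new ArithmeticPostAggregator(
    "idx_subpostagg",
    "+",
    Arrays.asList(
        new FieldAccessPostAggregator("the_idx_subagg", "idx_subagg"),
        new ConstantPostAggregator("thousand", 1000)
    )
);
// compute() reads "idx_subagg" from the row map and adds the constant: 135 + 1000 = 1135.0
Object value = plusThousand.compute(ImmutableMap.of("idx_subagg", 135L));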
Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
From the class AggregatorUtilTest, method testPruneDependentPostAgg:
@Test
public void testPruneDependentPostAgg()
{
  PostAggregator agg1 = new ArithmeticPostAggregator(
      "abc",
      "+",
      Lists.newArrayList(new ConstantPostAggregator("1", 1L), new ConstantPostAggregator("2", 2L))
  );
  PostAggregator dependency1 = new ArithmeticPostAggregator(
      "dep1",
      "+",
      Lists.newArrayList(new ConstantPostAggregator("1", 1L), new ConstantPostAggregator("4", 4L))
  );
  PostAggregator agg2 = new FieldAccessPostAggregator("def", "def");
  PostAggregator dependency2 = new FieldAccessPostAggregator("dep2", "dep2");
  PostAggregator aggregator = new ArithmeticPostAggregator(
      "finalAgg",
      "+",
      Lists.newArrayList(new FieldAccessPostAggregator("dep1", "dep1"), new FieldAccessPostAggregator("dep2", "dep2"))
  );
  List<PostAggregator> prunedAgg = AggregatorUtil.pruneDependentPostAgg(
      Lists.newArrayList(agg1, dependency1, agg2, dependency2, aggregator),
      aggregator.getName()
  );
  Assert.assertEquals(Lists.newArrayList(dependency1, dependency2, aggregator), prunedAgg);
}
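pruneDependentPostAgg keeps only the post-aggregators that the named one depends on, directly or transitively, plus the named one itself: agg1 ("abc") and agg2 ("def") are dropped because "finalAgg" never references them. A smaller sketch under the same reading of the API (the names here are illustrative):

PostAggregator unused = new ConstantPostAggregator("unused", 1L);
PostAggregator dep = new FieldAccessPostAggregator("dep", "dep");
PostAggregator target = new ArithmeticPostAggregator(
    "target",
    "+",
    Lists.newArrayList(new FieldAccessPostAggregator("dep", "dep"), new ConstantPostAggregator("one", 1L))
);
// "unused" is pruned; "dep" survives because "target" reads the field named "dep".
List<PostAggregator> pruned = AggregatorUtil.pruneDependentPostAgg(
    Lists.newArrayList(unused, dep, target),
    target.getName()
);
// pruned == [dep, target]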
Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
From the class GroupByQueryQueryToolChestTest, method testResultSerde:
@Test
public void testResultSerde() throws Exception
{
  final GroupByQuery query = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
      .setDimensions(Collections.singletonList(DefaultDimensionSpec.of("test")))
      .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT))
      .setPostAggregatorSpecs(Collections.singletonList(new ConstantPostAggregator("post", 10)))
      .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
      .build();

  final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest(null);
  final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
  final ObjectMapper arraysObjectMapper = toolChest.decorateObjectMapper(
      objectMapper,
      query.withOverriddenContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, true))
  );
  final ObjectMapper mapsObjectMapper = toolChest.decorateObjectMapper(
      objectMapper,
      query.withOverriddenContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_ARRAY_RESULT_ROWS, false))
  );

  final Object[] rowObjects = {DateTimes.of("2000").getMillis(), "foo", 100, 10.0};
  final ResultRow resultRow = ResultRow.of(rowObjects);

  Assert.assertEquals(
      resultRow,
      arraysObjectMapper.readValue(
          StringUtils.format("[%s, \"foo\", 100, 10.0]", DateTimes.of("2000").getMillis()),
          ResultRow.class
      )
  );
  TestHelper.assertRow(
      "",
      resultRow,
      arraysObjectMapper.readValue(
          StringUtils.format(
              "{\"version\":\"v1\","
              + "\"timestamp\":\"%s\","
              + "\"event\":"
              + " {\"test\":\"foo\", \"rows\":100, \"post\":10.0}"
              + "}",
              DateTimes.of("2000")
          ),
          ResultRow.class
      )
  );
  Assert.assertArrayEquals(rowObjects, objectMapper.readValue(arraysObjectMapper.writeValueAsBytes(resultRow), Object[].class));
  Assert.assertEquals(resultRow.toMapBasedRow(query), objectMapper.readValue(mapsObjectMapper.writeValueAsBytes(resultRow), Row.class));
  Assert.assertEquals("arrays read arrays", resultRow, arraysObjectMapper.readValue(arraysObjectMapper.writeValueAsBytes(resultRow), ResultRow.class));
  Assert.assertEquals("arrays read maps", resultRow, arraysObjectMapper.readValue(mapsObjectMapper.writeValueAsBytes(resultRow), ResultRow.class));
  Assert.assertEquals("maps read arrays", resultRow, mapsObjectMapper.readValue(arraysObjectMapper.writeValueAsBytes(resultRow), ResultRow.class));
  Assert.assertEquals("maps read maps", resultRow, mapsObjectMapper.readValue(mapsObjectMapper.writeValueAsBytes(resultRow), ResultRow.class));
}
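The four "X read Y" assertions cover every combination of the two wire formats that decorateObjectMapper configures. Reusing resultRow and the decorated mappers from the test, a sketch of the two shapes; the JSON in the comments is inferred from the strings the test parses, not from a formal spec:

// Array-based rows (CTX_KEY_ARRAY_RESULT_ROWS = true) are positional:
// timestamp millis, then dimensions, aggregators, and post-aggregators in signature order.
byte[] asArrays = arraysObjectMapper.writeValueAsBytes(resultRow);
// produces: [946684800000,"foo",100,10.0]

// Map-based rows use the classic Row shape with named event fields.
byte[] asMaps = mapsObjectMapper.writeValueAsBytes(resultRow);
// produces: {"version":"v1","timestamp":"2000-01-01T00:00:00.000Z","event":{"test":"foo","rows":100,"post":10.0}}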
Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
From the class TimeseriesQueryQueryToolChestTest, method testCacheStrategy:
@Test
public void testCacheStrategy() throws Exception
{
  CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> strategy = TOOL_CHEST.getCacheStrategy(
      new TimeseriesQuery(
          new TableDataSource("dummy"),
          new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
          descending,
          VirtualColumns.EMPTY,
          null,
          Granularities.ALL,
          ImmutableList.of(
              new CountAggregatorFactory("metric1"),
              new LongSumAggregatorFactory("metric0", "metric0"),
              new StringLastAggregatorFactory("complexMetric", "test", null, null)
          ),
          ImmutableList.of(new ConstantPostAggregator("post", 10)),
          0,
          null
      )
  );

  // test timestamps that result in integer size millis
  final Result<TimeseriesResultValue> result1 = new Result<>(
      DateTimes.utc(123L),
      new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", new SerializablePairLongString(123L, "val1")))
  );
  Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result1);
  ObjectMapper objectMapper = TestHelper.makeJsonMapper();
  Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
  Result<TimeseriesResultValue> fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
  Assert.assertEquals(result1, fromCacheResult);

  // test timestamps that result in integer size millis
  final Result<TimeseriesResultValue> result2 = new Result<>(
      DateTimes.utc(123L),
      new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", "val1", "post", 10))
  );
  Object preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result2);
  Object fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
  Result<TimeseriesResultValue> fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
  Assert.assertEquals(result2, fromResultLevelCacheRes);

  // null timestamp similar to grandTotal
  final Result<TimeseriesResultValue> result3 = new Result<>(
      null,
      new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", "val1", "post", 10))
  );
  preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result3);
  fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
  fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
  Assert.assertEquals(result3, fromResultLevelCacheRes);
}
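A note on the API used above, inferred from the CacheStrategy interface rather than stated by the test: the boolean passed to prepareForCache and pullFromCache selects the result-level cache, and prepareForSegmentLevelCache() is shorthand for prepareForCache(false). That is why result2 and result3 carry the computed "post" value while the segment-level result1 does not; post-aggregated values only reach the result-level cache. A sketch of the equivalent segment-level round trip, under that assumption:

// Assuming prepareForSegmentLevelCache()/pullFromSegmentLevelCache() delegate to the
// boolean variants, this round trip is equivalent to the result1 assertions above.
Object segmentEntry = strategy.prepareForCache(false).apply(result1);
Result<TimeseriesResultValue> roundTripped = strategy.pullFromCache(false).apply(
    objectMapper.readValue(objectMapper.writeValueAsBytes(segmentEntry), strategy.getCacheObjectClazz())
);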
Use of org.apache.druid.query.aggregation.post.ConstantPostAggregator in project druid by druid-io.
From the class TopNBinaryFnBenchmark, method setUp:
@Override
protected void setUp()
{
  final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L);
  final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
  final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");

  final List<AggregatorFactory> aggregatorFactories = new ArrayList<>();
  aggregatorFactories.add(new CountAggregatorFactory("rows"));
  aggregatorFactories.add(new LongSumAggregatorFactory("index", "index"));
  for (int i = 1; i < aggCount; i++) {
    aggregatorFactories.add(new CountAggregatorFactory("rows" + i));
  }

  final List<PostAggregator> postAggregators = new ArrayList<>();
  for (int i = 0; i < postAggCount; i++) {
    postAggregators.add(
        new ArithmeticPostAggregator("addrowsindexconstant" + i, "+", Lists.newArrayList(constant, rowsPostAgg, indexPostAgg))
    );
  }
  final DateTime currTime = DateTimes.nowUtc();

  List<Map<String, Object>> list = new ArrayList<>();
  for (int i = 0; i < threshold; i++) {
    Map<String, Object> res = new HashMap<>();
    res.put("testdim", "" + i);
    res.put("rows", 1L);
    for (int j = 0; j < aggCount; j++) {
      res.put("rows" + j, 1L);
    }
    res.put("index", 1L);
    list.add(res);
  }
  result1 = new Result<>(currTime, new TopNResultValue(list));

  List<Map<String, Object>> list2 = new ArrayList<>();
  for (int i = 0; i < threshold; i++) {
    Map<String, Object> res = new HashMap<>();
    res.put("testdim", "" + i);
    res.put("rows", 2L);
    for (int j = 0; j < aggCount; j++) {
      res.put("rows" + j, 2L);
    }
    res.put("index", 2L);
    list2.add(res);
  }
  result2 = new Result<>(currTime, new TopNResultValue(list2));

  fn = new TopNBinaryFn(
      Granularities.ALL,
      new DefaultDimensionSpec("testdim", null),
      new NumericTopNMetricSpec("index"),
      100,
      aggregatorFactories,
      postAggregators
  );
}
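The benchmark body itself is not shown here; presumably it exercises fn on the two prepared results. Merging them is a single call, assuming TopNBinaryFn's BinaryFn-style apply signature:

// Combines the two partial TopN results for the same time bucket: rows with matching
// "testdim" values are merged via the aggregator factories, and the post-aggregators
// (including the ConstantPostAggregator above) are recomputed on the merged rows.
Result<TopNResultValue> merged = fn.apply(result1, result2);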