Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
The class CalciteArraysQueryTest, method testArrayLength.
@Test
public void testArrayLength() throws Exception {
// Cannot vectorize due to usage of expressions.
cannotVectorize();
testQuery("SELECT dim1, ARRAY_LENGTH(dim3), SUM(cnt) FROM druid.numfoo GROUP BY 1, 2 ORDER BY 2 DESC", ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE3).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setVirtualColumns(expressionVirtualColumn("v0", "array_length(\"dim3\")", ColumnType.LONG)).setDimensions(dimensions(new DefaultDimensionSpec("dim1", "_d0", ColumnType.STRING), new DefaultDimensionSpec("v0", "_d1", ColumnType.LONG))).setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))).setLimitSpec(new DefaultLimitSpec(ImmutableList.of(new OrderByColumnSpec("_d1", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC)), Integer.MAX_VALUE)).setContext(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { "", 2, 1L }, new Object[] { "10.1", 2, 1L }, useDefault ? new Object[] { "2", 1, 1L } : new Object[] { "1", 1, 1L }, useDefault ? new Object[] { "1", 0, 1L } : new Object[] { "2", 1, 1L }, new Object[] { "abc", useDefault ? 0 : null, 1L }, new Object[] { "def", useDefault ? 0 : null, 1L }));
}
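For readers new to the aggregator this page is indexing: LongSumAggregatorFactory takes an output name and the long-typed input column to sum. A minimal standalone sketch, mirroring the names used in the test above (illustrative only, not part of the test suite):

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;

// Sums the long column "cnt"; the total is reported under the output name "a0".
AggregatorFactory sumOfCnt = new LongSumAggregatorFactory("a0", "cnt");

In the query above, this produces the SUM(cnt) column, while ARRAY_LENGTH(dim3) is planned as the virtual column v0.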
Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
The class CalciteArraysQueryTest, method testArrayConcat.
@Test
public void testArrayConcat() throws Exception {
// Cannot vectorize due to usage of expressions.
cannotVectorize();
ImmutableList<Object[]> results;
if (useDefault) {
results = ImmutableList.of(
    new Object[] { null, 3L },
    new Object[] { ImmutableList.of("a", "b", "a", "b"), 1L },
    new Object[] { ImmutableList.of("b", "c", "b", "c"), 1L },
    new Object[] { ImmutableList.of("d", "d"), 1L }
);
} else {
results = ImmutableList.of(
    new Object[] { null, 2L },
    new Object[] { ImmutableList.of("", ""), 1L },
    new Object[] { ImmutableList.of("a", "b", "a", "b"), 1L },
    new Object[] { ImmutableList.of("b", "c", "b", "c"), 1L },
    new Object[] { ImmutableList.of("d", "d"), 1L }
);
}
testQuery("SELECT ARRAY_CONCAT(dim3, dim3), SUM(cnt) FROM druid.numfoo GROUP BY 1 ORDER BY 2 DESC", QUERY_CONTEXT_NO_STRINGIFY_ARRAY, ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE3).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setVirtualColumns(expressionVirtualColumn("v0", "array_concat(\"dim3\",\"dim3\")", ColumnType.STRING_ARRAY)).setDimensions(dimensions(new DefaultDimensionSpec("v0", "_d0", ColumnType.STRING_ARRAY))).setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))).setLimitSpec(new DefaultLimitSpec(ImmutableList.of(new OrderByColumnSpec("a0", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC)), Integer.MAX_VALUE)).setContext(QUERY_CONTEXT_NO_STRINGIFY_ARRAY).build()), results);
}
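Note the context: this test passes QUERY_CONTEXT_NO_STRINGIFY_ARRAY so that grouped array values come back as real arrays (hence the ImmutableList entries in the expected rows) rather than JSON-encoded strings. A minimal sketch of what such a context might contain, assuming the planner context key sqlStringifyArrays used by recent Druid releases (verify the key against your version):

import java.util.Map;
import com.google.common.collect.ImmutableMap;

// Hypothetical stand-in for the test suite's QUERY_CONTEXT_NO_STRINGIFY_ARRAY constant.
Map<String, Object> noStringifyContext = ImmutableMap.of("sqlStringifyArrays", false);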
Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
The class TimeseriesQueryQueryToolChestTest, method testCacheStrategy.
@Test
public void testCacheStrategy() throws Exception {
CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> strategy = TOOL_CHEST.getCacheStrategy(
    new TimeseriesQuery(
        new TableDataSource("dummy"),
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
        descending,
        VirtualColumns.EMPTY,
        null,
        Granularities.ALL,
        ImmutableList.of(
            new CountAggregatorFactory("metric1"),
            new LongSumAggregatorFactory("metric0", "metric0"),
            new StringLastAggregatorFactory("complexMetric", "test", null, null)
        ),
        ImmutableList.of(new ConstantPostAggregator("post", 10)),
        0,
        null
    )
);
// Test timestamps that result in integer-sized millis.
final Result<TimeseriesResultValue> result1 = new Result<>(
    DateTimes.utc(123L),
    new TimeseriesResultValue(ImmutableMap.of(
        "metric1", 2,
        "metric0", 3,
        "complexMetric", new SerializablePairLongString(123L, "val1")
    ))
);
Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result1);
ObjectMapper objectMapper = TestHelper.makeJsonMapper();
Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
Result<TimeseriesResultValue> fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
Assert.assertEquals(result1, fromCacheResult);
// Test timestamps that result in integer-sized millis.
final Result<TimeseriesResultValue> result2 = new Result<>(
    DateTimes.utc(123L),
    new TimeseriesResultValue(ImmutableMap.of(
        "metric1", 2,
        "metric0", 3,
        "complexMetric", "val1",
        "post", 10
    ))
);
Object preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result2);
Object fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
Result<TimeseriesResultValue> fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
Assert.assertEquals(result2, fromResultLevelCacheRes);
// Null timestamp, similar to a grandTotal result.
final Result<TimeseriesResultValue> result3 = new Result<>(
    null,
    new TimeseriesResultValue(ImmutableMap.of(
        "metric1", 2,
        "metric0", 3,
        "complexMetric", "val1",
        "post", 10
    ))
);
preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result3);
fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
Assert.assertEquals(result3, fromResultLevelCacheRes);
}
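All three assertions exercise the same round trip: prepare a result for caching, serialize it with Jackson, deserialize it, and pull it back into a Result. Distilled into one sequence (a sketch assuming the strategy, objectMapper, and result1 from the test are in scope):

// Segment-level round trip. Note that the segment-level value (result1) carries no
// post-aggregated entries, while the result-level path prepareForCache(true) used for
// result2 and result3 round-trips the computed "post" entry as well.
Object prepared = strategy.prepareForSegmentLevelCache().apply(result1);
byte[] bytes = objectMapper.writeValueAsBytes(prepared);
Object restored = objectMapper.readValue(bytes, strategy.getCacheObjectClazz());
Result<TimeseriesResultValue> roundTripped = strategy.pullFromSegmentLevelCache().apply(restored);
Assert.assertEquals(result1, roundTripped);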
Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
The class TimeseriesQueryQueryToolChestTest, method testResultLevelCacheKey.
@Test
public void testResultLevelCacheKey() {
final TimeseriesQuery query1 = Druids
    .newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")
    ))
    .postAggregators(ImmutableList.of(new ArithmeticPostAggregator(
        "post",
        "+",
        ImmutableList.of(
            new FieldAccessPostAggregator(null, "metric1"),
            new FieldAccessPostAggregator(null, "metric0")
        )
    )))
    .build();
final TimeseriesQuery query2 = Druids
    .newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")
    ))
    .postAggregators(ImmutableList.of(new ArithmeticPostAggregator(
        "post",
        "/",
        ImmutableList.of(
            new FieldAccessPostAggregator(null, "metric1"),
            new FieldAccessPostAggregator(null, "metric0")
        )
    )))
    .build();
// The two queries differ only in their post-aggregator ("+" vs "/"). Segment-level caching
// ignores post-aggregators, so the segment-level keys match while the result-level keys do not.
Assert.assertTrue(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)
));
Assert.assertFalse(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query2).computeResultLevelCacheKey(query2)
));
}
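To see the same asymmetry from the other direction, a query that shares query1's post-aggregator should match it at both cache levels. A hypothetical follow-on check, not part of the original test, with query1Copy built exactly like query1 ("+" post-aggregator included):

final TimeseriesQuery query1Copy = Druids
    .newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")
    ))
    .postAggregators(ImmutableList.of(new ArithmeticPostAggregator(
        "post",
        "+",
        ImmutableList.of(
            new FieldAccessPostAggregator(null, "metric1"),
            new FieldAccessPostAggregator(null, "metric0")
        )
    )))
    .build();
// Identical post-aggregators, so the result-level keys should now match as well.
Assert.assertTrue(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query1Copy).computeResultLevelCacheKey(query1Copy)
));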
Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.
The class TimeseriesQueryQueryToolChestTest, method testResultLevelCacheKeyWithGrandTotal.
@Test
public void testResultLevelCacheKeyWithGrandTotal() {
final TimeseriesQuery query1 = Druids
    .newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")
    ))
    .postAggregators(ImmutableList.of(new ArithmeticPostAggregator(
        "post",
        "+",
        ImmutableList.of(
            new FieldAccessPostAggregator(null, "metric1"),
            new FieldAccessPostAggregator(null, "metric0")
        )
    )))
    .context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true))
    .build();
final TimeseriesQuery query2 = Druids
    .newTimeseriesQueryBuilder()
    .dataSource("dummy")
    .intervals("2015-01-01/2015-01-02")
    .descending(descending)
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(
        new LongSumAggregatorFactory("metric0", "metric0"),
        new CountAggregatorFactory("metric1")
    ))
    .postAggregators(ImmutableList.of(new ArithmeticPostAggregator(
        "post",
        "/",
        ImmutableList.of(
            new FieldAccessPostAggregator(null, "metric1"),
            new FieldAccessPostAggregator(null, "metric0")
        )
    )))
    .context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true))
    .build();
// As in testResultLevelCacheKey, only the post-aggregator ("+" vs "/") differs; the grandTotal
// context flag is set on both queries. The segment-level keys therefore still match while the
// result-level keys do not.
Assert.assertTrue(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)
));
Assert.assertFalse(Arrays.equals(
    TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1),
    TOOL_CHEST.getCacheStrategy(query2).computeResultLevelCacheKey(query2)
));
}
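The grandTotal flag lives in the query context (TimeseriesQuery.CTX_GRAND_TOTAL) and is read back via isGrandTotal(), which causes the broker to append a merged grand-total row to the results. A small hypothetical check, not part of the original test, to make the context plumbing explicit:

// Both queries above set CTX_GRAND_TOTAL ("grandTotal") to true in their context.
Assert.assertTrue(query1.isGrandTotal());
Assert.assertTrue(query2.isGrandTotal());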