
Example 61 with LongSumAggregatorFactory

Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.

From the class CalciteArraysQueryTest, method testArrayLength.

@Test
public void testArrayLength() throws Exception {
    // Cannot vectorize due to usage of expressions.
    cannotVectorize();
    testQuery("SELECT dim1, ARRAY_LENGTH(dim3), SUM(cnt) FROM druid.numfoo GROUP BY 1, 2 ORDER BY 2 DESC", ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE3).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setVirtualColumns(expressionVirtualColumn("v0", "array_length(\"dim3\")", ColumnType.LONG)).setDimensions(dimensions(new DefaultDimensionSpec("dim1", "_d0", ColumnType.STRING), new DefaultDimensionSpec("v0", "_d1", ColumnType.LONG))).setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))).setLimitSpec(new DefaultLimitSpec(ImmutableList.of(new OrderByColumnSpec("_d1", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC)), Integer.MAX_VALUE)).setContext(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { "", 2, 1L }, new Object[] { "10.1", 2, 1L }, useDefault ? new Object[] { "2", 1, 1L } : new Object[] { "1", 1, 1L }, useDefault ? new Object[] { "1", 0, 1L } : new Object[] { "2", 1, 1L }, new Object[] { "abc", useDefault ? 0 : null, 1L }, new Object[] { "def", useDefault ? 0 : null, 1L }));
}
Also used: OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec), DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)
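
For comparison, here is a minimal sketch of how the equivalent native group-by could be built directly, without the BaseCalciteQueryTest helpers (querySegmentSpec, dimensions, aggregators, expressionVirtualColumn) used above. The "numfoo" datasource string, the empty ExprMacroTable, and the simplified output column names are assumptions for illustration, not taken from the test.

// A sketch only, not from the test above: the native group-by the SQL is planned into.
import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;

public class ArrayLengthGroupBySketch
{
    public static GroupByQuery build()
    {
        return GroupByQuery.builder()
            .setDataSource("numfoo")
            .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.ETERNITY)))
            .setGranularity(Granularities.ALL)
            // ARRAY_LENGTH(dim3) becomes a LONG-typed expression virtual column
            .setVirtualColumns(new ExpressionVirtualColumn("v0", "array_length(\"dim3\")", ColumnType.LONG, ExprMacroTable.nil()))
            .setDimensions(ImmutableList.<DimensionSpec>of(
                new DefaultDimensionSpec("dim1", "d0", ColumnType.STRING),
                new DefaultDimensionSpec("v0", "d1", ColumnType.LONG)))
            // SUM(cnt) maps onto a LongSumAggregatorFactory reading the "cnt" column
            .setAggregatorSpecs(ImmutableList.of(new LongSumAggregatorFactory("a0", "cnt")))
            .build();
    }
}

The key point the test verifies is that SQL's SUM(cnt) is planned as a LongSumAggregatorFactory over the cnt column, while ARRAY_LENGTH(dim3) becomes a LONG-typed expression virtual column that the query then groups and orders on.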

Example 62 with LongSumAggregatorFactory

Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.

From the class CalciteArraysQueryTest, method testArrayConcat.

@Test
public void testArrayConcat() throws Exception {
    // Cannot vectorize due to usage of expressions.
    cannotVectorize();
    ImmutableList<Object[]> results;
    if (useDefault) {
        results = ImmutableList.of(new Object[] { null, 3L }, new Object[] { ImmutableList.of("a", "b", "a", "b"), 1L }, new Object[] { ImmutableList.of("b", "c", "b", "c"), 1L }, new Object[] { ImmutableList.of("d", "d"), 1L });
    } else {
        results = ImmutableList.of(new Object[] { null, 2L }, new Object[] { ImmutableList.of("", ""), 1L }, new Object[] { ImmutableList.of("a", "b", "a", "b"), 1L }, new Object[] { ImmutableList.of("b", "c", "b", "c"), 1L }, new Object[] { ImmutableList.of("d", "d"), 1L });
    }
    testQuery("SELECT ARRAY_CONCAT(dim3, dim3), SUM(cnt) FROM druid.numfoo GROUP BY 1 ORDER BY 2 DESC", QUERY_CONTEXT_NO_STRINGIFY_ARRAY, ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE3).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setVirtualColumns(expressionVirtualColumn("v0", "array_concat(\"dim3\",\"dim3\")", ColumnType.STRING_ARRAY)).setDimensions(dimensions(new DefaultDimensionSpec("v0", "_d0", ColumnType.STRING_ARRAY))).setAggregatorSpecs(aggregators(new LongSumAggregatorFactory("a0", "cnt"))).setLimitSpec(new DefaultLimitSpec(ImmutableList.of(new OrderByColumnSpec("a0", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC)), Integer.MAX_VALUE)).setContext(QUERY_CONTEXT_NO_STRINGIFY_ARRAY).build()), results);
}
Also used: OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec), DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)
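
The two expected-result branches above differ only in Druid's null-handling mode: the test's useDefault flag corresponds to legacy default-value mode, in which empty strings behave like nulls. A small standalone sketch of that branching, assuming NullHandling.replaceWithDefault() as the equivalent of useDefault (the helper class and method names here are hypothetical):

// A sketch, not part of the test, of how the expected rows branch on null-handling mode.
import com.google.common.collect.ImmutableList;
import org.apache.druid.common.config.NullHandling;

public class NullHandlingBranchSketch
{
    public static ImmutableList<Object[]> expectedFirstRows()
    {
        if (NullHandling.replaceWithDefault()) {
            // legacy default-value mode: '' is treated like null, so the row with an
            // empty dim3 folds into the null group and its count rises to 3
            return ImmutableList.of(new Object[]{null, 3L}, new Object[]{ImmutableList.of("a", "b", "a", "b"), 1L});
        }
        // SQL-compatible mode: the concatenated empty strings stay their own group
        return ImmutableList.of(new Object[]{null, 2L}, new Object[]{ImmutableList.of("", ""), 1L});
    }
}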

Example 63 with LongSumAggregatorFactory

Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.

From the class TimeseriesQueryQueryToolChestTest, method testCacheStrategy.

@Test
public void testCacheStrategy() throws Exception {
    CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> strategy = TOOL_CHEST.getCacheStrategy(new TimeseriesQuery(new TableDataSource("dummy"), new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), descending, VirtualColumns.EMPTY, null, Granularities.ALL, ImmutableList.of(new CountAggregatorFactory("metric1"), new LongSumAggregatorFactory("metric0", "metric0"), new StringLastAggregatorFactory("complexMetric", "test", null, null)), ImmutableList.of(new ConstantPostAggregator("post", 10)), 0, null));
    // test timestamps that result in integer size millis
    final Result<TimeseriesResultValue> result1 = new Result<>(DateTimes.utc(123L), new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", new SerializablePairLongString(123L, "val1"))));
    Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result1);
    ObjectMapper objectMapper = TestHelper.makeJsonMapper();
    Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    Result<TimeseriesResultValue> fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
    Assert.assertEquals(result1, fromCacheResult);
    // test timestamps that result in integer size millis
    final Result<TimeseriesResultValue> result2 = new Result<>(DateTimes.utc(123L), new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", "val1", "post", 10)));
    Object preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result2);
    Object fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
    Result<TimeseriesResultValue> fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
    Assert.assertEquals(result2, fromResultLevelCacheRes);
    // null timestamp similar to grandTotal
    final Result<TimeseriesResultValue> result3 = new Result<>(null, new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", "val1", "post", 10)));
    preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result3);
    fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
    fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
    Assert.assertEquals(result3, fromResultLevelCacheRes);
}
Also used: LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), StringLastAggregatorFactory (org.apache.druid.query.aggregation.last.StringLastAggregatorFactory), ConstantPostAggregator (org.apache.druid.query.aggregation.post.ConstantPostAggregator), MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec), Result (org.apache.druid.query.Result), TableDataSource (org.apache.druid.query.TableDataSource), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), SerializablePairLongString (org.apache.druid.query.aggregation.SerializablePairLongString), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Test (org.junit.Test)
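
The essence of the test is a serialize/deserialize round trip through the cache strategy. As a minimal sketch, the segment-level path can be factored into a helper like the one below; the class and method names are hypothetical, but the calls are the same ones the test uses.

// A generic sketch of the cache round trip exercised above: prepare the result,
// serialize it with Jackson, read it back as the strategy's cache object class,
// then pull it back into a Result.
import java.io.IOException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.query.CacheStrategy;
import org.apache.druid.query.Result;
import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.timeseries.TimeseriesResultValue;

public class CacheRoundTripSketch
{
    static Result<TimeseriesResultValue> roundTrip(
        CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> strategy,
        ObjectMapper mapper,
        Result<TimeseriesResultValue> original
    ) throws IOException
    {
        // segment-level path: prepare -> bytes -> cache object -> result
        Object prepared = strategy.prepareForSegmentLevelCache().apply(original);
        Object cached = mapper.readValue(mapper.writeValueAsBytes(prepared), strategy.getCacheObjectClazz());
        return strategy.pullFromSegmentLevelCache().apply(cached);
    }
}

The result-level path in the test is the same pattern, except it goes through prepareForCache(true) and pullFromCache(true) and also carries post-aggregated values such as "post".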

Example 64 with LongSumAggregatorFactory

Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.

From the class TimeseriesQueryQueryToolChestTest, method testResultLevelCacheKey.

@Test
public void testResultLevelCacheKey() {
    final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder().dataSource("dummy").intervals("2015-01-01/2015-01-02").descending(descending).granularity(Granularities.ALL).aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0"), new CountAggregatorFactory("metric1"))).postAggregators(ImmutableList.of(new ArithmeticPostAggregator("post", "+", ImmutableList.of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric0"))))).build();
    final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder().dataSource("dummy").intervals("2015-01-01/2015-01-02").descending(descending).granularity(Granularities.ALL).aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0"), new CountAggregatorFactory("metric1"))).postAggregators(ImmutableList.of(new ArithmeticPostAggregator("post", "/", ImmutableList.of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric0"))))).build();
    Assert.assertTrue(Arrays.equals(TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1), TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)));
    Assert.assertFalse(Arrays.equals(TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1), TOOL_CHEST.getCacheStrategy(query2).computeResultLevelCacheKey(query2)));
}
Also used: ArithmeticPostAggregator (org.apache.druid.query.aggregation.post.ArithmeticPostAggregator), FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), Test (org.junit.Test)
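
The two queries above differ only in their post-aggregator ("+" versus "/"), so the assertions show that post-aggregation is excluded from the per-segment cache key but included in the result-level key. A small sketch of that comparison (the helper class and method names are hypothetical; the calls mirror the test):

// A sketch of the key comparison the assertions make.
import java.util.Arrays;
import org.apache.druid.query.CacheStrategy;
import org.apache.druid.query.Result;
import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest;
import org.apache.druid.query.timeseries.TimeseriesResultValue;

public class CacheKeySketch
{
    // true when the two queries would share per-segment cache entries
    static boolean sameSegmentKey(TimeseriesQueryQueryToolChest toolChest, TimeseriesQuery q1, TimeseriesQuery q2)
    {
        CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> s1 = toolChest.getCacheStrategy(q1);
        CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> s2 = toolChest.getCacheStrategy(q2);
        return Arrays.equals(s1.computeCacheKey(q1), s2.computeCacheKey(q2));
    }

    // true when the two queries would also share result-level cache entries
    static boolean sameResultLevelKey(TimeseriesQueryQueryToolChest toolChest, TimeseriesQuery q1, TimeseriesQuery q2)
    {
        return Arrays.equals(
            toolChest.getCacheStrategy(q1).computeResultLevelCacheKey(q1),
            toolChest.getCacheStrategy(q2).computeResultLevelCacheKey(q2)
        );
    }
}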

Example 65 with LongSumAggregatorFactory

Use of org.apache.druid.query.aggregation.LongSumAggregatorFactory in project druid by druid-io.

From the class TimeseriesQueryQueryToolChestTest, method testResultLevelCacheKeyWithGrandTotal.

@Test
public void testResultLevelCacheKeyWithGrandTotal() {
    final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder().dataSource("dummy").intervals("2015-01-01/2015-01-02").descending(descending).granularity(Granularities.ALL).aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0"), new CountAggregatorFactory("metric1"))).postAggregators(ImmutableList.of(new ArithmeticPostAggregator("post", "+", ImmutableList.of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric0"))))).context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true)).build();
    final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder().dataSource("dummy").intervals("2015-01-01/2015-01-02").descending(descending).granularity(Granularities.ALL).aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0"), new CountAggregatorFactory("metric1"))).postAggregators(ImmutableList.of(new ArithmeticPostAggregator("post", "/", ImmutableList.of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric0"))))).context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true)).build();
    Assert.assertTrue(Arrays.equals(TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1), TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)));
    Assert.assertFalse(Arrays.equals(TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1), TOOL_CHEST.getCacheStrategy(query2).computeResultLevelCacheKey(query2)));
}
Also used: ArithmeticPostAggregator (org.apache.druid.query.aggregation.post.ArithmeticPostAggregator), FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), Test (org.junit.Test)
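
For reference, a standalone sketch of a grand-total timeseries query like the ones compared above; the datasource name and interval are placeholders carried over from the test, and descending mode and post-aggregators are omitted for brevity:

// A minimal sketch of a timeseries query that sums a long column and requests
// a grand-total row via the context flag the test sets.
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.timeseries.TimeseriesQuery;

public class GrandTotalTimeseriesSketch
{
    public static TimeseriesQuery build()
    {
        return Druids.newTimeseriesQueryBuilder()
            .dataSource("dummy")
            .intervals("2015-01-01/2015-01-02")
            .granularity(Granularities.ALL)
            .aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0")))
            // grandTotal asks the broker to append a final row merging the whole result set
            .context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true))
            .build();
    }
}

The grand-total row carries no timestamp, which is why Example 63 also exercises a cached result with a null timestamp ("null timestamp similar to grandTotal").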

Aggregations

LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 392 usages
Test (org.junit.Test): 347 usages
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 229 usages
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 193 usages
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 79 usages
OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec): 79 usages
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 60 usages
Result (org.apache.druid.query.Result): 43 usages
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 43 usages
ArrayList (java.util.ArrayList): 36 usages
HashMap (java.util.HashMap): 36 usages
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 36 usages
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 36 usages
SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter): 36 usages
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 31 usages
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery): 31 usages
PeriodGranularity (org.apache.druid.java.util.common.granularity.PeriodGranularity): 30 usages
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 30 usages
Period (org.joda.time.Period): 29 usages
QueryRunner (org.apache.druid.query.QueryRunner): 27 usages