
Example 41 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From the class CalciteArraysQueryTest, method testGroupByArrayFromCase: the SQL count(1) over a CASE-derived array dimension is planned as a GroupByQuery whose only aggregator is a CountAggregatorFactory.

@Test
public void testGroupByArrayFromCase() throws Exception {
    cannotVectorize();
    testQuery("SELECT CASE WHEN dim4 = 'a' THEN ARRAY['foo','bar','baz'] END as mv_value, count(1) from numfoo GROUP BY 1", QUERY_CONTEXT_NO_STRINGIFY_ARRAY, ImmutableList.of(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE3).setInterval(querySegmentSpec(Filtration.eternity())).setVirtualColumns(expressionVirtualColumn("v0", "case_searched((\"dim4\" == 'a'),array('foo','bar','baz'),null)", ColumnType.STRING_ARRAY)).setDimensions(new DefaultDimensionSpec("v0", "_d0", ColumnType.STRING_ARRAY)).setGranularity(Granularities.ALL).setAggregatorSpecs(new CountAggregatorFactory("a0")).setContext(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { null, 3L }, new Object[] { ImmutableList.of("foo", "bar", "baz"), 3L }));
}
Also used: CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), Test (org.junit.Test)
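The essential piece of this plan is the single CountAggregatorFactory("a0") handed to setAggregatorSpecs, which implements the SQL count(1). The following is a minimal, hedged sketch that isolates that usage in a standalone GroupByQuery built with the same builder methods shown above; the data source name, interval, and dimension are illustrative placeholders, not fixtures from the test.

// Sketch only: count rows per value of an existing string dimension.
// "my_datasource" and "dim4" are placeholder names for illustration.
GroupByQuery countPerValue = GroupByQuery.builder()
    .setDataSource("my_datasource")
    .setInterval(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))))
    .setGranularity(Granularities.ALL)
    .setDimensions(new DefaultDimensionSpec("dim4", "d0"))
    .setAggregatorSpecs(new CountAggregatorFactory("a0"))  // one row count per group
    .build();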

Example 42 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From the class TimeseriesQueryQueryToolChestTest, method testCacheStrategy: verifies that timeseries results containing a CountAggregatorFactory metric survive a serialization round trip through both the segment-level and the result-level cache.

@Test
public void testCacheStrategy() throws Exception {
    CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> strategy = TOOL_CHEST.getCacheStrategy(
        new TimeseriesQuery(
            new TableDataSource("dummy"),
            new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
            descending,
            VirtualColumns.EMPTY,
            null,
            Granularities.ALL,
            ImmutableList.of(new CountAggregatorFactory("metric1"), new LongSumAggregatorFactory("metric0", "metric0"), new StringLastAggregatorFactory("complexMetric", "test", null, null)),
            ImmutableList.of(new ConstantPostAggregator("post", 10)),
            0,
            null));
    // test timestamps that result in integer size millis
    final Result<TimeseriesResultValue> result1 = new Result<>(
        DateTimes.utc(123L),
        new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", new SerializablePairLongString(123L, "val1"))));
    Object preparedValue = strategy.prepareForSegmentLevelCache().apply(result1);
    ObjectMapper objectMapper = TestHelper.makeJsonMapper();
    Object fromCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedValue), strategy.getCacheObjectClazz());
    Result<TimeseriesResultValue> fromCacheResult = strategy.pullFromSegmentLevelCache().apply(fromCacheValue);
    Assert.assertEquals(result1, fromCacheResult);
    // test timestamps that result in integer size millis
    final Result<TimeseriesResultValue> result2 = new Result<>(
        DateTimes.utc(123L),
        new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", "val1", "post", 10)));
    Object preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result2);
    Object fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
    Result<TimeseriesResultValue> fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
    Assert.assertEquals(result2, fromResultLevelCacheRes);
    // null timestamp similar to grandTotal
    final Result<TimeseriesResultValue> result3 = new Result<>(
        null,
        new TimeseriesResultValue(ImmutableMap.of("metric1", 2, "metric0", 3, "complexMetric", "val1", "post", 10)));
    preparedResultLevelCacheValue = strategy.prepareForCache(true).apply(result3);
    fromResultLevelCacheValue = objectMapper.readValue(objectMapper.writeValueAsBytes(preparedResultLevelCacheValue), strategy.getCacheObjectClazz());
    fromResultLevelCacheRes = strategy.pullFromCache(true).apply(fromResultLevelCacheValue);
    Assert.assertEquals(result3, fromResultLevelCacheRes);
}
Also used: LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), StringLastAggregatorFactory (org.apache.druid.query.aggregation.last.StringLastAggregatorFactory), ConstantPostAggregator (org.apache.druid.query.aggregation.post.ConstantPostAggregator), MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec), Result (org.apache.druid.query.Result), TableDataSource (org.apache.druid.query.TableDataSource), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), SerializablePairLongString (org.apache.druid.query.aggregation.SerializablePairLongString), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper), Test (org.junit.Test)
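Stripped of the three result shapes, the caching pattern exercised above is: prepare the result for the cache, push it through Jackson, and pull it back out. A condensed sketch of the segment-level round trip, assuming strategy, result1, and objectMapper are set up exactly as in the test (and that the surrounding method declares throws Exception for Jackson's checked exceptions):

// Segment-level cache round trip; `strategy`, `result1`, and `objectMapper`
// come from the test above.
Object prepared = strategy.prepareForSegmentLevelCache().apply(result1);
byte[] cachedBytes = objectMapper.writeValueAsBytes(prepared);
Object restored = objectMapper.readValue(cachedBytes, strategy.getCacheObjectClazz());
Result<TimeseriesResultValue> roundTripped = strategy.pullFromSegmentLevelCache().apply(restored);
Assert.assertEquals(result1, roundTripped);

The result-level variant swaps the first and last calls for strategy.prepareForCache(true) and strategy.pullFromCache(true), as the test does for result2 and result3.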

Example 43 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From the class TimeseriesQueryQueryToolChestTest, method testResultLevelCacheKey: two queries that differ only in their post-aggregators must share a segment-level cache key but produce different result-level cache keys.

@Test
public void testResultLevelCacheKey() {
    final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
        .dataSource("dummy")
        .intervals("2015-01-01/2015-01-02")
        .descending(descending)
        .granularity(Granularities.ALL)
        .aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0"), new CountAggregatorFactory("metric1")))
        .postAggregators(ImmutableList.of(new ArithmeticPostAggregator("post", "+", ImmutableList.of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric0")))))
        .build();
    final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
        .dataSource("dummy")
        .intervals("2015-01-01/2015-01-02")
        .descending(descending)
        .granularity(Granularities.ALL)
        .aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0"), new CountAggregatorFactory("metric1")))
        .postAggregators(ImmutableList.of(new ArithmeticPostAggregator("post", "/", ImmutableList.of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric0")))))
        .build();
    Assert.assertTrue(Arrays.equals(TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1), TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)));
    Assert.assertFalse(Arrays.equals(TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1), TOOL_CHEST.getCacheStrategy(query2).computeResultLevelCacheKey(query2)));
}
Also used: ArithmeticPostAggregator (org.apache.druid.query.aggregation.post.ArithmeticPostAggregator), FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), Test (org.junit.Test)

Example 44 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From the class TimeseriesQueryQueryToolChestTest, method testResultLevelCacheKeyWithGrandTotal: the same post-aggregator distinction as in the previous example, with the grandTotal context flag enabled on both queries.

@Test
public void testResultLevelCacheKeyWithGrandTotal() {
    final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
        .dataSource("dummy")
        .intervals("2015-01-01/2015-01-02")
        .descending(descending)
        .granularity(Granularities.ALL)
        .aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0"), new CountAggregatorFactory("metric1")))
        .postAggregators(ImmutableList.of(new ArithmeticPostAggregator("post", "+", ImmutableList.of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric0")))))
        .context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true))
        .build();
    final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder()
        .dataSource("dummy")
        .intervals("2015-01-01/2015-01-02")
        .descending(descending)
        .granularity(Granularities.ALL)
        .aggregators(ImmutableList.of(new LongSumAggregatorFactory("metric0", "metric0"), new CountAggregatorFactory("metric1")))
        .postAggregators(ImmutableList.of(new ArithmeticPostAggregator("post", "/", ImmutableList.of(new FieldAccessPostAggregator(null, "metric1"), new FieldAccessPostAggregator(null, "metric0")))))
        .context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true))
        .build();
    Assert.assertTrue(Arrays.equals(TOOL_CHEST.getCacheStrategy(query1).computeCacheKey(query1), TOOL_CHEST.getCacheStrategy(query2).computeCacheKey(query2)));
    Assert.assertFalse(Arrays.equals(TOOL_CHEST.getCacheStrategy(query1).computeResultLevelCacheKey(query1), TOOL_CHEST.getCacheStrategy(query2).computeResultLevelCacheKey(query2)));
}
Also used: ArithmeticPostAggregator (org.apache.druid.query.aggregation.post.ArithmeticPostAggregator), FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), Test (org.junit.Test)

Example 45 with CountAggregatorFactory

Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.

From the class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAggInvertedNullValue: counts only rows whose market dimension is not null by wrapping a CountAggregatorFactory in a FilteredAggregatorFactory with an inverted null-value selector filter.

@Test
public void testTimeSeriesWithFilteredAggInvertedNullValue() {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .aggregators(Lists.newArrayList(Iterables.concat(
            aggregatorFactoryList,
            Collections.singletonList(new FilteredAggregatorFactory(
                new CountAggregatorFactory("filteredAgg"),
                new NotDimFilter(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, null, null)))))))
        .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
        .descending(descending)
        .context(makeContext())
        .build();
    Iterable<Result<TimeseriesResultValue>> actualResults = runner.run(QueryPlus.wrap(query)).toList();
    List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(new Result<>(
        DateTimes.of("2011-04-01"),
        new TimeseriesResultValue(ImmutableMap.of(
            "filteredAgg", 26L,
            "addRowsIndexConstant", 12486.361190795898d,
            "index", 12459.361190795898d,
            "uniques", 9.019833517963864d,
            "rows", 26L))));
    assertExpectedResults(expectedResults, actualResults);
}
Also used: FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory), NotDimFilter (org.apache.druid.query.filter.NotDimFilter), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter), Result (org.apache.druid.query.Result), InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest), Test (org.junit.Test)
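The aggregator at the center of this test is a CountAggregatorFactory wrapped in a FilteredAggregatorFactory, so only rows passing the inverted null-value selector filter are counted. A minimal sketch of just that aggregator, with an illustrative dimension name standing in for QueryRunnerTestHelper.MARKET_DIMENSION:

// Count only rows whose "market" dimension is not null.
// "market" is an illustrative dimension name, not part of the test fixtures.
AggregatorFactory filteredCount = new FilteredAggregatorFactory(
    new CountAggregatorFactory("filteredAgg"),
    new NotDimFilter(new SelectorDimFilter("market", null, null)));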

Aggregations

CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 300
Test (org.junit.Test): 249
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 103
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 81
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 62
TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery): 50
TableDataSource (org.apache.druid.query.TableDataSource): 44
QueryDataSource (org.apache.druid.query.QueryDataSource): 41
TimeseriesQueryQueryToolChest (org.apache.druid.query.timeseries.TimeseriesQueryQueryToolChest): 40
GroupByQuery (org.apache.druid.query.groupby.GroupByQuery): 37
Result (org.apache.druid.query.Result): 36
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 35
FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory): 30
FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator): 30
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 28
IncrementalIndexSegment (org.apache.druid.segment.IncrementalIndexSegment): 27
DimensionsSpec (org.apache.druid.data.input.impl.DimensionsSpec): 26
GroupByQueryRunnerTest (org.apache.druid.query.groupby.GroupByQueryRunnerTest): 26
IncrementalIndex (org.apache.druid.segment.incremental.IncrementalIndex): 26
Parameters (junitparams.Parameters): 24