Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAggAndExpressionFilteredAgg.
@Test
public void testTimeSeriesWithFilteredAggAndExpressionFilteredAgg() {
// can't vectorize if expression
cannotVectorize();
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
                              .granularity(QueryRunnerTestHelper.ALL_GRAN)
                              .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
                              .aggregators(
                                  Lists.newArrayList(
                                      Iterables.concat(
                                          aggregatorFactoryList,
                                          ImmutableList.of(
                                              new FilteredAggregatorFactory(
                                                  new CountAggregatorFactory("filteredAgg"),
                                                  new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null)
                                              ),
                                              new LongSumAggregatorFactory("altLongCount", null, "if (market == 'spot', 1, 0)", TestExprMacroTable.INSTANCE),
                                              new DoubleSumAggregatorFactory("altDoubleCount", null, "if (market == 'spot', 1, 0)", TestExprMacroTable.INSTANCE),
                                              new FloatSumAggregatorFactory("altFloatCount", null, "if (market == 'spot', 1, 0)", TestExprMacroTable.INSTANCE)
                                          )
                                      )
                                  )
                              )
                              .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
                              .descending(descending)
                              .context(makeContext())
                              .build();
Iterable<Result<TimeseriesResultValue>> actualResults = runner.run(QueryPlus.wrap(query)).toList();
List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-04-01"),
        new TimeseriesResultValue(
            ImmutableMap.<String, Object>builder()
                        .put("filteredAgg", 18L)
                        .put("addRowsIndexConstant", 12486.361190795898d)
                        .put("index", 12459.361190795898d)
                        .put("uniques", 9.019833517963864d)
                        .put("rows", 26L)
                        .put("altLongCount", 18L)
                        .put("altDoubleCount", 18.0)
                        .put("altFloatCount", 18.0f)
                        .build()
        )
    )
);
assertExpectedResults(expectedResults, actualResults);
}
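The test above counts "spot" rows in two equivalent ways: a FilteredAggregatorFactory wrapping a CountAggregatorFactory, and expression-based sums of if (market == 'spot', 1, 0). The following is a minimal sketch, outside the test harness, isolating those two constructions; the class and method names are illustrative, and only the Druid classes and arguments come from the snippet above.

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.filter.SelectorDimFilter;

public class SpotCountAggregators
{
  // Counts only rows where market == 'spot': the delegate CountAggregatorFactory supplies
  // the output name and the counting semantics, the selector filter supplies the predicate.
  static AggregatorFactory filteredSpotCount()
  {
    return new FilteredAggregatorFactory(
        new CountAggregatorFactory("filteredAgg"),
        new SelectorDimFilter("market", "spot", null)
    );
  }

  // Same result via an expression: sum 1 for matching rows, 0 otherwise. The expression-based
  // aggregators are the reason the test above calls cannotVectorize().
  static AggregatorFactory expressionSpotCount()
  {
    return new LongSumAggregatorFactory("altLongCount", null, "if (market == 'spot', 1, 0)", TestExprMacroTable.INSTANCE);
  }
}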
Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From class TimeseriesQueryRunnerTest, method testTimeSeriesWithFilteredAggDimensionNotPresentNotNullValue.
@Test
public void testTimeSeriesWithFilteredAggDimensionNotPresentNotNullValue() {
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
                              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
                              .granularity(QueryRunnerTestHelper.ALL_GRAN)
                              .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
                              .aggregators(
                                  Lists.newArrayList(
                                      Iterables.concat(
                                          aggregatorFactoryList,
                                          Collections.singletonList(
                                              new FilteredAggregatorFactory(
                                                  new CountAggregatorFactory("filteredAgg"),
                                                  new SelectorDimFilter("abraKaDabra", "Lol", null)
                                              )
                                          )
                                      )
                                  )
                              )
                              .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
                              .descending(descending)
                              .context(makeContext())
                              .build();
Iterable<Result<TimeseriesResultValue>> actualResults = runner.run(QueryPlus.wrap(query)).toList();
List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-04-01"),
        new TimeseriesResultValue(
            ImmutableMap.of(
                "filteredAgg", 0L,
                "addRowsIndexConstant", 12486.361190795898d,
                "index", 12459.361190795898d,
                "uniques", 9.019833517963864d,
                "rows", 26L
            )
        )
    )
);
assertExpectedResults(expectedResults, actualResults);
}
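Here the filter targets a dimension that is not present in the data, so the filtered count stays at 0. A minimal sketch of just that aggregator follows; the class and method names are illustrative, and the comment states only what the test's expected result ("filteredAgg", 0L) already shows.

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.filter.SelectorDimFilter;

public class MissingDimensionCount
{
  // A column that does not exist reads as null for every row, so a selector filter on a
  // non-null value ("Lol") never matches and the wrapped count remains 0, matching the
  // "filteredAgg" value asserted in the test above.
  static AggregatorFactory countOfMissingDimension()
  {
    return new FilteredAggregatorFactory(
        new CountAggregatorFactory("filteredAgg"),
        new SelectorDimFilter("abraKaDabra", "Lol", null)
    );
  }
}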
Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From class TopNQueryRunnerTest, method testNumericDimensionTopNWithNullPreviousStop.
@Test
public void testNumericDimensionTopNWithNullPreviousStop() {
TopNQuery query = new TopNQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(Granularities.ALL)
    .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
    .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
    .threshold(2)
    .intervals(QueryRunnerTestHelper.SECOND_ONLY)
    .aggregators(duplicateAggregators(QueryRunnerTestHelper.ROWS_COUNT, new CountAggregatorFactory("rows1")))
    .build();
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-04-02T00:00:00.000Z"),
        new TopNResultValue(
            withDuplicateResults(
                Arrays.asList(
                    ImmutableMap.of("market", "spot", "rows", 9L),
                    ImmutableMap.of("market", "total_market", "rows", 2L)
                ),
                "rows",
                "rows1"
            )
        )
    )
);
TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
}
Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From class TopNQueryRunnerTest, method testAggregateOnLongNumericNull.
@Test
public void testAggregateOnLongNumericNull() {
TopNQuery query = new TopNQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.ALL_GRAN)
    .dimension(new DefaultDimensionSpec("longNumericNull", "dim", ColumnType.LONG))
    .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
    .threshold(10000)
    .aggregators(new CountAggregatorFactory("count"))
    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
    .build();
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-01-12T00:00:00.000Z"),
        new TopNResultValue(
            Arrays.asList(
                makeRowWithNulls("dim", NullHandling.defaultLongValue(), "count", 279L),
                makeRowWithNulls("dim", 10L, "count", 93L),
                makeRowWithNulls("dim", 20L, "count", 93L),
                makeRowWithNulls("dim", 40L, "count", 93L),
                makeRowWithNulls("dim", 50L, "count", 279L),
                makeRowWithNulls("dim", 70L, "count", 279L),
                makeRowWithNulls("dim", 80L, "count", 93L)
            )
        )
    )
);
assertExpectedResults(expectedResults, query);
}
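This test groups on a nullable long column and expects one bucket keyed by NullHandling.defaultLongValue(). The sketch below isolates that dimension declaration and the null-bucket key; the class and method names are illustrative, only the Druid classes and the "longNumericNull"/"dim" names come from the snippet above.

import org.apache.druid.common.config.NullHandling;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.segment.column.ColumnType;

public class NullableLongDimension
{
  static DimensionSpec nullableLongDim()
  {
    // Group by the nullable long column, exposing it in results under the output name "dim";
    // ColumnType.LONG asks the engine to read the dimension as a long.
    return new DefaultDimensionSpec("longNumericNull", "dim", ColumnType.LONG);
  }

  static Long expectedNullBucketKey()
  {
    // null in SQL-compatible null handling, 0L when nulls are replaced with defaults, which is
    // why the expected results use NullHandling.defaultLongValue() instead of a literal.
    return NullHandling.defaultLongValue();
  }
}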
Use of org.apache.druid.query.aggregation.CountAggregatorFactory in project druid by druid-io.
From class TopNQueryRunnerTest, method testAggregateOnFloatNumericNull.
@Test
public void testAggregateOnFloatNumericNull() {
TopNQuery query = new TopNQueryBuilder()
    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .granularity(QueryRunnerTestHelper.ALL_GRAN)
    .dimension(new DefaultDimensionSpec("floatNumericNull", "dim", ColumnType.FLOAT))
    .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
    .threshold(10000)
    .aggregators(new CountAggregatorFactory("count"))
    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
    .build();
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2011-01-12T00:00:00.000Z"),
        new TopNResultValue(
            Arrays.asList(
                makeRowWithNulls("dim", NullHandling.defaultFloatValue(), "count", 279L),
                makeRowWithNulls("dim", 10.0f, "count", 93L),
                makeRowWithNulls("dim", 20.0f, "count", 93L),
                makeRowWithNulls("dim", 40.0f, "count", 93L),
                makeRowWithNulls("dim", 50.0f, "count", 279L),
                makeRowWithNulls("dim", 70.0f, "count", 279L),
                makeRowWithNulls("dim", 80.0f, "count", 93L)
            )
        )
    )
);
assertExpectedResults(expectedResults, query);
}
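The float variant mirrors the long one; what both numeric-null tests rely on is that CountAggregatorFactory counts rows rather than non-null values of some input column. A minimal sketch of that aggregator follows; the class and method names are illustrative, and the 279-row figure is taken from the expected results above.

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;

public class PlainRowCount
{
  static AggregatorFactory rowCount()
  {
    // CountAggregatorFactory takes only an output name and increments once per row, so the
    // null bucket of the grouping dimension is counted like any other bucket (279 rows in the
    // two tests above) rather than being skipped.
    return new CountAggregatorFactory("count");
  }
}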