Use of io.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class GroupByQueryRunnerTest, method testGroupByWithOrderLimitHavingSpec.
@Test
public void testGroupByWithOrderLimitHavingSpec() {
GroupByQuery.Builder builder = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.dataSource)
    .setInterval("2011-01-25/2011-01-28")
    .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
    .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new DoubleSumAggregatorFactory("index", "index")))
    .setGranularity(Granularities.ALL)
    .setHavingSpec(new GreaterThanHavingSpec("index", 310L))
    .setLimitSpec(new DefaultLimitSpec(Lists.newArrayList(new OrderByColumnSpec("index", OrderByColumnSpec.Direction.ASCENDING)), 5));
List<Row> expectedResults = Arrays.asList(
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-25", "alias", "business", "rows", 3L, "index", 312.38165283203125),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-25", "alias", "news", "rows", 3L, "index", 312.7834167480469),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-25", "alias", "technology", "rows", 3L, "index", 324.6412353515625),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-25", "alias", "travel", "rows", 3L, "index", 393.36322021484375),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-01-25", "alias", "health", "rows", 3L, "index", 511.2996826171875));
GroupByQuery fullQuery = builder.build();
Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, fullQuery);
TestHelper.assertExpectedObjects(expectedResults, results, "");
}
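In this query the having spec filters on the aggregated "index" value after grouping, and the limit spec then sorts the survivors ascending and truncates to five rows. As a rough illustration of that filter-sort-limit pipeline, here is a hand-rolled sketch over plain maps with made-up numbers; it is not Druid's HavingSpec/LimitSpec implementation:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class HavingLimitSketch {
    public static void main(String[] args) {
        // Hypothetical aggregated groupBy output: one map per "alias" group.
        List<Map<String, Object>> rows = new ArrayList<>();
        rows.add(Map.of("alias", "health", "index", 511.3));
        rows.add(Map.of("alias", "news", "index", 312.8));
        rows.add(Map.of("alias", "automotive", "index", 147.4));

        // greaterThan having: keep groups whose aggregated "index" exceeds 310,
        // then order ascending by "index" and keep at most five rows.
        List<Map<String, Object>> result = rows.stream()
            .filter(r -> (Double) r.get("index") > 310.0)
            .sorted(Comparator.comparingDouble((Map<String, Object> r) -> (Double) r.get("index")))
            .limit(5)
            .collect(Collectors.toList());

        result.forEach(System.out::println); // automotive drops out; the rest ascend by index
    }
}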
Use of io.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class GroupByQueryRunnerTest, method testGroupByWithOrderLimit3.
@Test
public void testGroupByWithOrderLimit3() throws Exception {
GroupByQuery.Builder builder = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.dataSource)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
    .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new DoubleSumAggregatorFactory("idx", "index")))
    .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING)
    .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING)
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
GroupByQuery query = builder.build();
List<Row> expectedResults = GroupByQueryRunnerTestHelper.createExpectedRows(
    new String[] { "__time", "alias", "rows", "idx" },
    new Object[] { "2011-04-01", "mezzanine", 6L, 4423.6533203125D },
    new Object[] { "2011-04-01", "premium", 6L, 4418.61865234375D },
    new Object[] { "2011-04-01", "entertainment", 2L, 319.94403076171875D },
    new Object[] { "2011-04-01", "automotive", 2L, 270.3977966308594D },
    new Object[] { "2011-04-01", "travel", 2L, 243.65843200683594D },
    new Object[] { "2011-04-01", "news", 2L, 222.20980834960938D },
    new Object[] { "2011-04-01", "business", 2L, 218.7224884033203D },
    new Object[] { "2011-04-01", "health", 2L, 216.97836303710938D },
    new Object[] { "2011-04-01", "technology", 2L, 178.24917602539062D });
Map<String, Object> context = Maps.newHashMap();
QueryRunner<Row> mergeRunner = factory.getToolchest().mergeResults(runner);
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, context), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited");
}
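The order-by here is two-column: idx descending, then alias descending as a tie-breaker, and the merged results are checked both unlimited and with limit(5). The same two-column ordering can be expressed with a plain Comparator; the sketch below uses a hypothetical row record and made-up values, not Druid's OrderByColumnSpec machinery:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class OrderBySketch {
    // Hypothetical stand-in for one aggregated result row.
    record ResultRow(String alias, long rows, double idx) {}

    public static void main(String[] args) {
        List<ResultRow> results = new ArrayList<>(List.of(
            new ResultRow("premium", 6L, 4418.6),
            new ResultRow("mezzanine", 6L, 4423.7),
            new ResultRow("travel", 2L, 243.7)));

        // idx descending, then alias descending -- mirrors the two addOrderByColumn calls.
        results.sort(Comparator.comparingDouble(ResultRow::idx).reversed()
            .thenComparing(Comparator.comparing(ResultRow::alias).reversed()));

        results.forEach(System.out::println); // mezzanine, premium, travel
    }
}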
Use of io.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class GroupByQueryRunnerTest, method testGroupByOrderLimit.
@Test
public void testGroupByOrderLimit() throws Exception {
GroupByQuery.Builder builder = GroupByQuery.builder()
    .setDataSource(QueryRunnerTestHelper.dataSource)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
    .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
    .addOrderByColumn("rows")
    .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING)
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
final GroupByQuery query = builder.build();
List<Row> expectedResults = Arrays.asList(
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
    GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L));
Map<String, Object> context = Maps.newHashMap();
QueryRunner<Row> mergeRunner = factory.getToolchest().mergeResults(runner);
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, context), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited");
// Now try it with an expression based aggregator.
builder.limit(Integer.MAX_VALUE)
    .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new DoubleSumAggregatorFactory("idx", null, "index / 2 + indexMin")));
expectedResults = GroupByQueryRunnerTestHelper.createExpectedRows(
    new String[] { "__time", "alias", "rows", "idx" },
    new Object[] { "2011-04-01", "travel", 2L, 365.4876403808594D },
    new Object[] { "2011-04-01", "technology", 2L, 267.3737487792969D },
    new Object[] { "2011-04-01", "news", 2L, 333.3147277832031D },
    new Object[] { "2011-04-01", "health", 2L, 325.467529296875D },
    new Object[] { "2011-04-01", "entertainment", 2L, 479.916015625D },
    new Object[] { "2011-04-01", "business", 2L, 328.083740234375D },
    new Object[] { "2011-04-01", "automotive", 2L, 405.5966796875D },
    new Object[] { "2011-04-01", "premium", 6L, 6627.927734375D },
    new Object[] { "2011-04-01", "mezzanine", 6L, 6635.47998046875D });
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(builder.build(), context), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited");
// Now try it with an expression virtual column.
builder.limit(Integer.MAX_VALUE)
    .setVirtualColumns(new ExpressionVirtualColumn("expr", "index / 2 + indexMin"))
    .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new DoubleSumAggregatorFactory("idx", "expr")));
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(builder.build(), context), "no-limit");
TestHelper.assertExpectedObjects(Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited");
}
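The last two variants aggregate the expression index / 2 + indexMin rather than a raw column: once through the expression-aware three-argument DoubleSumAggregatorFactory constructor, and once through an ExpressionVirtualColumn feeding the plain field-based constructor. Because summation is linear, folding the expression per row gives the same total as combining the per-column sums, which is why both variants assert against the same expectedResults list. A tiny arithmetic check of that equivalence, in plain Java with made-up numbers:

public class ExpressionSumSketch {
    public static void main(String[] args) {
        // Hypothetical per-row values of "index" and "indexMin" within one group.
        double[] index = { 135.3, 108.4 };
        double[] indexMin = { 297.8, 213.2 };

        // Variant A: evaluate "index / 2 + indexMin" per row, then sum the results.
        double perRowSum = 0.0;
        for (int i = 0; i < index.length; i++) {
            perRowSum += index[i] / 2 + indexMin[i];
        }

        // Variant B: sum each column first, then combine the column totals.
        double combined = (index[0] + index[1]) / 2 + (indexMin[0] + indexMin[1]);

        // Equal up to floating-point rounding, by linearity of the sum.
        System.out.println(perRowSum + " vs " + combined);
    }
}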
Use of io.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class SegmentMetadataQueryQueryToolChestTest, method testMergeAggregatorsConflict.
@Test
public void testMergeAggregatorsConflict() {
final SegmentAnalysis analysis1 = new SegmentAnalysis(
    "id",
    null,
    Maps.<String, ColumnAnalysis>newHashMap(),
    0,
    0,
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleSumAggregatorFactory("bar", "bar")),
    null,
    null,
    null);
final SegmentAnalysis analysis2 = new SegmentAnalysis(
    "id",
    null,
    Maps.<String, ColumnAnalysis>newHashMap(),
    0,
    0,
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleMaxAggregatorFactory("bar", "bar"),
        "baz", new LongMaxAggregatorFactory("baz", "baz")),
    null,
    null,
    null);
final Map<String, AggregatorFactory> expectedLenient = Maps.newHashMap();
expectedLenient.put("foo", new LongSumAggregatorFactory("foo", "foo"));
expectedLenient.put("bar", null);
expectedLenient.put("baz", new LongMaxAggregatorFactory("baz", "baz"));
Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
Assert.assertEquals(expectedLenient, mergeLenient(analysis1, analysis2).getAggregators());
// Simulate multi-level merge
Assert.assertEquals(expectedLenient, mergeLenient(mergeLenient(analysis1, analysis2), mergeLenient(analysis1, analysis2)).getAggregators());
}
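mergeStrict and mergeLenient are helpers defined elsewhere in this test class. The behavior they exercise: a name collision with different factory types ("bar" as doubleSum vs doubleMax) nulls out just that entry under lenient merging, but nulls out the entire aggregator map under strict merging, and lenient merging is idempotent across multiple levels. A simplified, hand-rolled sketch of that rule, not Druid's actual merge code:

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

public class AggregatorMergeSketch {
    // Lenient: keep entries that agree, null out names with conflicting
    // definitions, keep entries that appear on only one side.
    static <V> Map<String, V> mergeLenient(Map<String, V> a, Map<String, V> b) {
        Map<String, V> merged = new HashMap<>(a);
        for (Map.Entry<String, V> e : b.entrySet()) {
            if (merged.containsKey(e.getKey()) && !Objects.equals(merged.get(e.getKey()), e.getValue())) {
                merged.put(e.getKey(), null); // conflict: same name, different factory
            } else {
                merged.putIfAbsent(e.getKey(), e.getValue());
            }
        }
        return merged;
    }

    // Strict: any conflict invalidates the whole merged aggregator map.
    static <V> Map<String, V> mergeStrict(Map<String, V> a, Map<String, V> b) {
        Map<String, V> merged = mergeLenient(a, b);
        return merged.containsValue(null) ? null : merged;
    }

    public static void main(String[] args) {
        Map<String, String> a = Map.of("foo", "longSum", "bar", "doubleSum");
        Map<String, String> b = Map.of("foo", "longSum", "bar", "doubleMax", "baz", "longMax");
        System.out.println(mergeLenient(a, b)); // {foo=longSum, bar=null, baz=longMax} (order varies)
        System.out.println(mergeStrict(a, b));  // null
    }
}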
Use of io.druid.query.aggregation.DoubleSumAggregatorFactory in project druid by druid-io.
From class SegmentMetadataQueryQueryToolChestTest, method testMergeAggregatorsOneNull.
@Test
public void testMergeAggregatorsOneNull() {
final SegmentAnalysis analysis1 = new SegmentAnalysis(
    "id",
    null,
    Maps.<String, ColumnAnalysis>newHashMap(),
    0,
    0,
    null,
    null,
    null,
    null);
final SegmentAnalysis analysis2 = new SegmentAnalysis(
    "id",
    null,
    Maps.<String, ColumnAnalysis>newHashMap(),
    0,
    0,
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleSumAggregatorFactory("bar", "bar")),
    null,
    null,
    null);
Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
Assert.assertEquals(
    ImmutableMap.of(
        "foo", new LongSumAggregatorFactory("foo", "foo"),
        "bar", new DoubleSumAggregatorFactory("bar", "bar")),
    mergeLenient(analysis1, analysis2).getAggregators());
}
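That final assertEquals compares maps whose values are aggregator factories, so it relies on LongSumAggregatorFactory and DoubleSumAggregatorFactory implementing value-based equals. A minimal check of that property, assuming the io.druid artifact used by this test is on the classpath:

import io.druid.query.aggregation.DoubleSumAggregatorFactory;

public class FactoryEqualitySketch {
    public static void main(String[] args) {
        // Two independently constructed factories with the same name and fieldName
        // compare equal, which is what lets the map-level assertEquals above pass.
        DoubleSumAggregatorFactory first = new DoubleSumAggregatorFactory("bar", "bar");
        DoubleSumAggregatorFactory second = new DoubleSumAggregatorFactory("bar", "bar");
        System.out.println(first.equals(second)); // true
    }
}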