Use of io.druid.query.aggregation.PostAggregator in project druid by druid-io: the class DoubleLeastPostAggregatorTest, method testCompute.
@Test
public void testCompute() {
  final String aggName = "rows";
  DoubleLeastPostAggregator leastPostAggregator;
  CountAggregator agg = new CountAggregator();
  // three aggregate() calls, so the "rows" metric resolves to 3
  agg.aggregate();
  agg.aggregate();
  agg.aggregate();
  Map<String, Object> metricValues = new HashMap<String, Object>();
  metricValues.put(aggName, agg.get());
  List<PostAggregator> postAggregatorList = Lists.newArrayList(
      new ConstantPostAggregator("roku", 6D),
      new FieldAccessPostAggregator("rows", aggName)
  );
  leastPostAggregator = new DoubleLeastPostAggregator("least", postAggregatorList);
  // the least of the constant 6 and the "rows" count 3
  Assert.assertEquals(3.0, leastPostAggregator.compute(metricValues));
}
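The expected value follows directly from the inputs: the constant contributes 6.0, the field access resolves the count of 3, and the least is 3.0. A minimal standalone sketch (the constant values and the names "two" and "five" are invented here for illustration, not part of the Druid test) shows the same resolution without any aggregator state:

// Sketch only: DoubleLeastPostAggregator resolves each field against the row
// map and returns the smallest resulting value.
Map<String, Object> row = new HashMap<String, Object>();
List<PostAggregator> fields = Lists.<PostAggregator>newArrayList(
    new ConstantPostAggregator("two", 2D),
    new ConstantPostAggregator("five", 5D)
);
DoubleLeastPostAggregator least = new DoubleLeastPostAggregator("least", fields);
Assert.assertEquals(2.0, least.compute(row));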
Use of io.druid.query.aggregation.PostAggregator in project druid by druid-io: the class ArithmeticPostAggregatorTest, method testCompute.
@Test
public void testCompute() {
  final String aggName = "rows";
  ArithmeticPostAggregator arithmeticPostAggregator;
  ExpressionPostAggregator expressionPostAggregator;
  CountAggregator agg = new CountAggregator();
  // three aggregate() calls, so the "rows" metric resolves to 3
  agg.aggregate();
  agg.aggregate();
  agg.aggregate();
  Map<String, Object> metricValues = new HashMap<String, Object>();
  metricValues.put(aggName, agg.get());
  List<PostAggregator> postAggregatorList = Lists.newArrayList(
      new ConstantPostAggregator("roku", 6D),
      new FieldAccessPostAggregator("rows", "rows")
  );
  // materialize each post-aggregator's output into the metric map so the
  // expression post-aggregators can reference "roku" and "rows" by name
  for (PostAggregator postAggregator : postAggregatorList) {
    metricValues.put(postAggregator.getName(), postAggregator.compute(metricValues));
  }
  arithmeticPostAggregator = new ArithmeticPostAggregator("add", "+", postAggregatorList);
  expressionPostAggregator = new ExpressionPostAggregator("add", "roku + rows");
  Assert.assertEquals(9.0, arithmeticPostAggregator.compute(metricValues));
  Assert.assertEquals(9.0, expressionPostAggregator.compute(metricValues));
  // the ExpressionPostAggregator name ("add") is reused below; compute() only uses the expression
  arithmeticPostAggregator = new ArithmeticPostAggregator("subtract", "-", postAggregatorList);
  expressionPostAggregator = new ExpressionPostAggregator("add", "roku - rows");
  Assert.assertEquals(3.0, arithmeticPostAggregator.compute(metricValues));
  Assert.assertEquals(3.0, expressionPostAggregator.compute(metricValues));
  arithmeticPostAggregator = new ArithmeticPostAggregator("multiply", "*", postAggregatorList);
  expressionPostAggregator = new ExpressionPostAggregator("add", "roku * rows");
  Assert.assertEquals(18.0, arithmeticPostAggregator.compute(metricValues));
  Assert.assertEquals(18.0, expressionPostAggregator.compute(metricValues));
  arithmeticPostAggregator = new ArithmeticPostAggregator("divide", "/", postAggregatorList);
  expressionPostAggregator = new ExpressionPostAggregator("add", "roku / rows");
  Assert.assertEquals(2.0, arithmeticPostAggregator.compute(metricValues));
  Assert.assertEquals(2.0, expressionPostAggregator.compute(metricValues));
}
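The paired assertions above show ArithmeticPostAggregator and ExpressionPostAggregator agreeing for each binary operator on the values roku = 6 and rows = 3. ArithmeticPostAggregator is not limited to two fields; the sketch below assumes it folds the operator left-to-right across the whole field list (the field names "a", "b", "c" and the constants are invented for illustration):

// Sketch only: assumed left-to-right folding over three constant fields.
Map<String, Object> row = new HashMap<String, Object>();
List<PostAggregator> fields = Lists.<PostAggregator>newArrayList(
    new ConstantPostAggregator("a", 10D),
    new ConstantPostAggregator("b", 4D),
    new ConstantPostAggregator("c", 1D)
);
ArithmeticPostAggregator chained = new ArithmeticPostAggregator("chained", "-", fields);
// 10 - 4 - 1 = 5, assuming the operator is applied left-to-right
Assert.assertEquals(5.0, chained.compute(row));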
Use of io.druid.query.aggregation.PostAggregator in project druid by druid-io: the class GroupByQueryRunnerTest, method testMergedPostAggHavingSpec.
@Test
public void testMergedPostAggHavingSpec() {
  List<Row> expectedResults = Arrays.asList(
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L, "rows_times_10", 20.0),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L, "rows_times_10", 60.0),
      GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L, "rows_times_10", 60.0)
  );
  GroupByQuery.Builder builder = GroupByQuery.builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
      .setAggregatorSpecs(Arrays.asList(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")))
      .setPostAggregatorSpecs(Arrays.<PostAggregator>asList(new ArithmeticPostAggregator("rows_times_10", "*", Arrays.<PostAggregator>asList(new FieldAccessPostAggregator("rows", "rows"), new ConstantPostAggregator("const", 10L)))))
      .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
      .setHavingSpec(new OrHavingSpec(ImmutableList.<HavingSpec>of(new GreaterThanHavingSpec("rows_times_10", 20L), new EqualToHavingSpec("idx", 217L))));
  GroupByQuery fullQuery = builder.build();
  QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<Row>() {
    @Override
    public Sequence<Row> run(Query<Row> query, Map<String, Object> responseContext) {
      // simulate two daily segments
      final Query query1 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))));
      final Query query2 = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))));
      return new MergeSequence(query.getResultOrdering(), Sequences.simple(Arrays.asList(runner.run(query1, responseContext), runner.run(query2, responseContext))));
    }
  });
  Map<String, Object> context = Maps.newHashMap();
  // add an extra layer of merging, simulate broker forwarding query to historical
  TestHelper.assertExpectedObjects(
      expectedResults,
      factory.getToolchest().postMergeQueryDecoration(factory.getToolchest().mergeResults(factory.getToolchest().preMergeQueryDecoration(mergedRunner))).run(fullQuery, context),
      "merged"
  );
  // the same check, with the post-aggregation expressed as an ExpressionPostAggregator
  fullQuery = fullQuery.withPostAggregatorSpecs(Arrays.<PostAggregator>asList(new ExpressionPostAggregator("rows_times_10", "rows * 10.0")));
  TestHelper.assertExpectedObjects(
      expectedResults,
      factory.getToolchest().postMergeQueryDecoration(factory.getToolchest().mergeResults(factory.getToolchest().preMergeQueryDecoration(mergedRunner))).run(fullQuery, context),
      "merged"
  );
}
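The having spec is worth tracing against the expected rows: "business" has rows = 2, so rows_times_10 computes to 20.0, which does not satisfy GreaterThanHavingSpec("rows_times_10", 20L); it survives only through the OR branch EqualToHavingSpec("idx", 217L). A small sketch of that post-aggregation step in isolation (the row map below is hand-built for illustration, standing in for the merged "business" row):

// Sketch only: the same rows_times_10 post-aggregator applied to one row.
Map<String, Object> businessRow = new HashMap<String, Object>();
businessRow.put("rows", 2L);
PostAggregator rowsTimesTen = new ArithmeticPostAggregator(
    "rows_times_10",
    "*",
    Arrays.<PostAggregator>asList(
        new FieldAccessPostAggregator("rows", "rows"),
        new ConstantPostAggregator("const", 10L)
    )
);
// 2 * 10 = 20.0, which is not strictly greater than 20, so this row passes
// the having spec only because idx equals 217
Assert.assertEquals(20.0, rowsTimesTen.compute(businessRow));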
Use of io.druid.query.aggregation.PostAggregator in project druid by druid-io: the class GroupByQueryRunnerTest, method testGroupByWithHavingOnFinalizedHyperUnique.
@Test
public void testGroupByWithHavingOnFinalizedHyperUnique() {
  GroupByQuery query = new GroupByQuery.Builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setGranularity(QueryRunnerTestHelper.allGran)
      .setDimensions(Arrays.<DimensionSpec>asList(new DefaultDimensionSpec(QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.marketDimension)))
      .setInterval(QueryRunnerTestHelper.fullOnInterval)
      .setLimitSpec(new DefaultLimitSpec(Lists.newArrayList(new OrderByColumnSpec(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, OrderByColumnSpec.Direction.DESCENDING)), 3))
      .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, 8))
      .setAggregatorSpecs(Lists.<AggregatorFactory>newArrayList(QueryRunnerTestHelper.qualityUniques))
      .setPostAggregatorSpecs(Lists.<PostAggregator>newArrayList(new HyperUniqueFinalizingPostAggregator(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.uniqueMetric)))
      .build();
  List<Row> expectedResults = Arrays.asList(GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "market", "spot", QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_9, QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_9));
  Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  TestHelper.assertExpectedObjects(expectedResults, results, "order-limit");
}
Use of io.druid.query.aggregation.PostAggregator in project druid by druid-io: the class SchemalessTestFullTest, method testFilteredTopN.
private void testFilteredTopN(QueryRunner runner, List<Result<TopNResultValue>> expectedResults, String failMsg) {
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(dataSource).granularity(allGran).dimension(marketDimension).filters(marketDimension, "spot")
      .metric(indexMetric).threshold(3).intervals(fullOnInterval)
      .aggregators(Lists.<AggregatorFactory>newArrayList(Iterables.concat(commonAggregators, Lists.newArrayList(new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index")))))
      .postAggregators(Arrays.<PostAggregator>asList(addRowsIndexConstant))
      .build();
  failMsg += " filtered topN ";
  HashMap<String, Object> context = new HashMap<>();
  Iterable<Result<TopNResultValue>> actualResults = Sequences.toList(runner.run(query, context), Lists.<Result<TopNResultValue>>newArrayList());
  TestHelper.assertExpectedResults(expectedResults, actualResults, failMsg);
}
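The only PostAggregator in this TopN query is addRowsIndexConstant, which comes from the shared test helpers rather than being built inline. As a rough, assumed sketch of an equivalent post-aggregator (the helper's exact names and definition may differ), it adds a constant 1 to the "rows" and "index" metrics:

// Assumed sketch of an equivalent post-aggregator; the real definition lives
// in the shared test helper and is not reproduced in this snippet.
PostAggregator addRowsIndexConstantSketch = new ArithmeticPostAggregator(
    "addRowsIndexConstant",
    "+",
    Arrays.<PostAggregator>asList(
        new ConstantPostAggregator("const", 1L),
        new FieldAccessPostAggregator("rows", "rows"),
        new FieldAccessPostAggregator("index", "index")
    )
);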