Use of io.druid.query.aggregation.DoubleMaxAggregatorFactory in project druid by druid-io.

In the class SegmentMetadataQueryQueryToolChestTest, the method testMergeAggregatorsConflict:
@Test
public void testMergeAggregatorsConflict()
{
  final SegmentAnalysis analysis1 = new SegmentAnalysis(
      "id",
      null,
      Maps.<String, ColumnAnalysis>newHashMap(),
      0,
      0,
      ImmutableMap.of(
          "foo", new LongSumAggregatorFactory("foo", "foo"),
          "bar", new DoubleSumAggregatorFactory("bar", "bar")
      ),
      null,
      null,
      null
  );
  final SegmentAnalysis analysis2 = new SegmentAnalysis(
      "id",
      null,
      Maps.<String, ColumnAnalysis>newHashMap(),
      0,
      0,
      ImmutableMap.of(
          "foo", new LongSumAggregatorFactory("foo", "foo"),
          "bar", new DoubleMaxAggregatorFactory("bar", "bar"),
          "baz", new LongMaxAggregatorFactory("baz", "baz")
      ),
      null,
      null,
      null
  );

  // "foo" agrees across both analyses, "bar" conflicts (DoubleSum vs DoubleMax),
  // and "baz" appears only in analysis2.
  final Map<String, AggregatorFactory> expectedLenient = Maps.newHashMap();
  expectedLenient.put("foo", new LongSumAggregatorFactory("foo", "foo"));
  expectedLenient.put("bar", null);
  expectedLenient.put("baz", new LongMaxAggregatorFactory("baz", "baz"));

  // Strict merging fails outright on the conflict; lenient merging nulls out
  // only the conflicting aggregator and keeps the rest.
  Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
  Assert.assertEquals(expectedLenient, mergeLenient(analysis1, analysis2).getAggregators());

  // Simulate multi-level merge
  Assert.assertEquals(
      expectedLenient,
      mergeLenient(mergeLenient(analysis1, analysis2), mergeLenient(analysis1, analysis2)).getAggregators()
  );
}
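For context, a minimal sketch of the conflict this test exercises, separate from the test itself: two segments advertise different aggregator types under the same output name "bar". It assumes the static helper AggregatorFactory.mergeAggregators(List&lt;AggregatorFactory[]&gt;) is the underlying merge routine and that it signals an unmergeable conflict by returning null; MergeConflictSketch is a hypothetical driver class, not part of the Druid codebase.

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;

import java.util.Arrays;

public class MergeConflictSketch
{
  public static void main(String[] args)
  {
    // Segment 1 reports "bar" as a double sum; segment 2 reports it as a double max.
    AggregatorFactory[] fromSegment1 = {new DoubleSumAggregatorFactory("bar", "bar")};
    AggregatorFactory[] fromSegment2 = {new DoubleMaxAggregatorFactory("bar", "bar")};

    // Same output name, different type: the factories cannot be combined, and the
    // merge helper is assumed to report that by returning null (as strict merge does above).
    AggregatorFactory[] merged = AggregatorFactory.mergeAggregators(Arrays.asList(fromSegment1, fromSegment2));
    System.out.println(merged == null); // expected: true
  }
}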
In the class GroupByQueryRunnerTest, the method testEmptySubquery:
@Test
public void testEmptySubquery()
{
  GroupByQuery subquery = GroupByQuery
      .builder()
      .setDataSource(QueryRunnerTestHelper.dataSource)
      .setQuerySegmentSpec(QueryRunnerTestHelper.emptyInterval)
      .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
      .setAggregatorSpecs(
          Arrays.asList(
              QueryRunnerTestHelper.rowsCount,
              new LongSumAggregatorFactory("idx", "index")
          )
      )
      .setGranularity(QueryRunnerTestHelper.dayGran)
      .build();
  // The outer query takes the max of the inner query's "idx" output column.
  GroupByQuery query = GroupByQuery
      .builder()
      .setDataSource(subquery)
      .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
      .setAggregatorSpecs(Arrays.<AggregatorFactory>asList(new DoubleMaxAggregatorFactory("idx", "idx")))
      .setGranularity(QueryRunnerTestHelper.dayGran)
      .build();

  Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
  // The subquery's interval is empty, so no inner rows exist and the outer query returns nothing.
  Assert.assertFalse(results.iterator().hasNext());
}
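The wiring worth noticing is the pair of names in each aggregator (output name first, input field second): the outer DoubleMaxAggregatorFactory("idx", "idx") reads the inner query's "idx" output, not the raw "index" column. In isolation:

// Inner query: sums the raw "index" column into an output column named "idx".
AggregatorFactory inner = new LongSumAggregatorFactory("idx", "index");
// Outer query: takes the max of the inner query's "idx" output; the field name
// refers to the subquery's result column, not to the underlying datasource.
AggregatorFactory outer = new DoubleMaxAggregatorFactory("idx", "idx");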
In the class TopNQueryRunnerTest, the method testTopNOverNullDimensionWithFilter:
@Test
public void testTopNOverNullDimensionWithFilter()
{
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension("null_column")
      .filters(new SelectorDimFilter("null_column", null, null))
      .metric(QueryRunnerTestHelper.indexMetric)
      .threshold(4)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(
          Lists.newArrayList(
              Iterables.concat(
                  QueryRunnerTestHelper.commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();

  Map<String, Object> map = Maps.newHashMap();
  map.put("null_column", null);
  map.put("rows", 1209L);
  map.put("index", 503332.5071372986D);
  map.put("addRowsIndexConstant", 504542.5071372986D);
  map.put("uniques", QueryRunnerTestHelper.UNIQUES_9);
  map.put("maxIndex", 1870.06103515625D);
  map.put("minIndex", 59.02102279663086D);

  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(Arrays.asList(map))
      )
  );
  assertExpectedResults(expectedResults, query);
}
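The filter is the noteworthy part here: a SelectorDimFilter whose value is null matches rows where the dimension has no value at all, so filtering the nonexistent "null_column" on null selects every row (hence rows = 1209, the full dataset). A minimal sketch of that construction; the third constructor argument is the optional extraction function, unused here:

// Matches rows whose "null_column" dimension is missing/null; with a column that
// never exists, that is every row in the datasource.
DimFilter matchesMissing = new SelectorDimFilter("null_column", null, null);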
In the class TopNQueryRunnerTest, the method testFullOnTopNDimExtractionAllNulls:
@Test
public void testFullOnTopNDimExtractionAllNulls()
{
  String jsFn = "function(str) { return null; }";
  ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance());
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(
          new ExtractionDimensionSpec(
              QueryRunnerTestHelper.marketDimension,
              QueryRunnerTestHelper.marketDimension,
              jsExtractionFn
          )
      )
      .metric(QueryRunnerTestHelper.indexMetric)
      .threshold(4)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(
          Lists.<AggregatorFactory>newArrayList(
              Iterables.concat(
                  QueryRunnerTestHelper.commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();

  Map<String, Object> expectedMap = new HashMap<>();
  expectedMap.put(QueryRunnerTestHelper.marketDimension, null);
  expectedMap.put("rows", 1209L);
  expectedMap.put("index", 503332.5071372986D);
  expectedMap.put("addRowsIndexConstant", 504542.5071372986D);
  expectedMap.put("uniques", 9.019833517963864);
  expectedMap.put("maxIndex", 1870.06103515625D);
  expectedMap.put("minIndex", 59.02102279663086D);

  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(Arrays.<Map<String, Object>>asList(expectedMap))
      )
  );
  assertExpectedResults(expectedResults, query);
}
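The extraction function is what drives the single-row result: the JavaScript maps every market value to null, so all rows collapse into one null bucket. A minimal standalone sketch of the function's behavior; that apply returns null for a null-producing script is an inference from the test's expectations rather than something shown here:

ExtractionFn fn = new JavaScriptExtractionFn(
    "function(str) { return null; }",
    false, // not injective: many inputs map to one output
    JavaScriptConfig.getEnabledInstance()
);
String mapped = fn.apply("spot"); // expected: null, like every other market value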
In the class TopNQueryRunnerTest, the method testFullOnTopNLongColumn:
@Test
public void testFullOnTopNLongColumn()
{
  TopNQuery query = new TopNQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .dimension(new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG))
      .metric("maxIndex")
      .threshold(4)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .aggregators(
          Lists.<AggregatorFactory>newArrayList(
              Iterables.concat(
                  QueryRunnerTestHelper.commonAggregators,
                  Lists.newArrayList(
                      new DoubleMaxAggregatorFactory("maxIndex", "index"),
                      new DoubleMinAggregatorFactory("minIndex", "index")
                  )
              )
          )
      )
      .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
      .build();

  List<Result<TopNResultValue>> expectedResults = Arrays.asList(
      new Result<TopNResultValue>(
          new DateTime("2011-01-12T00:00:00.000Z"),
          new TopNResultValue(
              Arrays.<Map<String, Object>>asList(
                  ImmutableMap.<String, Object>builder()
                      .put("ql_alias", 1400L)
                      .put(QueryRunnerTestHelper.indexMetric, 217725.42022705078D)
                      .put("rows", 279L)
                      .put("addRowsIndexConstant", 218005.42022705078D)
                      .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
                      .put("maxIndex", 1870.06103515625D)
                      .put("minIndex", 91.27055358886719D)
                      .build(),
                  ImmutableMap.<String, Object>builder()
                      .put("ql_alias", 1600L)
                      .put(QueryRunnerTestHelper.indexMetric, 210865.67966461182D)
                      .put("rows", 279L)
                      .put("addRowsIndexConstant", 211145.67966461182D)
                      .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
                      .put("maxIndex", 1862.7379150390625D)
                      .put("minIndex", 99.2845230102539D)
                      .build(),
                  ImmutableMap.<String, Object>builder()
                      .put("ql_alias", 1000L)
                      .put(QueryRunnerTestHelper.indexMetric, 12270.807106018066D)
                      .put("rows", 93L)
                      .put("addRowsIndexConstant", 12364.807106018066D)
                      .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
                      .put("maxIndex", 277.2735290527344D)
                      .put("minIndex", 71.31593322753906D)
                      .build(),
                  ImmutableMap.<String, Object>builder()
                      .put("ql_alias", 1200L)
                      .put(QueryRunnerTestHelper.indexMetric, 12086.472755432129D)
                      .put("rows", 93L)
                      .put("addRowsIndexConstant", 12180.472755432129D)
                      .put("uniques", QueryRunnerTestHelper.UNIQUES_1)
                      .put("maxIndex", 193.78756713867188D)
                      .put("minIndex", 84.71052551269531D)
                      .build()
              )
          )
      )
  );
  assertExpectedResults(expectedResults, query);
}
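Worth noting: because the dimension spec declares ValueType.LONG, the grouped dimension values come back as boxed longs, which is why the expected maps key "ql_alias" to long literals (1400L, 1600L, and so on) instead of strings. The spec in isolation:

// Reads the "qualityLong" column as a LONG-typed dimension named "ql_alias",
// so result rows carry java.lang.Long values rather than Strings.
DimensionSpec spec = new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG);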