Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the Apache Druid project (druid-io/druid), taken from the class TopNQueryRunnerTest, method test_topN_orderByLongNumericColumnWithNulls_returnsDescendingResults:
// Verifies that a TopN query ordered by a numeric LONG metric ("longNumericNull")
// returns rows in descending metric order, with null-aggregate rows placed last.
@Test
public void test_topN_orderByLongNumericColumnWithNulls_returnsDescendingResults() {
// Dimension: the "index" column read as LONG (aliased "index_alias").
// Metric: LongSum over "longNumericNull", ordered via NumericTopNMetricSpec.
// Threshold 10000 is large enough to return every row in SECOND_ONLY.
TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.DATA_SOURCE).granularity(QueryRunnerTestHelper.ALL_GRAN).dimension(new DefaultDimensionSpec("index", "index_alias", ColumnType.LONG)).metric(new NumericTopNMetricSpec("longNumericNull")).threshold(10000).aggregators(new LongSumAggregatorFactory("longNumericNull", "longNumericNull")).intervals(QueryRunnerTestHelper.SECOND_ONLY).build();
// Expected ordering: descending by "longNumericNull" (80, 70, 70, 70, 50, ... 10),
// then the three rows whose aggregate is null (NullHandling.defaultLongValue()) at the end.
// NOTE(review): the relative order of equal-metric rows (e.g. the three 70s) presumably
// reflects the engine's tie-breaking — confirm it is deterministic for this data set.
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(new Result<>(DateTimes.of("2011-04-02T00:00:00.000Z"), new TopNResultValue(Arrays.asList(ImmutableMap.<String, Object>builder().put("index_alias", 97L).put("longNumericNull", 80L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 135L).put("longNumericNull", 70L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 1049L).put("longNumericNull", 70L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 1321L).put("longNumericNull", 70L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 110L).put("longNumericNull", 50L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 1144L).put("longNumericNull", 50L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 1193L).put("longNumericNull", 50L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 113L).put("longNumericNull", 40L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 112L).put("longNumericNull", 20L).build(), ImmutableMap.<String, Object>builder().put("index_alias", 147L).put("longNumericNull", 10L).build(), makeRowWithNulls("index_alias", 114L, "longNumericNull", NullHandling.defaultLongValue()), makeRowWithNulls("index_alias", 126L, "longNumericNull", NullHandling.defaultLongValue()), makeRowWithNulls("index_alias", 166L, "longNumericNull", NullHandling.defaultLongValue())))));
assertExpectedResults(expectedResults, query);
}
Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the Apache Druid project (druid-io/druid), taken from the class IndexMergerV9WithSpatialIndexTest, method testSpatialQueryWithOtherSpatialDim:
// Verifies that a radius-bound spatial filter on the alternate spatial dimension
// ("spatialIsRad") matches exactly one row over the queried week.
@Test
public void testSpatialQueryWithOtherSpatialDim() {
// Radius filter: circle of radius 5 centered at the origin.
SpatialDimFilter radiusFilter = new SpatialDimFilter("spatialIsRad", new RadiusBound(new float[] { 0.0f, 0.0f }, 5));
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
    .dataSource("test")
    .granularity(Granularities.ALL)
    .intervals(Collections.singletonList(Intervals.of("2013-01-01/2013-01-07")))
    .filters(radiusFilter)
    .aggregators(Arrays.asList(new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val")))
    .build();
// Exactly one matching row, contributing val = 13.
List<Result<TimeseriesResultValue>> expectedResults = Collections.singletonList(
    new Result<>(
        DateTimes.of("2013-01-01T00:00:00.000Z"),
        new TimeseriesResultValue(ImmutableMap.<String, Object>builder().put("rows", 1L).put("val", 13L).build())
    )
);
try {
TimeseriesQueryRunnerFactory factory = new TimeseriesQueryRunnerFactory(
    new TimeseriesQueryQueryToolChest(),
    new TimeseriesQueryEngine(),
    QueryRunnerTestHelper.NOOP_QUERYWATCHER
);
QueryRunner runner = new FinalizeResultsQueryRunner(factory.createRunner(segment), factory.getToolchest());
TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
} catch (Exception e) {
// Rethrow with cause preserved so JUnit reports the full stack trace.
throw new RuntimeException(e);
}
}
Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the Apache Druid project (druid-io/druid), taken from the class TopNBinaryFnBenchmark, method setUp:
/**
 * Prepares the benchmark fixture: two partial TopN results ({@code result1} with all
 * metric values 1, {@code result2} with all metric values 2) and the {@link TopNBinaryFn}
 * that merges them, configured with {@code aggCount} aggregators and
 * {@code postAggCount} post-aggregators.
 */
@Override
protected void setUp() {
// Inputs shared by every ArithmeticPostAggregator built below.
final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L);
final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows");
final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index");

// "rows" count, "index" long-sum, plus (aggCount - 1) extra counts named "rows1".."rows{aggCount-1}".
final List<AggregatorFactory> aggregatorFactories = new ArrayList<>();
aggregatorFactories.add(new CountAggregatorFactory("rows"));
aggregatorFactories.add(new LongSumAggregatorFactory("index", "index"));
for (int n = 1; n < aggCount; n++) {
aggregatorFactories.add(new CountAggregatorFactory("rows" + n));
}

// Each post-aggregator computes const + rows + index under a distinct output name.
final List<PostAggregator> postAggregators = new ArrayList<>();
for (int n = 0; n < postAggCount; n++) {
postAggregators.add(new ArithmeticPostAggregator("addrowsindexconstant" + n, "+", Lists.newArrayList(constant, rowsPostAgg, indexPostAgg)));
}

final DateTime currTime = DateTimes.nowUtc();
result1 = new Result<>(currTime, new TopNResultValue(buildRows(1L)));
result2 = new Result<>(currTime, new TopNResultValue(buildRows(2L)));
fn = new TopNBinaryFn(Granularities.ALL, new DefaultDimensionSpec("testdim", null), new NumericTopNMetricSpec("index"), 100, aggregatorFactories, postAggregators);
}

// Builds `threshold` rows, one per dimension value "0".."threshold-1", where every
// metric ("rows", "rows0".."rows{aggCount-1}", "index") carries the given value.
private List<Map<String, Object>> buildRows(long value) {
final List<Map<String, Object>> rows = new ArrayList<>();
for (int i = 0; i < threshold; i++) {
final Map<String, Object> row = new HashMap<>();
row.put("testdim", "" + i);
row.put("rows", value);
for (int j = 0; j < aggCount; j++) {
row.put("rows" + j, value);
}
row.put("index", value);
rows.add(row);
}
return rows;
}
Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the Apache Druid project (druid-io/druid), taken from the class DefaultLimitSpecTest, method testBuildWithExplicitOrder:
// Verifies that a limit spec with an explicit ascending order on "k1" and a limit of 2
// keeps exactly the first two rows of the already-ordered test data.
@Test
public void testBuildWithExplicitOrder() {
// Order ascending on "k1", keep at most two rows.
DefaultLimitSpec limitSpec = new DefaultLimitSpec(
    ImmutableList.of(new OrderByColumnSpec("k1", OrderByColumnSpec.Direction.ASCENDING)),
    2
);
GroupByQuery query = GroupByQuery.builder()
    .setDataSource("dummy")
    .setInterval("1000/3000")
    .setDimensions(new DefaultDimensionSpec("k1", "k1"))
    .setAggregatorSpecs(new LongSumAggregatorFactory("k2", "k2"))
    .setPostAggregatorSpecs(ImmutableList.of(new ConstantPostAggregator("k3", 1L)))
    .setGranularity(Granularities.NONE)
    .build();
Function<Sequence<ResultRow>, Sequence<ResultRow>> limitFn = limitSpec.build(query);
List<ResultRow> limited = limitFn.apply(Sequences.simple(testRowsList)).toList();
Assert.assertEquals(ImmutableList.of(testRowsList.get(0), testRowsList.get(1)), limited);
}
Usage example of org.apache.druid.query.aggregation.LongSumAggregatorFactory from the Apache Druid project (druid-io/druid), taken from the class GroupByQueryRunnerTest, method testGroupByTimeoutContextOverride:
// Verifies that a GroupBy query still returns correct results when the query context
// overrides the timeout (QueryContexts.TIMEOUT_KEY = 60000 ms).
@Test
public void testGroupByTimeoutContextOverride() {
// Daily grain over FIRST_TO_THIRD, grouped by "quality" (aliased "alias"),
// aggregating row count and the long-sum of "index".
GroupByQuery query = makeQueryBuilder().setDataSource(QueryRunnerTestHelper.DATA_SOURCE).setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD).setDimensions(new DefaultDimensionSpec("quality", "alias")).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")).setGranularity(QueryRunnerTestHelper.DAY_GRAN).overrideContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 60000)).build();
// One expected row per (day, quality) pair for 2011-04-01 and 2011-04-02; the timeout
// override must not change the result set relative to the default-timeout baseline.
List<ResultRow> expectedResults = Arrays.asList(makeRow(query, "2011-04-01", "alias", "automotive", "rows", 1L, "idx", 135L), makeRow(query, "2011-04-01", "alias", "business", "rows", 1L, "idx", 118L), makeRow(query, "2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 158L), makeRow(query, "2011-04-01", "alias", "health", "rows", 1L, "idx", 120L), makeRow(query, "2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 2870L), makeRow(query, "2011-04-01", "alias", "news", "rows", 1L, "idx", 121L), makeRow(query, "2011-04-01", "alias", "premium", "rows", 3L, "idx", 2900L), makeRow(query, "2011-04-01", "alias", "technology", "rows", 1L, "idx", 78L), makeRow(query, "2011-04-01", "alias", "travel", "rows", 1L, "idx", 119L), makeRow(query, "2011-04-02", "alias", "automotive", "rows", 1L, "idx", 147L), makeRow(query, "2011-04-02", "alias", "business", "rows", 1L, "idx", 112L), makeRow(query, "2011-04-02", "alias", "entertainment", "rows", 1L, "idx", 166L), makeRow(query, "2011-04-02", "alias", "health", "rows", 1L, "idx", 113L), makeRow(query, "2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 2447L), makeRow(query, "2011-04-02", "alias", "news", "rows", 1L, "idx", 114L), makeRow(query, "2011-04-02", "alias", "premium", "rows", 3L, "idx", 2505L), makeRow(query, "2011-04-02", "alias", "technology", "rows", 1L, "idx", 97L), makeRow(query, "2011-04-02", "alias", "travel", "rows", 1L, "idx", 126L));
Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
TestHelper.assertExpectedObjects(expectedResults, results, "override-timeout");
}
Aggregations