Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
The class IncrementalIndexIngestionTest, method testOnHeapIncrementalIndexClose.
@Test
public void testOnHeapIncrementalIndexClose() throws Exception
{
  // Prepare the mock and expect exactly one close() call
  Aggregator mockedAggregator = EasyMock.createMock(LongMaxAggregator.class);
  mockedAggregator.close();
  EasyMock.expectLastCall().times(1);
  final IncrementalIndex genericIndex = indexCreator.createIndex(
      new IncrementalIndexSchema.Builder()
          .withQueryGranularity(Granularities.MINUTE)
          .withMetrics(new LongMaxAggregatorFactory("max", "max"))
          .build()
  );
  // This test is specific to the on-heap index
  if (!(genericIndex instanceof OnheapIncrementalIndex)) {
    return;
  }
  final OnheapIncrementalIndex index = (OnheapIncrementalIndex) genericIndex;
  index.add(new MapBasedInputRow(0, Collections.singletonList("billy"), ImmutableMap.of("billy", 1, "max", 1)));
  // Override the real aggregator with the mock
  index.concurrentGet(0)[0] = mockedAggregator;
  // Close the index and validate the expectations
  EasyMock.replay(mockedAggregator);
  index.close();
  EasyMock.verify(mockedAggregator);
}
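For readers unfamiliar with the mocking pattern above, here is a self-contained sketch of the same EasyMock record/replay/verify cycle, using a plain java.io.Closeable in place of the mocked LongMaxAggregator. The class name CloseVerificationSketch and the direct mock.close() call standing in for the code under test are illustrative, not part of the Druid test:

import java.io.Closeable;

import org.easymock.EasyMock;

public class CloseVerificationSketch {
  public static void main(String[] args) throws Exception {
    // Record phase: expect exactly one close() call.
    Closeable mock = EasyMock.createMock(Closeable.class);
    mock.close();
    EasyMock.expectLastCall().times(1);
    // Replay phase: from here on, interactions are checked against the recording.
    EasyMock.replay(mock);
    mock.close(); // in the real test, index.close() triggers this indirectly
    // Verify phase: fails if close() was not called exactly once.
    EasyMock.verify(mock);
  }
}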
Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
The class SegmentMetadataQueryQueryToolChestTest, method testMergeAggregatorsConflict.
@Test
public void testMergeAggregatorsConflict()
{
  final SegmentAnalysis analysis1 = new SegmentAnalysis(
      "id",
      null,
      new HashMap<>(),
      0,
      0,
      ImmutableMap.of(
          "foo", new LongSumAggregatorFactory("foo", "foo"),
          "bar", new DoubleSumAggregatorFactory("bar", "bar")
      ),
      null,
      null,
      null
  );
  final SegmentAnalysis analysis2 = new SegmentAnalysis(
      "id",
      null,
      new HashMap<>(),
      0,
      0,
      ImmutableMap.of(
          "foo", new LongSumAggregatorFactory("foo", "foo"),
          "bar", new DoubleMaxAggregatorFactory("bar", "bar"),
          "baz", new LongMaxAggregatorFactory("baz", "baz")
      ),
      null,
      null,
      null
  );
  // "bar" has conflicting aggregator types, so the lenient merge maps it to null;
  // "baz" appears in only one analysis and survives as-is.
  final Map<String, AggregatorFactory> expectedLenient = new HashMap<>();
  expectedLenient.put("foo", new LongSumAggregatorFactory("foo", "foo"));
  expectedLenient.put("bar", null);
  expectedLenient.put("baz", new LongMaxAggregatorFactory("baz", "baz"));
  Assert.assertNull(mergeStrict(analysis1, analysis2).getAggregators());
  Assert.assertEquals(expectedLenient, mergeLenient(analysis1, analysis2).getAggregators());
  // Simulate a multi-level merge
  Assert.assertEquals(
      expectedLenient,
      mergeLenient(mergeLenient(analysis1, analysis2), mergeLenient(analysis1, analysis2)).getAggregators()
  );
}
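The root of the conflict is that the two analyses declare "bar" with different aggregator types, which no merge strategy can reconcile. Below is a minimal sketch of just that mismatch; it only illustrates the type disagreement, since the actual merge logic in the tool chest decides conflicts through the factories' merge compatibility rather than a bare equals() check:

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;

public class MergeConflictSketch {
  public static void main(String[] args) {
    // "bar" is a double-sum in one segment analysis and a double-max in the other.
    AggregatorFactory barSum = new DoubleSumAggregatorFactory("bar", "bar");
    AggregatorFactory barMax = new DoubleMaxAggregatorFactory("bar", "bar");
    // Conflicting definitions: strict merge nulls the whole aggregator map,
    // lenient merge keeps the map but maps "bar" itself to null.
    System.out.println(barSum.equals(barMax)); // false
  }
}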
Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
The class CalciteQueryTest, method testMinMaxAvgDailyCountWithLimit.
@Test
public void testMinMaxAvgDailyCountWithLimit() throws Exception
{
  // Cannot vectorize due to virtual columns.
  cannotVectorize();
  testQuery(
      "SELECT * FROM ("
      + " SELECT max(cnt), min(cnt), avg(cnt), TIME_EXTRACT(max(t), 'EPOCH') last_time, count(1) num_days FROM (\n"
      + " SELECT TIME_FLOOR(__time, 'P1D') AS t, count(1) cnt\n"
      + " FROM \"foo\"\n"
      + " GROUP BY 1\n"
      + " )"
      + ") LIMIT 1\n",
      ImmutableList.of(
          GroupByQuery.builder()
              .setDataSource(
                  new QueryDataSource(
                      Druids.newTimeseriesQueryBuilder()
                            .dataSource(CalciteTests.DATASOURCE1)
                            .granularity(new PeriodGranularity(Period.days(1), null, DateTimeZone.UTC))
                            .intervals(querySegmentSpec(Filtration.eternity()))
                            .aggregators(new CountAggregatorFactory("a0"))
                            .context(getTimeseriesContextWithFloorTime(TIMESERIES_CONTEXT_BY_GRAN, "d0"))
                            .build()
                  )
              )
              .setInterval(querySegmentSpec(Filtration.eternity()))
              .setGranularity(Granularities.ALL)
              .setAggregatorSpecs(
                  useDefault
                  ? aggregators(
                      new LongMaxAggregatorFactory("_a0", "a0"),
                      new LongMinAggregatorFactory("_a1", "a0"),
                      new LongSumAggregatorFactory("_a2:sum", "a0"),
                      new CountAggregatorFactory("_a2:count"),
                      new LongMaxAggregatorFactory("_a3", "d0"),
                      new CountAggregatorFactory("_a4")
                  )
                  : aggregators(
                      new LongMaxAggregatorFactory("_a0", "a0"),
                      new LongMinAggregatorFactory("_a1", "a0"),
                      new LongSumAggregatorFactory("_a2:sum", "a0"),
                      new FilteredAggregatorFactory(
                          new CountAggregatorFactory("_a2:count"),
                          not(selector("a0", null, null))
                      ),
                      new LongMaxAggregatorFactory("_a3", "d0"),
                      new CountAggregatorFactory("_a4")
                  )
              )
              .setPostAggregatorSpecs(
                  ImmutableList.of(
                      new ArithmeticPostAggregator(
                          "_a2",
                          "quotient",
                          ImmutableList.of(
                              new FieldAccessPostAggregator(null, "_a2:sum"),
                              new FieldAccessPostAggregator(null, "_a2:count")
                          )
                      ),
                      expressionPostAgg("p0", "timestamp_extract(\"_a3\",'EPOCH','UTC')")
                  )
              )
              .setContext(QUERY_CONTEXT_DEFAULT)
              .build()
      ),
      ImmutableList.of(new Object[]{1L, 1L, 1L, 978480000L, 6L})
  );
}
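One detail worth isolating from the expected plan: SQL AVG is decomposed into a sum ("_a2:sum") and a count ("_a2:count"), then divided back together by a "quotient" ArithmeticPostAggregator. Here is a small sketch of that post-aggregator on its own; the input map fed to compute() is made-up data for illustration:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;

public class AvgDecompositionSketch {
  public static void main(String[] args) {
    // avg = sum / count, exactly as in the expected plan above.
    ArithmeticPostAggregator avg = new ArithmeticPostAggregator(
        "_a2",
        "quotient",
        ImmutableList.of(
            new FieldAccessPostAggregator(null, "_a2:sum"),
            new FieldAccessPostAggregator(null, "_a2:count")
        )
    );
    // compute() evaluates the quotient over already-aggregated values.
    System.out.println(avg.compute(ImmutableMap.of("_a2:sum", 6L, "_a2:count", 6L))); // 1.0
  }
}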
Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
The class CalciteJoinQueryTest, method testSemiAndAntiJoinSimultaneouslyUsingWhereInSubquery.
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testSemiAndAntiJoinSimultaneouslyUsingWhereInSubquery(Map<String, Object> queryContext) throws Exception
{
  cannotVectorize();
  testQuery(
      "SELECT dim1, COUNT(*) FROM foo\n"
      + "WHERE dim1 IN ('abc', 'def')\n"
      + "AND __time IN (SELECT MAX(__time) FROM foo)\n"
      + "AND __time NOT IN (SELECT MIN(__time) FROM foo)\n"
      + "GROUP BY 1",
      queryContext,
      ImmutableList.of(
          GroupByQuery.builder()
              .setDataSource(
                  join(
                      join(
                          join(
                              new TableDataSource(CalciteTests.DATASOURCE1),
                              new QueryDataSource(
                                  Druids.newTimeseriesQueryBuilder()
                                        .dataSource(CalciteTests.DATASOURCE1)
                                        .intervals(querySegmentSpec(Filtration.eternity()))
                                        .granularity(Granularities.ALL)
                                        .aggregators(new LongMaxAggregatorFactory("a0", "__time"))
                                        .context(QUERY_CONTEXT_DEFAULT)
                                        .build()
                              ),
                              "j0.",
                              "(\"__time\" == \"j0.a0\")",
                              JoinType.INNER
                          ),
                          new QueryDataSource(
                              GroupByQuery.builder()
                                  .setDataSource(
                                      new QueryDataSource(
                                          Druids.newTimeseriesQueryBuilder()
                                                .dataSource(CalciteTests.DATASOURCE1)
                                                .intervals(querySegmentSpec(Filtration.eternity()))
                                                .granularity(Granularities.ALL)
                                                .aggregators(new LongMinAggregatorFactory("a0", "__time"))
                                                .context(QUERY_CONTEXT_DEFAULT)
                                                .build()
                                      )
                                  )
                                  .setInterval(querySegmentSpec(Filtration.eternity()))
                                  .setGranularity(Granularities.ALL)
                                  .setAggregatorSpecs(
                                      new CountAggregatorFactory("_a0"),
                                      NullHandling.sqlCompatible()
                                      ? new FilteredAggregatorFactory(
                                          new CountAggregatorFactory("_a1"),
                                          not(selector("a0", null, null))
                                      )
                                      : new CountAggregatorFactory("_a1")
                                  )
                                  .setContext(QUERY_CONTEXT_DEFAULT)
                                  .build()
                          ),
                          "_j0.",
                          "1",
                          JoinType.INNER
                      ),
                      new QueryDataSource(
                          Druids.newTimeseriesQueryBuilder()
                                .dataSource(CalciteTests.DATASOURCE1)
                                .intervals(querySegmentSpec(Filtration.eternity()))
                                .granularity(Granularities.ALL)
                                .aggregators(new LongMinAggregatorFactory("a0", "__time"))
                                .postAggregators(expressionPostAgg("p0", "1"))
                                .context(QUERY_CONTEXT_DEFAULT)
                                .build()
                      ),
                      "__j0.",
                      "(\"__time\" == \"__j0.a0\")",
                      JoinType.LEFT
                  )
              )
              .setInterval(querySegmentSpec(Filtration.eternity()))
              .setGranularity(Granularities.ALL)
              .setDimFilter(
                  and(
                      in("dim1", ImmutableList.of("abc", "def"), null),
                      or(
                          selector("_j0._a0", "0", null),
                          and(
                              selector("__j0.p0", null, null),
                              expressionFilter("(\"_j0._a1\" >= \"_j0._a0\")")
                          )
                      )
                  )
              )
              .setDimensions(dimensions(new DefaultDimensionSpec("dim1", "d0", ColumnType.STRING)))
              .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0")))
              .setContext(queryContext)
              .build()
      ),
      ImmutableList.of(new Object[]{"abc", 1L})
  );
}
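The semi-join branch of the plan reduces __time IN (SELECT MAX(__time) FROM foo) to a single-row timeseries computing a global long max over __time. At the factory level, merging partial maxes can be sketched as follows; combine() is the factory's query-time merge, and the two timestamps are illustrative values, not data from the test:

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;

public class MaxTimeSketch {
  public static void main(String[] args) {
    // Same factory the expected plan builds for the MAX(__time) subquery.
    AggregatorFactory maxTime = new LongMaxAggregatorFactory("a0", "__time");
    // combine() merges two partial maxes; the later timestamp wins.
    System.out.println(maxTime.combine(978307200000L, 978480000000L)); // 978480000000
  }
}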
Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.
The class CalciteCorrelatedQueryTest, method testCorrelatedSubqueryWithLeftFilter_leftDirectAccessDisabled.
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCorrelatedSubqueryWithLeftFilter_leftDirectAccessDisabled(Map<String, Object> queryContext) throws Exception
{
  cannotVectorize();
  testQuery(
      "select country, ANY_VALUE(\n"
      + " select max(\"users\") from (\n"
      + " select floor(__time to day), count(*) \"users\" from visits f where f.country = visits.country group by 1\n"
      + " )\n"
      + " ) as \"dailyVisits\"\n"
      + "from visits \n"
      + " where city = 'B' and __time between '2021-01-01 01:00:00' AND '2021-01-02 23:59:59'"
      + " group by 1",
      queryContext,
      ImmutableList.of(
          GroupByQuery.builder()
              .setDataSource(
                  join(
                      new QueryDataSource(
                          newScanQueryBuilder()
                              .dataSource(CalciteTests.USERVISITDATASOURCE)
                              .intervals(querySegmentSpec(Intervals.of("2021-01-01T01:00:00.000Z/2021-01-02T23:59:59.001Z")))
                              .filters(selector("city", "B", null))
                              .columns("__time", "city", "country")
                              .build()
                      ),
                      new QueryDataSource(
                          GroupByQuery.builder()
                              .setDataSource(
                                  GroupByQuery.builder()
                                      .setDataSource(CalciteTests.USERVISITDATASOURCE)
                                      .setQuerySegmentSpec(querySegmentSpec(Intervals.ETERNITY))
                                      .setVirtualColumns(
                                          new ExpressionVirtualColumn(
                                              "v0",
                                              "timestamp_floor(\"__time\",'P1D',null,'UTC')",
                                              ColumnType.LONG,
                                              TestExprMacroTable.INSTANCE
                                          )
                                      )
                                      .setDimFilter(not(selector("country", null, null)))
                                      .setDimensions(
                                          new DefaultDimensionSpec("v0", "d0", ColumnType.LONG),
                                          new DefaultDimensionSpec("country", "d1")
                                      )
                                      .setAggregatorSpecs(new CountAggregatorFactory("a0"))
                                      .setContext(withTimestampResultContext(queryContext, "d0", Granularities.DAY))
                                      .setGranularity(new AllGranularity())
                                      .build()
                              )
                              .setQuerySegmentSpec(querySegmentSpec(Intervals.ETERNITY))
                              .setDimensions(new DefaultDimensionSpec("d1", "_d0"))
                              .setAggregatorSpecs(new LongMaxAggregatorFactory("_a0", "a0"))
                              .setGranularity(new AllGranularity())
                              .setContext(queryContext)
                              .build()
                      ),
                      "j0.",
                      equalsCondition(makeColumnExpression("country"), makeColumnExpression("j0._d0")),
                      JoinType.LEFT
                  )
              )
              .setQuerySegmentSpec(querySegmentSpec(Intervals.ETERNITY))
              .setDimensions(new DefaultDimensionSpec("country", "d0"))
              .setAggregatorSpecs(new LongAnyAggregatorFactory("a0", "j0._a0"))
              .setGranularity(new AllGranularity())
              .setContext(queryContext)
              .build()
      ),
      ImmutableList.of(new Object[]{"canada", 4L})
  );
}
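Note how the correlated subquery becomes a LEFT join: the inner GROUP BY counts visits per day into "a0", LongMaxAggregatorFactory("_a0", "a0") takes the per-country maximum of those daily counts, and LongAnyAggregatorFactory simply carries the joined value through the outer aggregation. A tiny sketch of the max factory's wiring follows; getName() and requiredFields() are standard AggregatorFactory accessors, and the printed field list is an assumption about this factory's output:

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;

public class DailyMaxSketch {
  public static void main(String[] args) {
    // The outer max over the inner daily-count column "a0".
    AggregatorFactory dailyMax = new LongMaxAggregatorFactory("_a0", "a0");
    System.out.println(dailyMax.getName());        // _a0
    System.out.println(dailyMax.requiredFields()); // [a0]
  }
}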