Example 1 with LongMinAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMinAggregatorFactory in project druid by druid-io.

From class CalciteQueryTest, method testQueryWithSelectProjectAndIdentityProjectDoesNotRename.

@Test
public void testQueryWithSelectProjectAndIdentityProjectDoesNotRename() throws Exception {
    cannotVectorize();
    requireMergeBuffers(3);
    testQuery(
        PLANNER_CONFIG_NO_HLL.withOverrides(
            ImmutableMap.of(PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, "true")
        ),
        "SELECT\n"
            + "(SUM(CASE WHEN (TIMESTAMP '2000-01-04 17:00:00'<=__time AND __time<TIMESTAMP '2022-01-05 17:00:00') THEN 1 ELSE 0 END)*1.0/COUNT(DISTINCT CASE WHEN (TIMESTAMP '2000-01-04 17:00:00'<=__time AND __time<TIMESTAMP '2022-01-05 17:00:00') THEN dim1 END))\n"
            + "FROM druid.foo\n"
            + "GROUP BY ()",
        CalciteTests.REGULAR_USER_AUTH_RESULT,
        ImmutableList.of(
            GroupByQuery.builder()
                .setDataSource(
                    new QueryDataSource(
                        GroupByQuery.builder()
                            .setDataSource(CalciteTests.DATASOURCE1)
                            .setInterval(querySegmentSpec(Filtration.eternity()))
                            .setGranularity(Granularities.ALL)
                            .setVirtualColumns(
                                expressionVirtualColumn(
                                    "v0",
                                    "case_searched(((947005200000 <= \"__time\") && (\"__time\" < 1641402000000)),\"dim1\",null)",
                                    ColumnType.STRING
                                )
                            )
                            .setDimensions(dimensions(new DefaultDimensionSpec("v0", "d0", ColumnType.STRING)))
                            .setAggregatorSpecs(
                                aggregators(
                                    new LongSumAggregatorFactory(
                                        "a0",
                                        null,
                                        "case_searched(((947005200000 <= \"__time\") && (\"__time\" < 1641402000000)),1,0)",
                                        ExprMacroTable.nil()
                                    ),
                                    new GroupingAggregatorFactory("a1", ImmutableList.of("v0"))
                                )
                            )
                            .setSubtotalsSpec(ImmutableList.of(ImmutableList.of("d0"), ImmutableList.of()))
                            .setContext(QUERY_CONTEXT_DEFAULT)
                            .build()
                    )
                )
                .setInterval(querySegmentSpec(Filtration.eternity()))
                .setGranularity(Granularities.ALL)
                .setAggregatorSpecs(
                    aggregators(
                        new FilteredAggregatorFactory(
                            new LongMinAggregatorFactory("_a0", "a0"),
                            selector("a1", "1", null)
                        ),
                        new FilteredAggregatorFactory(
                            new CountAggregatorFactory("_a1"),
                            and(not(selector("d0", null, null)), selector("a1", "0", null))
                        )
                    )
                )
                .setPostAggregatorSpecs(
                    Collections.singletonList(
                        new ExpressionPostAggregator("p0", "((\"_a0\" * 1.0) / \"_a1\")", null, ExprMacroTable.nil())
                    )
                )
                .setContext(QUERY_CONTEXT_DEFAULT)
                .build()
        ),
        ImmutableList.of(new Object[] { 1.0d })
    );
}
Also used: FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory), QueryDataSource (org.apache.druid.query.QueryDataSource), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), GroupingAggregatorFactory (org.apache.druid.query.aggregation.GroupingAggregatorFactory), ExpressionPostAggregator (org.apache.druid.query.aggregation.post.ExpressionPostAggregator), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), LongMinAggregatorFactory (org.apache.druid.query.aggregation.LongMinAggregatorFactory), Test (org.junit.Test)
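
The outer query in this expected plan recovers the grouped sum through a filtered long-min: the LongMinAggregatorFactory is wrapped in a FilteredAggregatorFactory so the minimum is taken only over rows whose grouping indicator "a1" equals "1". Below is a minimal sketch of that pattern in isolation (not part of the test), using the standard Druid aggregation and filter classes and reusing the generated names "_a0", "a0", and "a1"; the selector(...) helper in the expected query corresponds to the same SelectorDimFilter.

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.aggregation.LongMinAggregatorFactory;
import org.apache.druid.query.filter.SelectorDimFilter;

public class FilteredLongMinSketch {
    // Minimum of the long column "a0", computed only over rows where "a1" equals "1".
    public static AggregatorFactory filteredLongMin() {
        return new FilteredAggregatorFactory(
            new LongMinAggregatorFactory("_a0", "a0"),
            new SelectorDimFilter("a1", "1", null)
        );
    }
}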

Example 2 with LongMinAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMinAggregatorFactory in project druid by druid-io.

From class SchemaEvolutionTest, method testNumericEvolutionFiltering.

@Test
@Parameters(method = "doVectorize")
public void testNumericEvolutionFiltering(boolean doVectorize) {
    final TimeseriesQueryRunnerFactory factory = QueryRunnerTestHelper.newTimeseriesQueryRunnerFactory();
    // "c1" changes from string(1) -> long(2) -> float(3) -> nonexistent(4)
    // test behavior of filtering
    final TimeseriesQuery query = Druids
        .newTimeseriesQueryBuilder()
        .dataSource(DATA_SOURCE)
        .intervals("1000/3000")
        .filters(new BoundDimFilter("c1", "9", "11", false, false, null, null, StringComparators.NUMERIC))
        .aggregators(
            ImmutableList.of(
                new LongSumAggregatorFactory("a", "c1"),
                new DoubleSumAggregatorFactory("b", "c1"),
                new FloatSumAggregatorFactory("d", "c1"),
                new LongMinAggregatorFactory("e", "c1"),
                new CountAggregatorFactory("c")
            )
        )
        .context(ImmutableMap.of(QueryContexts.VECTORIZE_KEY, doVectorize))
        .build();
    // Only string(1) -- which we can filter but not aggregate
    Assert.assertEquals(timeseriesResult(ImmutableMap.of("a", 19L, "b", 19.1, "c", 2L, "d", 19.1f, "e", 9L)), runQuery(query, factory, ImmutableList.of(index1)));
    // Only long(2) -- which we can filter and aggregate
    Assert.assertEquals(timeseriesResult(ImmutableMap.of("a", 19L, "b", 19.0, "c", 2L, "d", 19.0f, "e", 9L)), runQuery(query, factory, ImmutableList.of(index2)));
    // Only float(3) -- which we can't filter, but can aggregate
    Assert.assertEquals(timeseriesResult(ImmutableMap.of("a", 19L, "b", 19.1, "c", 2L, "d", 19.1f, "e", 9L)), runQuery(query, factory, ImmutableList.of(index3)));
    // Only nonexistent(4)
    Assert.assertEquals(timeseriesResult(TestHelper.createExpectedMap("a", NullHandling.defaultLongValue(), "b", NullHandling.defaultDoubleValue(), "c", 0L, "d", NullHandling.defaultFloatValue(), "e", NullHandling.sqlCompatible() ? null : Long.MAX_VALUE)), runQuery(query, factory, ImmutableList.of(index4)));
    // string(1) + long(2) + float(3) + nonexistent(4)
    Assert.assertEquals(timeseriesResult(ImmutableMap.of("a", 57L, "b", 57.2, "c", 6L, "d", 57.20000076293945, "e", 9L)), runQuery(query, factory, ImmutableList.of(index1, index2, index3, index4)));
}
Also used: TimeseriesQueryRunnerFactory (org.apache.druid.query.timeseries.TimeseriesQueryRunnerFactory), BoundDimFilter (org.apache.druid.query.filter.BoundDimFilter), TimeseriesQuery (org.apache.druid.query.timeseries.TimeseriesQuery), DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), FloatSumAggregatorFactory (org.apache.druid.query.aggregation.FloatSumAggregatorFactory), LongMinAggregatorFactory (org.apache.druid.query.aggregation.LongMinAggregatorFactory), Parameters (junitparams.Parameters), Test (org.junit.Test)
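
The index4 assertion above is the one that exercises LongMinAggregatorFactory's behavior over a missing column: the expected value for "e" depends on the null-handling mode. A minimal sketch of just that expectation, assuming the same NullHandling utility the test already uses:

import org.apache.druid.common.config.NullHandling;

public class LongMinMissingColumnExpectation {
    // With no "c1" column to read, longMin yields null in SQL-compatible mode and its
    // identity value Long.MAX_VALUE in replace-with-default mode, matching the index4 case.
    public static Long expectedMin() {
        return NullHandling.sqlCompatible() ? null : Long.MAX_VALUE;
    }
}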

Example 3 with LongMinAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMinAggregatorFactory in project druid by druid-io.

From class SchemaEvolutionTest, method setUp.

@Before
public void setUp() throws IOException {
    NullHandling.initializeForTests();
    // Index1: c1 is a string, c2 nonexistent, "uniques" nonexistent
    index1 = IndexBuilder
        .create()
        .tmpDir(temporaryFolder.newFolder())
        .schema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(new CountAggregatorFactory("cnt"))
                .withRollup(false)
                .build()
        )
        .rows(inputRowsWithDimensions(ImmutableList.of("c1")))
        .buildMMappedIndex();
    // Index2: c1 is a long, c2 is a string, "uniques" is uniques on c2, "longmin" is min on c1
    index2 = IndexBuilder
        .create()
        .tmpDir(temporaryFolder.newFolder())
        .schema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(
                    new CountAggregatorFactory("cnt"),
                    new LongSumAggregatorFactory("c1", "c1"),
                    new HyperUniquesAggregatorFactory("uniques", "c2"),
                    new LongMinAggregatorFactory("longmin", "c1")
                )
                .withRollup(false)
                .build()
        )
        .rows(inputRowsWithDimensions(ImmutableList.of("c2")))
        .buildMMappedIndex();
    // Index3: c1 is a float, c2 is a string, "uniques" is uniques on c2
    index3 = IndexBuilder
        .create()
        .tmpDir(temporaryFolder.newFolder())
        .schema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(
                    new CountAggregatorFactory("cnt"),
                    new DoubleSumAggregatorFactory("c1", "c1"),
                    new HyperUniquesAggregatorFactory("uniques", "c2")
                )
                .withRollup(false)
                .build()
        )
        .rows(inputRowsWithDimensions(ImmutableList.of("c2")))
        .buildMMappedIndex();
    // Index4: c1 is nonexistent, c2 is uniques on c2
    index4 = IndexBuilder
        .create()
        .tmpDir(temporaryFolder.newFolder())
        .schema(
            new IncrementalIndexSchema.Builder()
                .withMetrics(new HyperUniquesAggregatorFactory("c2", "c2"))
                .withRollup(false)
                .build()
        )
        .rows(inputRowsWithDimensions(ImmutableList.of()))
        .buildMMappedIndex();
    if (index4.getAvailableDimensions().size() != 0) {
        // Just double-checking that the exclusions are working properly
        throw new ISE("Expected no dimensions in index4");
    }
}
Also used: CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), HyperUniquesAggregatorFactory (org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory), ISE (org.apache.druid.java.util.common.ISE), IncrementalIndexSchema (org.apache.druid.segment.incremental.IncrementalIndexSchema), LongMinAggregatorFactory (org.apache.druid.query.aggregation.LongMinAggregatorFactory), Before (org.junit.Before)

Example 4 with LongMinAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMinAggregatorFactory in project druid by druid-io.

From class CalciteQueryTest, method testMinMaxAvgDailyCountWithLimit.

@Test
public void testMinMaxAvgDailyCountWithLimit() throws Exception {
    // Cannot vectorize due to virtual columns.
    cannotVectorize();
    testQuery("SELECT * FROM (" + "  SELECT max(cnt), min(cnt), avg(cnt), TIME_EXTRACT(max(t), 'EPOCH') last_time, count(1) num_days FROM (\n" + "      SELECT TIME_FLOOR(__time, 'P1D') AS t, count(1) cnt\n" + "      FROM \"foo\"\n" + "      GROUP BY 1\n" + "  )" + ") LIMIT 1\n", ImmutableList.of(GroupByQuery.builder().setDataSource(new QueryDataSource(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).granularity(new PeriodGranularity(Period.days(1), null, DateTimeZone.UTC)).intervals(querySegmentSpec(Filtration.eternity())).aggregators(new CountAggregatorFactory("a0")).context(getTimeseriesContextWithFloorTime(TIMESERIES_CONTEXT_BY_GRAN, "d0")).build())).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setAggregatorSpecs(useDefault ? aggregators(new LongMaxAggregatorFactory("_a0", "a0"), new LongMinAggregatorFactory("_a1", "a0"), new LongSumAggregatorFactory("_a2:sum", "a0"), new CountAggregatorFactory("_a2:count"), new LongMaxAggregatorFactory("_a3", "d0"), new CountAggregatorFactory("_a4")) : aggregators(new LongMaxAggregatorFactory("_a0", "a0"), new LongMinAggregatorFactory("_a1", "a0"), new LongSumAggregatorFactory("_a2:sum", "a0"), new FilteredAggregatorFactory(new CountAggregatorFactory("_a2:count"), not(selector("a0", null, null))), new LongMaxAggregatorFactory("_a3", "d0"), new CountAggregatorFactory("_a4"))).setPostAggregatorSpecs(ImmutableList.of(new ArithmeticPostAggregator("_a2", "quotient", ImmutableList.of(new FieldAccessPostAggregator(null, "_a2:sum"), new FieldAccessPostAggregator(null, "_a2:count"))), expressionPostAgg("p0", "timestamp_extract(\"_a3\",'EPOCH','UTC')"))).setContext(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { 1L, 1L, 1L, 978480000L, 6L }));
}
Also used: FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory), ArithmeticPostAggregator (org.apache.druid.query.aggregation.post.ArithmeticPostAggregator), FinalizingFieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FinalizingFieldAccessPostAggregator), FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator), QueryDataSource (org.apache.druid.query.QueryDataSource), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), PeriodGranularity (org.apache.druid.java.util.common.granularity.PeriodGranularity), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), LongMaxAggregatorFactory (org.apache.druid.query.aggregation.LongMaxAggregatorFactory), LongMinAggregatorFactory (org.apache.druid.query.aggregation.LongMinAggregatorFactory), Test (org.junit.Test)
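
Note that AVG(cnt) in the SQL above does not map to a single native aggregator: the expected query computes a long sum and a count, then combines them with an arithmetic "quotient" post-aggregator. A minimal sketch of just that piece (not part of the test), reusing the planner-generated names "_a2", "_a2:sum", and "_a2:count":

import com.google.common.collect.ImmutableList;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;

public class AvgAsQuotientSketch {
    // AVG(cnt) is SUM(cnt) / COUNT(cnt): "_a2:sum" and "_a2:count" are the intermediate
    // aggregator outputs, and the quotient is exposed under the output name "_a2".
    public static PostAggregator avg() {
        return new ArithmeticPostAggregator(
            "_a2",
            "quotient",
            ImmutableList.of(
                new FieldAccessPostAggregator(null, "_a2:sum"),
                new FieldAccessPostAggregator(null, "_a2:count")
            )
        );
    }
}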

Example 5 with LongMinAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMinAggregatorFactory in project druid by druid-io.

From class CalciteQueryTest, method testExactCountDistinctWithFilter.

@Test
public void testExactCountDistinctWithFilter() throws Exception {
    final String sqlQuery = "SELECT COUNT(DISTINCT foo.dim1) FILTER(WHERE foo.cnt = 1), SUM(foo.cnt) FROM druid.foo";
    // With CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT set to "false", planning fails due to
    // a bug in Calcite's rule (AggregateExpandDistinctAggregatesRule).
    try {
        testQuery(
            // Enable exact count distinct
            PLANNER_CONFIG_NO_HLL.withOverrides(
                ImmutableMap.of(PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, "false")
            ),
            sqlQuery,
            CalciteTests.REGULAR_USER_AUTH_RESULT,
            ImmutableList.of(),
            ImmutableList.of()
        );
        Assert.fail("query execution should fail");
    } catch (RuntimeException e) {
        Assert.assertTrue(e.getMessage().contains("Error while applying rule AggregateExpandDistinctAggregatesRule"));
    }
    requireMergeBuffers(3);
    testQuery(
        PLANNER_CONFIG_NO_HLL.withOverrides(
            ImmutableMap.of(PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT, "true")
        ),
        sqlQuery,
        CalciteTests.REGULAR_USER_AUTH_RESULT,
        ImmutableList.of(
            GroupByQuery.builder()
                .setDataSource(
                    new QueryDataSource(
                        GroupByQuery.builder()
                            .setDataSource(CalciteTests.DATASOURCE1)
                            .setInterval(querySegmentSpec(Filtration.eternity()))
                            .setGranularity(Granularities.ALL)
                            .setVirtualColumns(
                                expressionVirtualColumn(
                                    "v0",
                                    NullHandling.replaceWithDefault() ? "(\"cnt\" == 1)" : "((\"cnt\" == 1) > 0)",
                                    ColumnType.LONG
                                )
                            )
                            .setDimensions(
                                dimensions(
                                    new DefaultDimensionSpec("dim1", "d0", ColumnType.STRING),
                                    new DefaultDimensionSpec("v0", "d1", ColumnType.LONG)
                                )
                            )
                            .setAggregatorSpecs(
                                aggregators(
                                    new LongSumAggregatorFactory("a0", "cnt"),
                                    new GroupingAggregatorFactory("a1", Arrays.asList("dim1", "v0"))
                                )
                            )
                            .setSubtotalsSpec(ImmutableList.of(ImmutableList.of("d0", "d1"), ImmutableList.of()))
                            .build()
                    )
                )
                .setInterval(querySegmentSpec(Filtration.eternity()))
                .setGranularity(Granularities.ALL)
                .setAggregatorSpecs(
                    aggregators(
                        new FilteredAggregatorFactory(
                            new CountAggregatorFactory("_a0"),
                            and(not(selector("d0", null, null)), selector("a1", "0", null))
                        ),
                        new FilteredAggregatorFactory(
                            new LongMinAggregatorFactory("_a1", "a0"),
                            selector("a1", "3", null)
                        )
                    )
                )
                .setContext(QUERY_CONTEXT_DEFAULT)
                .build()
        ),
        ImmutableList.of(new Object[] { NullHandling.replaceWithDefault() ? 5L : 6L, 6L })
    );
}
Also used: FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory), QueryDataSource (org.apache.druid.query.QueryDataSource), CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory), GroupingAggregatorFactory (org.apache.druid.query.aggregation.GroupingAggregatorFactory), LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory), DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec), LongMinAggregatorFactory (org.apache.druid.query.aggregation.LongMinAggregatorFactory), Test (org.junit.Test)

Aggregations

CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 7 usages
LongMinAggregatorFactory (org.apache.druid.query.aggregation.LongMinAggregatorFactory): 7 usages
Test (org.junit.Test): 6 usages
QueryDataSource (org.apache.druid.query.QueryDataSource): 5 usages
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 5 usages
FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory): 4 usages
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 4 usages
Parameters (junitparams.Parameters): 3 usages
LongMaxAggregatorFactory (org.apache.druid.query.aggregation.LongMaxAggregatorFactory): 3 usages
GlobalTableDataSource (org.apache.druid.query.GlobalTableDataSource): 2 usages
TableDataSource (org.apache.druid.query.TableDataSource): 2 usages
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 2 usages
GroupingAggregatorFactory (org.apache.druid.query.aggregation.GroupingAggregatorFactory): 2 usages
ISE (org.apache.druid.java.util.common.ISE): 1 usage
PeriodGranularity (org.apache.druid.java.util.common.granularity.PeriodGranularity): 1 usage
FloatSumAggregatorFactory (org.apache.druid.query.aggregation.FloatSumAggregatorFactory): 1 usage
HyperUniquesAggregatorFactory (org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory): 1 usage
ArithmeticPostAggregator (org.apache.druid.query.aggregation.post.ArithmeticPostAggregator): 1 usage
ExpressionPostAggregator (org.apache.druid.query.aggregation.post.ExpressionPostAggregator): 1 usage
FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator): 1 usage
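
Distilled from the examples above, a minimal standalone sketch of a timeseries query that takes a long minimum. The datasource "wikipedia", the column "added", and the output name "min_added" are illustrative placeholders, not taken from any of the tests:

import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.LongMinAggregatorFactory;
import org.apache.druid.query.timeseries.TimeseriesQuery;

public class LongMinQuerySketch {
    // Minimum of the hypothetical long column "added" over the queried interval,
    // reported under the output name "min_added".
    public static TimeseriesQuery build() {
        return Druids.newTimeseriesQueryBuilder()
                     .dataSource("wikipedia")
                     .intervals("1000/3000")
                     .aggregators(new LongMinAggregatorFactory("min_added", "added"))
                     .build();
    }
}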