Example 1 with LongMaxAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.

From the class GroupByQueryRunnerTest, method testGroupByNestedWithInnerQueryNumericsWithLongTime. The test runs a nested group by whose inner query exposes __time and index as LONG and FLOAT dimensions, and whose outer query takes a LongMax over the long time alias and a DoubleMax over the float index alias:

@Test
public void testGroupByNestedWithInnerQueryNumericsWithLongTime() {
    if (config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V1)) {
        expectedException.expect(UnsupportedOperationException.class);
        expectedException.expectMessage("GroupBy v1 only supports dimensions with an outputType of STRING.");
    }
    GroupByQuery subQuery = makeQueryBuilder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(
            new DefaultDimensionSpec("market", "alias"),
            new DefaultDimensionSpec("__time", "time_alias", ColumnType.LONG),
            new DefaultDimensionSpec("index", "index_alias", ColumnType.FLOAT)
        )
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
        .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
        .build();
    GroupByQuery outerQuery = makeQueryBuilder()
        .setDataSource(subQuery)
        .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
        .setDimensions(
            new DefaultDimensionSpec("alias", "market"),
            new DefaultDimensionSpec("time_alias", "time_alias2", ColumnType.LONG)
        )
        .setAggregatorSpecs(
            new LongMaxAggregatorFactory("time_alias_max", "time_alias"),
            new DoubleMaxAggregatorFactory("index_alias_max", "index_alias")
        )
        .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
        .build();
    List<ResultRow> expectedResults = Arrays.asList(
        makeRow(outerQuery, "2011-04-01", "market", "spot", "time_alias2", 1301616000000L, "time_alias_max", 1301616000000L, "index_alias_max", 158.74722290039062),
        makeRow(outerQuery, "2011-04-01", "market", "spot", "time_alias2", 1301702400000L, "time_alias_max", 1301702400000L, "index_alias_max", 166.01605224609375),
        makeRow(outerQuery, "2011-04-01", "market", "total_market", "time_alias2", 1301616000000L, "time_alias_max", 1301616000000L, "index_alias_max", 1522.043701171875),
        makeRow(outerQuery, "2011-04-01", "market", "total_market", "time_alias2", 1301702400000L, "time_alias_max", 1301702400000L, "index_alias_max", 1321.375),
        makeRow(outerQuery, "2011-04-01", "market", "upfront", "time_alias2", 1301616000000L, "time_alias_max", 1301616000000L, "index_alias_max", 1447.3411865234375),
        makeRow(outerQuery, "2011-04-01", "market", "upfront", "time_alias2", 1301702400000L, "time_alias_max", 1301702400000L, "index_alias_max", 1144.3424072265625)
    );
    Iterable<ResultRow> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, outerQuery);
    TestHelper.assertExpectedObjects(expectedResults, results, "numerics");
}
Also used : DoubleMaxAggregatorFactory(org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
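
Before the larger SQL and push-down examples below, a minimal standalone sketch of the aggregator itself may help; the class and column names here are illustrative and not taken from the test above.

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;

public class LongMaxBasicsSketch
{
    public static void main(String[] args)
    {
        // "maxTime" is the output name of the aggregation; "__time" is the long-typed
        // input column whose maximum is tracked (both names are illustrative).
        AggregatorFactory longMax = new LongMaxAggregatorFactory("maxTime", "__time");

        System.out.println(longMax.getName());         // maxTime
        System.out.println(longMax.requiredFields());  // [__time]
    }
}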

Example 2 with LongMaxAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.

From the class CalciteJoinQueryTest, method testNotInAggregationSubquery. The SQL NOT IN aggregation subquery computes MAX(__time), which the planner turns into a longMax aggregator inside the timeseries subqueries that are joined back to the base table:

@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testNotInAggregationSubquery(Map<String, Object> queryContext) throws Exception {
    // Cannot vectorize JOIN operator.
    cannotVectorize();
    testQuery("SELECT DISTINCT __time FROM druid.foo WHERE __time NOT IN (SELECT MAX(__time) FROM druid.foo)", queryContext, ImmutableList.of(GroupByQuery.builder().setDataSource(join(join(new TableDataSource(CalciteTests.DATASOURCE1), new QueryDataSource(GroupByQuery.builder().setDataSource(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(querySegmentSpec(Filtration.eternity())).granularity(Granularities.ALL).aggregators(new LongMaxAggregatorFactory("a0", "__time")).context(QUERY_CONTEXT_DEFAULT).build()).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setAggregatorSpecs(new CountAggregatorFactory("_a0"), NullHandling.sqlCompatible() ? new FilteredAggregatorFactory(new CountAggregatorFactory("_a1"), not(selector("a0", null, null))) : new CountAggregatorFactory("_a1")).setContext(queryContext).build()), "j0.", "1", JoinType.INNER), new QueryDataSource(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(querySegmentSpec(Filtration.eternity())).granularity(Granularities.ALL).aggregators(new LongMaxAggregatorFactory("a0", "__time")).postAggregators(expressionPostAgg("p0", "1")).context(QUERY_CONTEXT_DEFAULT).build()), "_j0.", "(\"__time\" == \"_j0.a0\")", JoinType.LEFT)).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimFilter(or(selector("j0._a0", "0", null), and(selector("_j0.p0", null, null), expressionFilter("(\"j0._a1\" >= \"j0._a0\")")))).setDimensions(dimensions(new DefaultDimensionSpec("__time", "d0", ColumnType.LONG))).setContext(queryContext).build()), ImmutableList.of(new Object[] { timestamp("2000-01-01") }, new Object[] { timestamp("2000-01-02") }, new Object[] { timestamp("2000-01-03") }, new Object[] { timestamp("2001-01-01") }, new Object[] { timestamp("2001-01-02") }));
}
Also used : FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) GlobalTableDataSource(org.apache.druid.query.GlobalTableDataSource) TableDataSource(org.apache.druid.query.TableDataSource) QueryDataSource(org.apache.druid.query.QueryDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Parameters(junitparams.Parameters) Test(org.junit.Test)
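
Stripped of the join scaffolding and test helpers, the MAX(__time) subquery above is essentially an all-granularity timeseries query with a single longMax aggregator. A rough sketch, assuming a plain datasource name ("foo") and an eternity interval built directly rather than via the test's querySegmentSpec helper:

import java.util.Collections;

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.timeseries.TimeseriesQuery;

public class MaxTimeSubquerySketch
{
    public static void main(String[] args)
    {
        // MAX(__time) over the whole table becomes an all-granularity timeseries query
        // whose only aggregator is a longMax on __time.
        TimeseriesQuery maxTime = Druids.newTimeseriesQueryBuilder()
            .dataSource("foo")
            .intervals(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.ETERNITY)))
            .granularity(Granularities.ALL)
            .aggregators(new LongMaxAggregatorFactory("a0", "__time"))
            .build();

        System.out.println(maxTime);
    }
}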

Example 3 with LongMaxAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.

From the class CalciteCorrelatedQueryTest, method testCorrelatedSubqueryWithCorrelatedQueryFilter. The correlated subquery computes, per country, the maximum of a daily visit count; that maximum comes from a LongMaxAggregatorFactory in the inner group by and is then joined back to the outer query:

@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testCorrelatedSubqueryWithCorrelatedQueryFilter(Map<String, Object> queryContext) throws Exception {
    cannotVectorize();
    queryContext = withLeftDirectAccessEnabled(queryContext);
    testQuery("select country, ANY_VALUE(\n" + "        select max(\"users\") from (\n" + "            select floor(__time to day), count(user) \"users\" from visits f where f.country = visits.country and f.city = 'A' group by 1\n" + "        )\n" + "     ) as \"dailyVisits\"\n" + "from visits \n" + " where city = 'B'" + " group by 1", queryContext, ImmutableList.of(GroupByQuery.builder().setDataSource(join(new TableDataSource(CalciteTests.USERVISITDATASOURCE), new QueryDataSource(GroupByQuery.builder().setDataSource(GroupByQuery.builder().setDataSource(CalciteTests.USERVISITDATASOURCE).setQuerySegmentSpec(querySegmentSpec(Intervals.ETERNITY)).setVirtualColumns(new ExpressionVirtualColumn("v0", "timestamp_floor(\"__time\",'P1D',null,'UTC')", ColumnType.LONG, TestExprMacroTable.INSTANCE)).setDimensions(new DefaultDimensionSpec("v0", "d0", ColumnType.LONG), new DefaultDimensionSpec("country", "d1")).setAggregatorSpecs(new FilteredAggregatorFactory(new CountAggregatorFactory("a0"), not(selector("user", null, null)))).setDimFilter(and(selector("city", "A", null), not(selector("country", null, null)))).setContext(withTimestampResultContext(queryContext, "d0", Granularities.DAY)).setGranularity(new AllGranularity()).build()).setQuerySegmentSpec(querySegmentSpec(Intervals.ETERNITY)).setDimensions(new DefaultDimensionSpec("d1", "_d0")).setAggregatorSpecs(new LongMaxAggregatorFactory("_a0", "a0")).setGranularity(new AllGranularity()).setContext(queryContext).build()), "j0.", equalsCondition(makeColumnExpression("country"), makeColumnExpression("j0._d0")), JoinType.LEFT, selector("city", "B", null))).setQuerySegmentSpec(querySegmentSpec(Intervals.ETERNITY)).setDimensions(new DefaultDimensionSpec("country", "d0")).setAggregatorSpecs(new LongAnyAggregatorFactory("a0", "j0._a0")).setGranularity(new AllGranularity()).setContext(queryContext).build()), ImmutableList.of(new Object[] { "canada", 2L }));
}
Also used : FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) ExpressionVirtualColumn(org.apache.druid.segment.virtual.ExpressionVirtualColumn) TableDataSource(org.apache.druid.query.TableDataSource) QueryDataSource(org.apache.druid.query.QueryDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) LongAnyAggregatorFactory(org.apache.druid.query.aggregation.any.LongAnyAggregatorFactory) AllGranularity(org.apache.druid.java.util.common.granularity.AllGranularity) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Parameters(junitparams.Parameters) Test(org.junit.Test)
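
Setting aside the join, filters, and virtual column, the correlated subquery above reduces to a max-of-counts pattern: an inner group by produces a count per group and an outer group by takes a longMax over it. A minimal sketch of just that pattern, with a hypothetical "visits" datasource and "country" column:

import java.util.Collections;

import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;

public class MaxOfCountsSketch
{
    public static void main(String[] args)
    {
        QuerySegmentSpec eternity =
            new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.ETERNITY));

        // Inner query: one count ("a0") per "country" group.
        GroupByQuery inner = GroupByQuery.builder()
            .setDataSource("visits")
            .setQuerySegmentSpec(eternity)
            .setDimensions(new DefaultDimensionSpec("country", "d0"))
            .setAggregatorSpecs(new CountAggregatorFactory("a0"))
            .setGranularity(Granularities.ALL)
            .build();

        // Outer query: the maximum of the inner count per country ("_a0").
        GroupByQuery outer = GroupByQuery.builder()
            .setDataSource(inner)
            .setQuerySegmentSpec(eternity)
            .setDimensions(new DefaultDimensionSpec("d0", "_d0"))
            .setAggregatorSpecs(new LongMaxAggregatorFactory("_a0", "a0"))
            .setGranularity(Granularities.ALL)
            .build();

        System.out.println(outer);
    }
}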

Example 4 with LongMaxAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.

From the class NestedQueryPushDownTest, method testNestedQueryWithRenamedDimensions. The outer query of a force-pushed-down nested group by renames the inner dimension and takes a LongMax over the inner query's summed metric:

@Test
public void testNestedQueryWithRenamedDimensions() {
    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.utc(1500000000000L, 1600000000000L)));
    GroupByQuery query = GroupByQuery.builder()
        .setDataSource("blah")
        .setQuerySegmentSpec(intervalSpec)
        .setDimensions(
            new DefaultDimensionSpec("dimA", "dimA"),
            new DefaultDimensionSpec("dimB", "newDimB")
        )
        .setAggregatorSpecs(
            new LongSumAggregatorFactory("metASum", "metA"),
            new LongSumAggregatorFactory("metBSum", "metB")
        )
        .setGranularity(Granularities.ALL)
        .build();
    GroupByQuery nestedQuery = GroupByQuery.builder()
        .setDataSource(query)
        .setQuerySegmentSpec(intervalSpec)
        .setDimensions(new DefaultDimensionSpec("newDimB", "renamedDimB"))
        .setAggregatorSpecs(new LongMaxAggregatorFactory("maxBSum", "metBSum"))
        .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_PUSH_DOWN_NESTED_QUERY, true))
        .setGranularity(Granularities.ALL)
        .build();
    Sequence<ResultRow> queryResult = runNestedQueryWithForcePushDown(nestedQuery);
    List<ResultRow> results = queryResult.toList();
    ResultRow expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow(nestedQuery, "2017-07-14T02:40:00.000Z", "renamedDimB", "sour", "maxBSum", 20L);
    ResultRow expectedRow1 = GroupByQueryRunnerTestHelper.createExpectedRow(nestedQuery, "2017-07-14T02:40:00.000Z", "renamedDimB", "sweet", "maxBSum", 60L);
    Assert.assertEquals(2, results.size());
    Assert.assertEquals(expectedRow0, results.get(0));
    Assert.assertEquals(expectedRow1, results.get(1));
}
Also used : LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) QuerySegmentSpec(org.apache.druid.query.spec.QuerySegmentSpec) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Test(org.junit.Test)
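
One detail worth noting in the test above is the column-name plumbing: the outer longMax reads the inner query's output column ("metBSum"), not the raw column ("metB"). A small sketch of that relationship in isolation:

import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;

public class NestedAggregatorNamingSketch
{
    public static void main(String[] args)
    {
        // Inner aggregator: sums the raw column "metB" into the output column "metBSum".
        LongSumAggregatorFactory innerSum = new LongSumAggregatorFactory("metBSum", "metB");

        // Outer aggregator: takes the max of the inner output column, so its fieldName
        // must be the inner aggregator's name, not the raw column.
        LongMaxAggregatorFactory outerMax = new LongMaxAggregatorFactory("maxBSum", innerSum.getName());

        System.out.println(outerMax.requiredFields());  // [metBSum]
    }
}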

Example 5 with LongMaxAggregatorFactory

Use of org.apache.druid.query.aggregation.LongMaxAggregatorFactory in project druid by druid-io.

From the class MetadataTest, method testMerge. The test merges segment Metadata instances whose aggregators are LongMaxAggregatorFactory, checking the merged aggregators, timestamp spec, granularity, and rollup flag:

@Test
public void testMerge() {
    Assert.assertNull(Metadata.merge(null, null));
    Assert.assertNull(Metadata.merge(ImmutableList.of(), null));
    List<Metadata> metadataToBeMerged = new ArrayList<>();
    metadataToBeMerged.add(null);
    Assert.assertNull(Metadata.merge(metadataToBeMerged, null));
    // sanity merge check
    AggregatorFactory[] aggs = new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "f") };
    final Metadata m1 = new Metadata(Collections.singletonMap("k", "v"), aggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.FALSE);
    final Metadata m2 = new Metadata(Collections.singletonMap("k", "v"), aggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.FALSE);
    final Metadata m3 = new Metadata(Collections.singletonMap("k", "v"), aggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.TRUE);
    final Metadata merged = new Metadata(Collections.singletonMap("k", "v"), new AggregatorFactory[] { new LongMaxAggregatorFactory("n", "n") }, new TimestampSpec("ds", "auto", null), Granularities.ALL, Boolean.FALSE);
    Assert.assertEquals(merged, Metadata.merge(ImmutableList.of(m1, m2), null));
    // merge check with one metadata being null
    metadataToBeMerged.clear();
    metadataToBeMerged.add(m1);
    metadataToBeMerged.add(m2);
    metadataToBeMerged.add(null);
    final Metadata merged2 = new Metadata(Collections.singletonMap("k", "v"), null, null, null, null);
    Assert.assertEquals(merged2, Metadata.merge(metadataToBeMerged, null));
    // merge check with client explicitly providing merged aggregators
    AggregatorFactory[] explicitAggs = new AggregatorFactory[] { new DoubleMaxAggregatorFactory("x", "y") };
    final Metadata merged3 = new Metadata(Collections.singletonMap("k", "v"), explicitAggs, null, null, null);
    Assert.assertEquals(merged3, Metadata.merge(metadataToBeMerged, explicitAggs));
    final Metadata merged4 = new Metadata(Collections.singletonMap("k", "v"), explicitAggs, new TimestampSpec("ds", "auto", null), Granularities.ALL, null);
    Assert.assertEquals(merged4, Metadata.merge(ImmutableList.of(m3, m2), explicitAggs));
}
Also used : DoubleMaxAggregatorFactory(org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory) ArrayList(java.util.ArrayList) TimestampSpec(org.apache.druid.data.input.impl.TimestampSpec) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) LongMaxAggregatorFactory(org.apache.druid.query.aggregation.LongMaxAggregatorFactory) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) Test(org.junit.Test)
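
The merged metadata above expects LongMaxAggregatorFactory("n", "n") even though m1 and m2 were built from field "f". A plausible way to see why, sketched via the aggregator's combining factory (an inference from the expected value, not code lifted from Metadata.merge):

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;

public class CombiningFactorySketch
{
    public static void main(String[] args)
    {
        AggregatorFactory original = new LongMaxAggregatorFactory("n", "f");

        // The combining factory keeps the output name "n" but reads from "n" itself,
        // matching the LongMaxAggregatorFactory("n", "n") expected by the merge test above.
        AggregatorFactory combining = original.getCombiningFactory();

        System.out.println(combining.requiredFields());  // [n]
    }
}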

Aggregations

LongMaxAggregatorFactory (org.apache.druid.query.aggregation.LongMaxAggregatorFactory): 18
Test (org.junit.Test): 14
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 9
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 9
QueryDataSource (org.apache.druid.query.QueryDataSource): 8
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 8
Parameters (junitparams.Parameters): 7
TableDataSource (org.apache.druid.query.TableDataSource): 6
FilteredAggregatorFactory (org.apache.druid.query.aggregation.FilteredAggregatorFactory): 6
ArrayList (java.util.ArrayList): 4
AllGranularity (org.apache.druid.java.util.common.granularity.AllGranularity): 4
DoubleSumAggregatorFactory (org.apache.druid.query.aggregation.DoubleSumAggregatorFactory): 4
LongAnyAggregatorFactory (org.apache.druid.query.aggregation.any.LongAnyAggregatorFactory): 4
ExpressionVirtualColumn (org.apache.druid.segment.virtual.ExpressionVirtualColumn): 4
LinkedHashMap (java.util.LinkedHashMap): 3
List (java.util.List): 3
GlobalTableDataSource (org.apache.druid.query.GlobalTableDataSource): 3
DoubleMaxAggregatorFactory (org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory): 3
DoubleMinAggregatorFactory (org.apache.druid.query.aggregation.DoubleMinAggregatorFactory): 3
LongMinAggregatorFactory (org.apache.druid.query.aggregation.LongMinAggregatorFactory): 3