
Example 51 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

From the class TDigestSketchSqlAggregatorTest, the method testQuantileOnNumericValues.

@Test
public void testQuantileOnNumericValues() throws Exception {
    cannotVectorize();
    final List<Object[]> expectedResults = ImmutableList.of(new Object[] { 1.0, 3.5, 6.0 });
    testQuery("SELECT\n" + "TDIGEST_QUANTILE(m1, 0.0), TDIGEST_QUANTILE(m1, 0.5), TDIGEST_QUANTILE(m1, 1.0)\n" + "FROM foo", ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).granularity(Granularities.ALL).aggregators(ImmutableList.of(new TDigestSketchAggregatorFactory("a0:agg", "m1", null))).postAggregators(new TDigestSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.0f), new TDigestSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a0:agg"), 0.5f), new TDigestSketchToQuantilePostAggregator("a2", makeFieldAccessPostAgg("a0:agg"), 1.0f)).context(QUERY_CONTEXT_DEFAULT).build()), expectedResults);
}
Also used : TDigestSketchAggregatorFactory(org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory) TDigestSketchToQuantilePostAggregator(org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchToQuantilePostAggregator) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) BaseCalciteQueryTest(org.apache.druid.sql.calcite.BaseCalciteQueryTest) Test(org.junit.Test)
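The core pattern the test asserts is a list of Joda-Time intervals wrapped in a MultipleIntervalSegmentSpec and handed to the timeseries query builder. A small standalone sketch of that pattern follows; the datasource name "wikipedia" and the count aggregator are placeholders for illustration rather than part of the test above, and the imports are assumed from the usage lists on this page.

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.timeseries.TimeseriesQuery;

public class MultipleIntervalSegmentSpecSketch
{
    public static TimeseriesQuery buildQuery()
    {
        // One spec can carry several disjoint intervals; the query covers all of them.
        final MultipleIntervalSegmentSpec intervals = new MultipleIntervalSegmentSpec(
            ImmutableList.of(Intervals.of("2011/2012"), Intervals.of("2013/2014"))
        );
        return Druids.newTimeseriesQueryBuilder()
                     .dataSource("wikipedia")  // placeholder datasource name
                     .intervals(intervals)
                     .granularity(Granularities.ALL)
                     .aggregators(ImmutableList.of(new CountAggregatorFactory("count")))
                     .build();
    }
}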

Example 52 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

From the class MapVirtualColumnGroupByTest, the method testWithMapColumn.

@Test
public void testWithMapColumn() {
    final GroupByQuery query = new GroupByQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
        null,
        Granularities.ALL,
        ImmutableList.of(new DefaultDimensionSpec("params", "params")),
        ImmutableList.of(new CountAggregatorFactory("count")),
        null,
        null,
        null,
        null,
        null
    );
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Map column doesn't support getRow()");
    runner.run(QueryPlus.wrap(query)).toList();
}
Also used : GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
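In this example the interval spec is passed positionally as the second argument of the GroupByQuery constructor. The same query can be assembled more readably with GroupByQuery.builder(); the sketch below is illustrative only, and the builder setter names are assumed from common Druid test usage rather than taken from this test class.

    final GroupByQuery builderQuery = GroupByQuery.builder()
        .setDataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE))
        // The interval spec slots in as the query segment spec.
        .setQuerySegmentSpec(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))))
        .setVirtualColumns(new MapVirtualColumn("keys", "values", "params"))
        .setGranularity(Granularities.ALL)
        .setDimensions(new DefaultDimensionSpec("params", "params"))
        .setAggregatorSpecs(new CountAggregatorFactory("count"))
        .build();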

Example 53 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

From the class MapVirtualColumnGroupByTest, the method testWithSubColumn.

@Test
public void testWithSubColumn() {
    final GroupByQuery query = new GroupByQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
        null,
        Granularities.ALL,
        ImmutableList.of(new DefaultDimensionSpec("params.key3", "params.key3")),
        ImmutableList.of(new CountAggregatorFactory("count")),
        null,
        null,
        null,
        null,
        null
    );
    final List<ResultRow> result = runner.run(QueryPlus.wrap(query)).toList();
    final List<ResultRow> expected = ImmutableList
        .of(
            new MapBasedRow(
                DateTimes.of("2011-01-12T00:00:00.000Z"),
                MapVirtualColumnTestBase.mapOf("count", 1L, "params.key3", "value3")
            ),
            new MapBasedRow(
                DateTimes.of("2011-01-12T00:00:00.000Z"),
                MapVirtualColumnTestBase.mapOf("count", 2L)
            )
        )
        .stream()
        .map(row -> ResultRow.fromLegacyRow(row, query))
        .collect(Collectors.toList());
    Assert.assertEquals(expected, result);
}
Also used : ResultRow(org.apache.druid.query.groupby.ResultRow) MapBasedRow(org.apache.druid.data.input.MapBasedRow) QueryPlus(org.apache.druid.query.QueryPlus) Intervals(org.apache.druid.java.util.common.Intervals) StupidPool(org.apache.druid.collections.StupidPool) DefaultBlockingPool(org.apache.druid.collections.DefaultBlockingPool) ByteBuffer(java.nio.ByteBuffer) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) ImmutableList(com.google.common.collect.ImmutableList) IncrementalIndex(org.apache.druid.segment.incremental.IncrementalIndex) QueryRunner(org.apache.druid.query.QueryRunner) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) GroupByStrategySelector(org.apache.druid.query.groupby.strategy.GroupByStrategySelector) ExpectedException(org.junit.rules.ExpectedException) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) Before(org.junit.Before) DateTimes(org.apache.druid.java.util.common.DateTimes) GroupByQueryRunnerFactory(org.apache.druid.query.groupby.GroupByQueryRunnerFactory) GroupByStrategyV2(org.apache.druid.query.groupby.strategy.GroupByStrategyV2) GroupByQueryConfig(org.apache.druid.query.groupby.GroupByQueryConfig) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test) IOException(java.io.IOException) DruidProcessingConfig(org.apache.druid.query.DruidProcessingConfig) Collectors(java.util.stream.Collectors) TableDataSource(org.apache.druid.query.TableDataSource) DefaultObjectMapper(org.apache.druid.jackson.DefaultObjectMapper) Granularities(org.apache.druid.java.util.common.granularity.Granularities) List(java.util.List) Rule(org.junit.Rule) QueryRunnerTestHelper(org.apache.druid.query.QueryRunnerTestHelper) GroupByQueryQueryToolChest(org.apache.druid.query.groupby.GroupByQueryQueryToolChest) SegmentId(org.apache.druid.timeline.SegmentId) Assert(org.junit.Assert)
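For reference, MultipleIntervalSegmentSpec itself is a thin wrapper around a list of intervals, and the query engine reads them back through the QuerySegmentSpec interface. A minimal sketch; getIntervals() here is the accessor defined on that interface, and the interval values are arbitrary examples.

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.joda.time.Interval;

public class SegmentSpecIntervalsSketch
{
    public static void main(String[] args)
    {
        // Wrap one or more intervals in the spec.
        final MultipleIntervalSegmentSpec spec = new MultipleIntervalSegmentSpec(
            ImmutableList.of(Intervals.of("2011/2012"), Intervals.of("2013/2014"))
        );
        // QuerySegmentSpec exposes the interval list the query will cover.
        final List<Interval> intervals = spec.getIntervals();
        System.out.println(intervals);
    }
}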

Example 54 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

From the class MapVirtualColumnTopNTest, the method testWithMapColumn.

@Test
public void testWithMapColumn() {
    final TopNQuery query = new TopNQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
        // params is the map type
        new DefaultDimensionSpec("params", "params"),
        new NumericTopNMetricSpec("count"),
        1,
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        null,
        Granularities.ALL,
        ImmutableList.of(new CountAggregatorFactory("count")),
        null,
        null
    );
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Map column doesn't support getRow()");
    runner.run(QueryPlus.wrap(query)).toList();
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) TopNQuery(org.apache.druid.query.topn.TopNQuery) NumericTopNMetricSpec(org.apache.druid.query.topn.NumericTopNMetricSpec) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
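The positional TopNQuery constructor above can also be written with TopNQueryBuilder, which names each part of the query. The sketch below is illustrative only; the builder method names are assumed from common Druid usage rather than taken from this test class.

    final TopNQuery builderQuery = new TopNQueryBuilder()
        .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE))
        .virtualColumns(new MapVirtualColumn("keys", "values", "params"))
        .dimension(new DefaultDimensionSpec("params", "params"))
        .metric(new NumericTopNMetricSpec("count"))
        .threshold(1)
        // The interval spec is supplied the same way as in the constructor form.
        .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))))
        .granularity(Granularities.ALL)
        .aggregators(ImmutableList.of(new CountAggregatorFactory("count")))
        .build();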

Example 55 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

From the class DoublesSketchSqlAggregatorTest, the method testQuantileOnCastedString.

@Test
public void testQuantileOnCastedString() throws Exception {
    final List<Object[]> expectedResults;
    if (NullHandling.replaceWithDefault()) {
        expectedResults = ImmutableList.of(new Object[] { 0.0, 1.0, 10.1, 10.1, 20.2, 0.0, 10.1, 0.0 });
    } else {
        expectedResults = ImmutableList.of(new Object[] { 1.0, 2.0, 10.1, 10.1, 20.2, Double.NaN, 10.1, Double.NaN });
    }
    testQuery("SELECT\n" + "APPROX_QUANTILE_DS(CAST(dim1 as DOUBLE), 0.01),\n" + "APPROX_QUANTILE_DS(CAST(dim1 as DOUBLE), 0.5, 64),\n" + "APPROX_QUANTILE_DS(CAST(dim1 as DOUBLE), 0.98, 256),\n" + "APPROX_QUANTILE_DS(CAST(dim1 as DOUBLE), 0.99),\n" + "APPROX_QUANTILE_DS(CAST(dim1 as DOUBLE) * 2, 0.97),\n" + "APPROX_QUANTILE_DS(CAST(dim1 as DOUBLE), 0.99) FILTER(WHERE dim2 = 'abc'),\n" + "APPROX_QUANTILE_DS(CAST(dim1 as DOUBLE), 0.999) FILTER(WHERE dim2 <> 'abc'),\n" + "APPROX_QUANTILE_DS(CAST(dim1 as DOUBLE), 0.999) FILTER(WHERE dim2 = 'abc')\n" + "FROM foo", ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).granularity(Granularities.ALL).virtualColumns(new ExpressionVirtualColumn("v0", "CAST(\"dim1\", 'DOUBLE')", ColumnType.FLOAT, TestExprMacroTable.INSTANCE), new ExpressionVirtualColumn("v1", "(CAST(\"dim1\", 'DOUBLE') * 2)", ColumnType.FLOAT, TestExprMacroTable.INSTANCE)).aggregators(ImmutableList.of(new DoublesSketchAggregatorFactory("a0:agg", "v0", 128), new DoublesSketchAggregatorFactory("a1:agg", "v0", 64), new DoublesSketchAggregatorFactory("a2:agg", "v0", 256), new DoublesSketchAggregatorFactory("a4:agg", "v1", 128), new FilteredAggregatorFactory(new DoublesSketchAggregatorFactory("a5:agg", "v0", 128), new SelectorDimFilter("dim2", "abc", null)), new FilteredAggregatorFactory(new DoublesSketchAggregatorFactory("a6:agg", "v0", 128), new NotDimFilter(new SelectorDimFilter("dim2", "abc", null))))).postAggregators(new DoublesSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.01f), new DoublesSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a1:agg"), 0.50f), new DoublesSketchToQuantilePostAggregator("a2", makeFieldAccessPostAgg("a2:agg"), 0.98f), new DoublesSketchToQuantilePostAggregator("a3", makeFieldAccessPostAgg("a0:agg"), 0.99f), new DoublesSketchToQuantilePostAggregator("a4", makeFieldAccessPostAgg("a4:agg"), 0.97f), new DoublesSketchToQuantilePostAggregator("a5", makeFieldAccessPostAgg("a5:agg"), 0.99f), new DoublesSketchToQuantilePostAggregator("a6", makeFieldAccessPostAgg("a6:agg"), 0.999f), new DoublesSketchToQuantilePostAggregator("a7", makeFieldAccessPostAgg("a5:agg"), 0.999f)).context(QUERY_CONTEXT_DEFAULT).build()), expectedResults);
}
Also used : FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) ExpressionVirtualColumn(org.apache.druid.segment.virtual.ExpressionVirtualColumn) NotDimFilter(org.apache.druid.query.filter.NotDimFilter) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) DoublesSketchToQuantilePostAggregator(org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchToQuantilePostAggregator) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DoublesSketchAggregatorFactory(org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchAggregatorFactory) BaseCalciteQueryTest(org.apache.druid.sql.calcite.BaseCalciteQueryTest) Test(org.junit.Test)
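One detail worth noting in the expected native query: each SQL FILTER(WHERE ...) clause is planned as a FilteredAggregatorFactory wrapped around the DoublesSketch aggregator, with the condition expressed as a DimFilter. The fragment below is lifted from the expected query above rather than written fresh.

    // The aggregator feeding the "dim2 = 'abc'" quantiles: a selector filter around the sketch.
    final FilteredAggregatorFactory filteredSketch = new FilteredAggregatorFactory(
        new DoublesSketchAggregatorFactory("a5:agg", "v0", 128),
        new SelectorDimFilter("dim2", "abc", null)
    );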

Aggregations

MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 142
Test (org.junit.Test): 115
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 53
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 44
BaseCalciteQueryTest (org.apache.druid.sql.calcite.BaseCalciteQueryTest): 39
QuerySegmentSpec (org.apache.druid.query.spec.QuerySegmentSpec): 28
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 25
ResponseContext (org.apache.druid.query.context.ResponseContext): 22
QueryRunner (org.apache.druid.query.QueryRunner): 21
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 21
QueryPlus (org.apache.druid.query.QueryPlus): 20
TableDataSource (org.apache.druid.query.TableDataSource): 19
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 18
ArrayList (java.util.ArrayList): 17
QueryDataSource (org.apache.druid.query.QueryDataSource): 15
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 15
OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec): 15
Sequence (org.apache.druid.java.util.common.guava.Sequence): 14
FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator): 14
Result (org.apache.druid.query.Result): 13