Example 76 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

The class GroupByTypeInterfaceBenchmark, method setupQueries.

private void setupQueries() {
    // queries for the basic schema
    Map<String, GroupByQuery> basicQueries = new LinkedHashMap<>();
    GeneratorSchemaInfo basicSchema = GeneratorBasicSchemas.SCHEMA_MAP.get("basic");
    {
        // basic.A
        QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
        List<AggregatorFactory> queryAggs = new ArrayList<>();
        queryAggs.add(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"));
        GroupByQuery queryString = GroupByQuery.builder()
            .setDataSource("blah")
            .setQuerySegmentSpec(intervalSpec)
            .setDimensions(new DefaultDimensionSpec("dimSequential", null))
            .setAggregatorSpecs(queryAggs)
            .setGranularity(Granularity.fromString(queryGranularity))
            .build();
        GroupByQuery queryLongFloat = GroupByQuery.builder()
            .setDataSource("blah")
            .setQuerySegmentSpec(intervalSpec)
            .setDimensions(
                new DefaultDimensionSpec("metLongUniform", null),
                new DefaultDimensionSpec("metFloatNormal", null)
            )
            .setAggregatorSpecs(queryAggs)
            .setGranularity(Granularity.fromString(queryGranularity))
            .build();
        GroupByQuery queryLong = GroupByQuery.builder()
            .setDataSource("blah")
            .setQuerySegmentSpec(intervalSpec)
            .setDimensions(new DefaultDimensionSpec("metLongUniform", null))
            .setAggregatorSpecs(queryAggs)
            .setGranularity(Granularity.fromString(queryGranularity))
            .build();
        GroupByQuery queryFloat = GroupByQuery.builder()
            .setDataSource("blah")
            .setQuerySegmentSpec(intervalSpec)
            .setDimensions(new DefaultDimensionSpec("metFloatNormal", null))
            .setAggregatorSpecs(queryAggs)
            .setGranularity(Granularity.fromString(queryGranularity))
            .build();
        basicQueries.put("string", queryString);
        basicQueries.put("longFloat", queryLongFloat);
        basicQueries.put("long", queryLong);
        basicQueries.put("float", queryFloat);
    }
    {
        // basic.nested
        QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
        List<AggregatorFactory> queryAggs = new ArrayList<>();
        queryAggs.add(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"));
        GroupByQuery subqueryA = GroupByQuery.builder()
            .setDataSource("blah")
            .setQuerySegmentSpec(intervalSpec)
            .setDimensions(
                new DefaultDimensionSpec("dimSequential", null),
                new DefaultDimensionSpec("dimZipf", null)
            )
            .setAggregatorSpecs(queryAggs)
            .setGranularity(Granularities.DAY)
            .build();
        GroupByQuery queryA = GroupByQuery.builder()
            .setDataSource(subqueryA)
            .setQuerySegmentSpec(intervalSpec)
            .setDimensions(new DefaultDimensionSpec("dimSequential", null))
            .setAggregatorSpecs(queryAggs)
            .setGranularity(Granularities.WEEK)
            .build();
        basicQueries.put("nested", queryA);
    }
    SCHEMA_QUERY_MAP.put("basic", basicQueries);
}
Also used : GroupByQuery(org.apache.druid.query.groupby.GroupByQuery) GeneratorSchemaInfo(org.apache.druid.segment.generator.GeneratorSchemaInfo) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) QuerySegmentSpec(org.apache.druid.query.spec.QuerySegmentSpec) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) List(java.util.List) ArrayList(java.util.ArrayList) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) LinkedHashMap(java.util.LinkedHashMap)
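
For context, MultipleIntervalSegmentSpec is an immutable wrapper around the list of Joda-Time intervals a query should scan; the benchmark above builds one from the generator schema's data interval. Below is a minimal sketch of the same pattern with explicit intervals, reusing only the builder calls shown in this example; the interval strings and the "blah" data source are placeholders, not values from the benchmark.

// Minimal sketch; interval strings and data source name are placeholders.
QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(
    ImmutableList.of(
        Intervals.of("2000-01-01/2000-02-01"),
        Intervals.of("2000-03-01/2000-04-01")
    )
);
// QuerySegmentSpec#getIntervals() returns the wrapped intervals.
List<Interval> scanned = intervalSpec.getIntervals();

GroupByQuery query = GroupByQuery.builder()
    .setDataSource("blah")
    .setQuerySegmentSpec(intervalSpec)
    .setDimensions(new DefaultDimensionSpec("dimSequential", null))
    .setAggregatorSpecs(Collections.singletonList(new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential")))
    .setGranularity(Granularities.ALL)
    .build();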

Example 77 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

The class MapVirtualColumnTopNTest, method testWithSubColumn.

@Test
public void testWithSubColumn() {
    final TopNQuery query = new TopNQuery(
        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
        // params.key3 is string
        new DefaultDimensionSpec("params.key3", "params.key3"),
        new NumericTopNMetricSpec("count"),
        2,
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
        null,
        Granularities.ALL,
        ImmutableList.of(new CountAggregatorFactory("count")),
        null,
        null
    );
    final List<Result<TopNResultValue>> result = runner.run(QueryPlus.wrap(query)).toList();
    final List<Result<TopNResultValue>> expected = Collections.singletonList(
        new Result<>(
            DateTimes.of("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(ImmutableList.of(
                new DimensionAndMetricValueExtractor(MapVirtualColumnTestBase.mapOf("count", 2L, "params.key3", null)),
                new DimensionAndMetricValueExtractor(MapVirtualColumnTestBase.mapOf("count", 1L, "params.key3", "value3"))
            ))
        )
    );
    Assert.assertEquals(expected, result);
}
Also used : TopNResultValue(org.apache.druid.query.topn.TopNResultValue) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) TopNQuery(org.apache.druid.query.topn.TopNQuery) NumericTopNMetricSpec(org.apache.druid.query.topn.NumericTopNMetricSpec) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) DimensionAndMetricValueExtractor(org.apache.druid.query.topn.DimensionAndMetricValueExtractor) Result(org.apache.druid.query.Result) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
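
The positional TopNQuery constructor above is compact but hard to scan. As a readability aside, a roughly equivalent query could be assembled with Druid's fluent org.apache.druid.query.topn.TopNQueryBuilder; the sketch below assumes the builder's usual setters (dataSource, virtualColumns, dimension, metric, threshold, intervals, granularity, aggregators) map onto the constructor arguments, so treat it as an illustration rather than a drop-in replacement for the test code.

// Sketch only: assumes TopNQueryBuilder's fluent setters cover these arguments.
TopNQuery query = new TopNQueryBuilder()
    .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE))
    .virtualColumns(new MapVirtualColumn("keys", "values", "params"))
    .dimension(new DefaultDimensionSpec("params.key3", "params.key3"))
    .metric(new NumericTopNMetricSpec("count"))
    .threshold(2)
    .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))))
    .granularity(Granularities.ALL)
    .aggregators(ImmutableList.of(new CountAggregatorFactory("count")))
    .build();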

Example 78 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

The class TDigestSketchSqlAggregatorTest, method testComputingSketchOnNumericValues.

@Test
public void testComputingSketchOnNumericValues() throws Exception {
    cannotVectorize();
    testQuery("SELECT\n" + "TDIGEST_GENERATE_SKETCH(m1, 200)" + "FROM foo", ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).granularity(Granularities.ALL).aggregators(ImmutableList.of(new TDigestSketchAggregatorFactory("a0:agg", "m1", 200))).context(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new String[] { "\"AAAAAT/wAAAAAAAAQBgAAAAAAABAaQAAAAAAAAAAAAY/8AAAAAAAAD/wAAAAAAAAP/AAAAAAAABAAAAAAAAAAD/wAAAAAAAAQAgAAAAAAAA/8AAAAAAAAEAQAAAAAAAAP/AAAAAAAABAFAAAAAAAAD/wAAAAAAAAQBgAAAAAAAA=\"" }));
}
Also used : TDigestSketchAggregatorFactory(org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) BaseCalciteQueryTest(org.apache.druid.sql.calcite.BaseCalciteQueryTest) Test(org.junit.Test)

Example 79 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

The class TDigestSketchSqlAggregatorTest, method testCompressionParamForTDigestQuantileAgg.

@Test
public void testCompressionParamForTDigestQuantileAgg() throws Exception {
    cannotVectorize();
    testQuery("SELECT\n" + "TDIGEST_QUANTILE(m1, 0.0), TDIGEST_QUANTILE(m1, 0.5, 200), TDIGEST_QUANTILE(m1, 1.0, 300)\n" + "FROM foo", ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).granularity(Granularities.ALL).aggregators(ImmutableList.of(new TDigestSketchAggregatorFactory("a0:agg", "m1", TDigestSketchAggregatorFactory.DEFAULT_COMPRESSION), new TDigestSketchAggregatorFactory("a1:agg", "m1", 200), new TDigestSketchAggregatorFactory("a2:agg", "m1", 300))).postAggregators(new TDigestSketchToQuantilePostAggregator("a0", makeFieldAccessPostAgg("a0:agg"), 0.0f), new TDigestSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a1:agg"), 0.5f), new TDigestSketchToQuantilePostAggregator("a2", makeFieldAccessPostAgg("a2:agg"), 1.0f)).context(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { 1.0, 3.5, 6.0 }));
}
Also used : TDigestSketchAggregatorFactory(org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory) TDigestSketchToQuantilePostAggregator(org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchToQuantilePostAggregator) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) BaseCalciteQueryTest(org.apache.druid.sql.calcite.BaseCalciteQueryTest) Test(org.junit.Test)

Example 80 with MultipleIntervalSegmentSpec

Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.

The class TDigestSketchSqlAggregatorTest, method testEmptyTimeseriesResults.

@Test
public void testEmptyTimeseriesResults() throws Exception {
    cannotVectorize();
    testQuery("SELECT\n" + "TDIGEST_GENERATE_SKETCH(m1)," + "TDIGEST_QUANTILE(qsketch_m1, 0.1)" + "FROM foo WHERE dim2 = 0", ImmutableList.of(Druids.newTimeseriesQueryBuilder().dataSource(CalciteTests.DATASOURCE1).intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()))).filters(bound("dim2", "0", "0", false, false, null, StringComparators.NUMERIC)).granularity(Granularities.ALL).aggregators(ImmutableList.of(new TDigestSketchAggregatorFactory("a0:agg", "m1", TDigestSketchAggregatorFactory.DEFAULT_COMPRESSION), new TDigestSketchAggregatorFactory("a1:agg", "qsketch_m1", 100))).postAggregators(new TDigestSketchToQuantilePostAggregator("a1", makeFieldAccessPostAgg("a1:agg"), 0.1f)).context(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { "\"AAAAAX/wAAAAAAAA//AAAAAAAABAWQAAAAAAAAAAAAA=\"", Double.NaN }));
}
Also used : TDigestSketchAggregatorFactory(org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory) TDigestSketchToQuantilePostAggregator(org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchToQuantilePostAggregator) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) BaseCalciteQueryTest(org.apache.druid.sql.calcite.BaseCalciteQueryTest) Test(org.junit.Test)
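
Examples 78 through 80 all express "all of time" the same way: Filtration.eternity() wrapped in a single-element MultipleIntervalSegmentSpec and handed to Druids.newTimeseriesQueryBuilder().intervals(...). When that pattern repeats across many expected queries, a small helper keeps the test bodies shorter; the method name below is made up for this sketch and is not part of the Druid test classes.

// Hypothetical convenience helper for the repeated eternity-interval spec.
private static QuerySegmentSpec eternityIntervals() {
    return new MultipleIntervalSegmentSpec(ImmutableList.of(Filtration.eternity()));
}

// Used in an expected query exactly as in Examples 78-80:
Druids.newTimeseriesQueryBuilder()
      .dataSource(CalciteTests.DATASOURCE1)
      .intervals(eternityIntervals())
      .granularity(Granularities.ALL)
      .aggregators(ImmutableList.of(new TDigestSketchAggregatorFactory("a0:agg", "m1", 200)))
      .context(QUERY_CONTEXT_DEFAULT)
      .build();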

Aggregations

MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec): 142
Test (org.junit.Test): 115
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 53
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory): 44
BaseCalciteQueryTest (org.apache.druid.sql.calcite.BaseCalciteQueryTest): 39
QuerySegmentSpec (org.apache.druid.query.spec.QuerySegmentSpec): 28
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 25
ResponseContext (org.apache.druid.query.context.ResponseContext): 22
QueryRunner (org.apache.druid.query.QueryRunner): 21
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory): 21
QueryPlus (org.apache.druid.query.QueryPlus): 20
TableDataSource (org.apache.druid.query.TableDataSource): 19
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner): 18
ArrayList (java.util.ArrayList): 17
QueryDataSource (org.apache.druid.query.QueryDataSource): 15
DefaultLimitSpec (org.apache.druid.query.groupby.orderby.DefaultLimitSpec): 15
OrderByColumnSpec (org.apache.druid.query.groupby.orderby.OrderByColumnSpec): 15
Sequence (org.apache.druid.java.util.common.guava.Sequence): 14
FieldAccessPostAggregator (org.apache.druid.query.aggregation.post.FieldAccessPostAggregator): 14
Result (org.apache.druid.query.Result): 13