Search in sources:

Example 86 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

In class DefaultRequestLogEventTest, the method testDefaultRequestLogEventToMap:

@Test
public void testDefaultRequestLogEventToMap() {
    // Fixed inputs for the event under test.
    final String feed = "test";
    final String service = "druid-service";
    final String host = "127.0.0.1";
    final DateTime timestamp = DateTimes.of(2019, 12, 12, 3, 1);
    final Query query = new TimeseriesQuery(new TableDataSource("dummy"), new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), true, VirtualColumns.EMPTY, null, Granularities.ALL, ImmutableList.of(), ImmutableList.of(), 5, ImmutableMap.of("key", "value"));
    final QueryStats queryStats = new QueryStats(ImmutableMap.of("query/time", 13L, "query/bytes", 10L, "success", true, "identity", "allowAll"));
    // Build the event from a native-query request log line.
    final RequestLogLine logLine = RequestLogLine.forNative(query, timestamp, host, queryStats);
    final DefaultRequestLogEvent event = new DefaultRequestLogEvent(ImmutableMap.of("service", service, "host", host), feed, logLine);
    // The flattened map must echo every input field; "remoteAddr" mirrors the host.
    final Map<String, Object> expected = new HashMap<>();
    expected.put("feed", feed);
    expected.put("service", service);
    expected.put("host", host);
    expected.put("remoteAddr", host);
    expected.put("timestamp", timestamp);
    expected.put("query", query);
    expected.put("queryStats", queryStats);
    Assert.assertEquals(expected, event.toMap());
}
Also used : TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) Query(org.apache.druid.query.Query) TimeseriesQuery(org.apache.druid.query.timeseries.TimeseriesQuery) TableDataSource(org.apache.druid.query.TableDataSource) QueryStats(org.apache.druid.server.QueryStats) HashMap(java.util.HashMap) RequestLogLine(org.apache.druid.server.RequestLogLine) MultipleIntervalSegmentSpec(org.apache.druid.query.spec.MultipleIntervalSegmentSpec) DateTime(org.joda.time.DateTime) Test(org.junit.Test)

Example 87 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

In class CalciteQueryTest, the method testExactCountDistinctOfSemiJoinResult:

@Test
public void testExactCountDistinctOfSemiJoinResult() throws Exception {
    // Cannot vectorize due to extraction dimension spec.
    cannotVectorize();
    // SQL under test: COUNT(*) over the DISTINCT dim2 values whose first character
    // matches the first character of some non-empty dim1 (a semi-join), restricted
    // to __time in [2000-01-01, 2002-01-01).
    //
    // Expected native plan: an outer GroupBy counting the rows of an inner GroupBy
    // whose data source is an INNER join of DATASOURCE1 against a subquery that
    // groups on SUBSTRING(dim1, 1, 1) (SubstringDimExtractionFn(0, 1)) with dim1 <> ''
    // filtered out; the join condition compares substring("dim2", 0, 1) to j0.d0.
    //
    // Expected result: a single row with count 2.
    testQuery("SELECT COUNT(*)\n" + "FROM (\n" + "  SELECT DISTINCT dim2\n" + "  FROM druid.foo\n" + "  WHERE SUBSTRING(dim2, 1, 1) IN (\n" + "    SELECT SUBSTRING(dim1, 1, 1) FROM druid.foo WHERE dim1 <> ''\n" + "  ) AND __time >= '2000-01-01' AND __time < '2002-01-01'\n" + ")", ImmutableList.of(GroupByQuery.builder().setDataSource(new QueryDataSource(GroupByQuery.builder().setDataSource(join(new TableDataSource(CalciteTests.DATASOURCE1), new QueryDataSource(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE1).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimFilter(not(selector("dim1", "", null))).setDimensions(dimensions(new ExtractionDimensionSpec("dim1", "d0", new SubstringDimExtractionFn(0, 1)))).setContext(QUERY_CONTEXT_DEFAULT).build()), "j0.", equalsCondition(makeExpression("substring(\"dim2\", 0, 1)"), DruidExpression.ofColumn(ColumnType.STRING, "j0.d0")), JoinType.INNER)).setInterval(querySegmentSpec(Intervals.of("2000-01-01/2002-01-01"))).setGranularity(Granularities.ALL).setDimensions(dimensions(new DefaultDimensionSpec("dim2", "d0"))).setContext(QUERY_CONTEXT_DEFAULT).build())).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0"))).setContext(QUERY_CONTEXT_DEFAULT).build()), ImmutableList.of(new Object[] { 2L }));
}
Also used : SubstringDimExtractionFn(org.apache.druid.query.extraction.SubstringDimExtractionFn) QueryDataSource(org.apache.druid.query.QueryDataSource) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) ExtractionDimensionSpec(org.apache.druid.query.dimension.ExtractionDimensionSpec) Test(org.junit.Test)

Example 88 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

In class CalciteArraysQueryTest, the method testArrayAggAsArrayFromJoin:

@Test
public void testArrayAggAsArrayFromJoin() throws Exception {
    // ARRAY_AGG plans to an ExpressionLambdaAggregatorFactory, which cannot be vectorized.
    cannotVectorize();
    List<Object[]> expectedResults;
    // In "default" (replace-with-default) null-handling mode, empty-string dim1 rows are
    // excluded by the "dim1 is not null" filter, so group "a" aggregates to ["10.1","2"];
    // in SQL-compatible mode the empty string survives, giving ["","10.1","2"].
    if (useDefault) {
        expectedResults = ImmutableList.of(new Object[] { "a", "[\"10.1\",\"2\"]", "10.1,2" }, new Object[] { "a", "[\"10.1\",\"2\"]", "10.1,2" }, new Object[] { "a", "[\"10.1\",\"2\"]", "10.1,2" }, new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" }, new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" }, new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" });
    } else {
        expectedResults = ImmutableList.of(new Object[] { "a", "[\"\",\"10.1\",\"2\"]", ",10.1,2" }, new Object[] { "a", "[\"\",\"10.1\",\"2\"]", ",10.1,2" }, new Object[] { "a", "[\"\",\"10.1\",\"2\"]", ",10.1,2" }, new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" }, new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" }, new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" });
    }
    // Expected native plan: a Scan over an INNER join of DATASOURCE3 against a GroupBy
    // subquery that aggregates DISTINCT dim1 per dim4 via array_set_add /
    // array_set_add_all; a virtual column v0 computes array_to_string(j0.a0, ',')
    // for the ARRAY_TO_STRING projection.
    testQuery("SELECT numfoo.dim4, j.arr, ARRAY_TO_STRING(j.arr, ',') FROM numfoo INNER JOIN (SELECT dim4, ARRAY_AGG(DISTINCT dim1) as arr FROM numfoo WHERE dim1 is not null GROUP BY 1) as j ON numfoo.dim4 = j.dim4", ImmutableList.of(Druids.newScanQueryBuilder().dataSource(join(new TableDataSource(CalciteTests.DATASOURCE3), new QueryDataSource(GroupByQuery.builder().setDataSource(CalciteTests.DATASOURCE3).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimFilter(not(selector("dim1", null, null))).setDimensions(new DefaultDimensionSpec("dim4", "_d0")).setAggregatorSpecs(aggregators(new ExpressionLambdaAggregatorFactory("a0", ImmutableSet.of("dim1"), "__acc", "ARRAY<STRING>[]", "ARRAY<STRING>[]", true, true, false, "array_set_add(\"__acc\", \"dim1\")", "array_set_add_all(\"__acc\", \"a0\")", null, null, ExpressionLambdaAggregatorFactory.DEFAULT_MAX_SIZE_BYTES, TestExprMacroTable.INSTANCE))).setContext(QUERY_CONTEXT_DEFAULT).build()), "j0.", "(\"dim4\" == \"j0._d0\")", JoinType.INNER, null)).virtualColumns(expressionVirtualColumn("v0", "array_to_string(\"j0.a0\",',')", ColumnType.STRING)).intervals(querySegmentSpec(Filtration.eternity())).columns("dim4", "j0.a0", "v0").context(QUERY_CONTEXT_DEFAULT).resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST).legacy(false).build()), expectedResults);
}
Also used : ExpressionLambdaAggregatorFactory(org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory) TableDataSource(org.apache.druid.query.TableDataSource) QueryDataSource(org.apache.druid.query.QueryDataSource) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Test(org.junit.Test)

Example 89 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

In class CalciteArraysQueryTest, the method testArrayAggGroupByArrayAggFromSubquery:

@Test
public void testArrayAggGroupByArrayAggFromSubquery() throws Exception {
    // ARRAY_AGG plans to an ExpressionLambdaAggregatorFactory, which cannot be vectorized.
    cannotVectorize();
    // SQL under test: group by (dim2, arr) where arr is itself an ARRAY_AGG(DISTINCT dim1)
    // produced by a LIMIT 5 subquery — i.e. grouping on a STRING_ARRAY-typed column.
    //
    // Expected native plan: an outer GroupBy over an inner TopN (the subquery's
    // GROUP BY ... LIMIT 5 plans to a lexicographic TopN on dim2) whose aggregator
    // builds the distinct-dim1 array via array_set_add / array_set_add_all.
    // QUERY_CONTEXT_NO_STRINGIFY_ARRAY keeps arr as a real array rather than a JSON string.
    //
    // Expected rows differ by null-handling mode: in default mode null dim2 collapses
    // into "", while SQL-compatible mode keeps null and "" as distinct groups.
    testQuery("SELECT dim2, arr, COUNT(*) FROM (SELECT dim2, ARRAY_AGG(DISTINCT dim1) as arr FROM foo WHERE dim1 is not null GROUP BY 1 LIMIT 5) GROUP BY 1,2", QUERY_CONTEXT_NO_STRINGIFY_ARRAY, ImmutableList.of(GroupByQuery.builder().setDataSource(new TopNQuery(new TableDataSource(CalciteTests.DATASOURCE1), null, new DefaultDimensionSpec("dim2", "d0", ColumnType.STRING), new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC), 5, querySegmentSpec(Filtration.eternity()), new NotDimFilter(new SelectorDimFilter("dim1", null, null)), Granularities.ALL, aggregators(new ExpressionLambdaAggregatorFactory("a0", ImmutableSet.of("dim1"), "__acc", "ARRAY<STRING>[]", "ARRAY<STRING>[]", true, true, false, "array_set_add(\"__acc\", \"dim1\")", "array_set_add_all(\"__acc\", \"a0\")", null, null, new HumanReadableBytes(1024), ExprMacroTable.nil())), null, QUERY_CONTEXT_NO_STRINGIFY_ARRAY)).setInterval(querySegmentSpec(Filtration.eternity())).setGranularity(Granularities.ALL).setDimFilter(null).setGranularity(Granularities.ALL).setDimensions(dimensions(new DefaultDimensionSpec("d0", "_d0", ColumnType.STRING), new DefaultDimensionSpec("a0", "_d1", ColumnType.STRING_ARRAY))).setAggregatorSpecs(aggregators(new CountAggregatorFactory("_a0"))).setContext(QUERY_CONTEXT_NO_STRINGIFY_ARRAY).build()), useDefault ? ImmutableList.of(new Object[] { "", ImmutableList.of("10.1", "2", "abc"), 1L }, new Object[] { "a", ImmutableList.of("1"), 1L }, new Object[] { "abc", ImmutableList.of("def"), 1L }) : ImmutableList.of(new Object[] { null, ImmutableList.of("10.1", "abc"), 1L }, new Object[] { "", ImmutableList.of("2"), 1L }, new Object[] { "a", ImmutableList.of("", "1"), 1L }, new Object[] { "abc", ImmutableList.of("def"), 1L }));
}
Also used : DimensionTopNMetricSpec(org.apache.druid.query.topn.DimensionTopNMetricSpec) NotDimFilter(org.apache.druid.query.filter.NotDimFilter) ExpressionLambdaAggregatorFactory(org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory) TableDataSource(org.apache.druid.query.TableDataSource) CountAggregatorFactory(org.apache.druid.query.aggregation.CountAggregatorFactory) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) TopNQuery(org.apache.druid.query.topn.TopNQuery) HumanReadableBytes(org.apache.druid.java.util.common.HumanReadableBytes) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) Test(org.junit.Test)

Example 90 with TableDataSource

use of org.apache.druid.query.TableDataSource in project druid by druid-io.

In class InlineSegmentWranglerTest, the method test_getSegmentsForIntervals_nonInline:

@Test
public void test_getSegmentsForIntervals_nonInline() {
    // The inline segment wrangler only handles InlineDataSource; handing it a
    // TableDataSource must fail with a ClassCastException.
    expectedException.expect(ClassCastException.class);
    expectedException.expectMessage("TableDataSource cannot be cast");
    // Return value intentionally discarded — the call itself is expected to throw.
    factory.getSegmentsForIntervals(new TableDataSource("foo"), Intervals.ONLY_ETERNITY);
}
Also used : TableDataSource(org.apache.druid.query.TableDataSource) Test(org.junit.Test)

Aggregations

TableDataSource (org.apache.druid.query.TableDataSource)118 Test (org.junit.Test)94 GlobalTableDataSource (org.apache.druid.query.GlobalTableDataSource)46 CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory)43 QueryDataSource (org.apache.druid.query.QueryDataSource)41 DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec)40 Parameters (junitparams.Parameters)30 MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec)19 LookupDataSource (org.apache.druid.query.LookupDataSource)18 DataSegment (org.apache.druid.timeline.DataSegment)15 Result (org.apache.druid.query.Result)14 CountDownLatch (java.util.concurrent.CountDownLatch)11 Query (org.apache.druid.query.Query)11 TimelineObjectHolder (org.apache.druid.timeline.TimelineObjectHolder)11 Interval (org.joda.time.Interval)11 SelectorDimFilter (org.apache.druid.query.filter.SelectorDimFilter)10 ArrayList (java.util.ArrayList)9 GroupByQuery (org.apache.druid.query.groupby.GroupByQuery)9 ISE (org.apache.druid.java.util.common.ISE)8 SegmentDescriptor (org.apache.druid.query.SegmentDescriptor)8