Example usage of org.apache.druid.query.TableDataSource in the druid-io/druid project, taken from the class DefaultRequestLogEventTest, method testDefaultRequestLogEventToMap.
@Test
public void testDefaultRequestLogEventToMap() {
    // Fixed inputs describing the request being logged.
    final String eventFeed = "test";
    final DateTime eventTime = DateTimes.of(2019, 12, 12, 3, 1);
    final String serviceName = "druid-service";
    final String hostAddress = "127.0.0.1";

    // A minimal timeseries query over a dummy table; the content only needs to
    // round-trip through the event, not execute.
    final Query query = new TimeseriesQuery(
        new TableDataSource("dummy"),
        new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))),
        true,
        VirtualColumns.EMPTY,
        null,
        Granularities.ALL,
        ImmutableList.of(),
        ImmutableList.of(),
        5,
        ImmutableMap.of("key", "value"));

    final QueryStats queryStats = new QueryStats(
        ImmutableMap.of("query/time", 13L, "query/bytes", 10L, "success", true, "identity", "allowAll"));

    final RequestLogLine nativeLine = RequestLogLine.forNative(query, eventTime, hostAddress, queryStats);
    final DefaultRequestLogEvent defaultRequestLogEvent = new DefaultRequestLogEvent(
        ImmutableMap.of("service", serviceName, "host", hostAddress),
        eventFeed,
        nativeLine);

    // The map form must expose every field of the event. The host doubles as
    // the remote address because forNative() records it as the caller address.
    final Map<String, Object> expected = new HashMap<>();
    expected.put("feed", eventFeed);
    expected.put("service", serviceName);
    expected.put("host", hostAddress);
    expected.put("remoteAddr", hostAddress);
    expected.put("timestamp", eventTime);
    expected.put("query", query);
    expected.put("queryStats", queryStats);

    Assert.assertEquals(expected, defaultRequestLogEvent.toMap());
}
Example usage of org.apache.druid.query.TableDataSource in the druid-io/druid project, taken from the class CalciteQueryTest, method testExactCountDistinctOfSemiJoinResult.
@Test
public void testExactCountDistinctOfSemiJoinResult() throws Exception {
    // Cannot vectorize due to extraction dimension spec.
    cannotVectorize();

    // Innermost subquery: the first character of dim1 for every row whose dim1
    // is non-empty. This is the right-hand side of the semi-join.
    final GroupByQuery substringQuery = GroupByQuery.builder()
        .setDataSource(CalciteTests.DATASOURCE1)
        .setInterval(querySegmentSpec(Filtration.eternity()))
        .setGranularity(Granularities.ALL)
        .setDimFilter(not(selector("dim1", "", null)))
        .setDimensions(dimensions(new ExtractionDimensionSpec("dim1", "d0", new SubstringDimExtractionFn(0, 1))))
        .setContext(QUERY_CONTEXT_DEFAULT)
        .build();

    // Semi-join realized as an INNER join on substring(dim2, 1, 1), producing
    // the distinct dim2 values within the two-year interval.
    final GroupByQuery semiJoinQuery = GroupByQuery.builder()
        .setDataSource(
            join(
                new TableDataSource(CalciteTests.DATASOURCE1),
                new QueryDataSource(substringQuery),
                "j0.",
                equalsCondition(
                    makeExpression("substring(\"dim2\", 0, 1)"),
                    DruidExpression.ofColumn(ColumnType.STRING, "j0.d0")),
                JoinType.INNER))
        .setInterval(querySegmentSpec(Intervals.of("2000-01-01/2002-01-01")))
        .setGranularity(Granularities.ALL)
        .setDimensions(dimensions(new DefaultDimensionSpec("dim2", "d0")))
        .setContext(QUERY_CONTEXT_DEFAULT)
        .build();

    // Outer query: exact COUNT(*) over the distinct dim2 values.
    final GroupByQuery countQuery = GroupByQuery.builder()
        .setDataSource(new QueryDataSource(semiJoinQuery))
        .setInterval(querySegmentSpec(Filtration.eternity()))
        .setGranularity(Granularities.ALL)
        .setAggregatorSpecs(aggregators(new CountAggregatorFactory("a0")))
        .setContext(QUERY_CONTEXT_DEFAULT)
        .build();

    testQuery(
        "SELECT COUNT(*)\n"
            + "FROM (\n"
            + " SELECT DISTINCT dim2\n"
            + " FROM druid.foo\n"
            + " WHERE SUBSTRING(dim2, 1, 1) IN (\n"
            + " SELECT SUBSTRING(dim1, 1, 1) FROM druid.foo WHERE dim1 <> ''\n"
            + " ) AND __time >= '2000-01-01' AND __time < '2002-01-01'\n"
            + ")",
        ImmutableList.of(countQuery),
        ImmutableList.of(new Object[] { 2L }));
}
Example usage of org.apache.druid.query.TableDataSource in the druid-io/druid project, taken from the class CalciteArraysQueryTest, method testArrayAggAsArrayFromJoin.
@Test
public void testArrayAggAsArrayFromJoin() throws Exception {
    cannotVectorize();

    // Expected rows differ between SQL-compatible and default-value null
    // handling: in default mode the empty string is filtered out of the "a"
    // group's aggregated array.
    final List<Object[]> expectedResults = useDefault
        ? ImmutableList.of(
            new Object[] { "a", "[\"10.1\",\"2\"]", "10.1,2" },
            new Object[] { "a", "[\"10.1\",\"2\"]", "10.1,2" },
            new Object[] { "a", "[\"10.1\",\"2\"]", "10.1,2" },
            new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" },
            new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" },
            new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" })
        : ImmutableList.of(
            new Object[] { "a", "[\"\",\"10.1\",\"2\"]", ",10.1,2" },
            new Object[] { "a", "[\"\",\"10.1\",\"2\"]", ",10.1,2" },
            new Object[] { "a", "[\"\",\"10.1\",\"2\"]", ",10.1,2" },
            new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" },
            new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" },
            new Object[] { "b", "[\"1\",\"abc\",\"def\"]", "1,abc,def" });

    // Right-hand side of the join: per-dim4 set-style ARRAY_AGG of dim1.
    final GroupByQuery arrayAggQuery = GroupByQuery.builder()
        .setDataSource(CalciteTests.DATASOURCE3)
        .setInterval(querySegmentSpec(Filtration.eternity()))
        .setGranularity(Granularities.ALL)
        .setDimFilter(not(selector("dim1", null, null)))
        .setDimensions(new DefaultDimensionSpec("dim4", "_d0"))
        .setAggregatorSpecs(
            aggregators(
                new ExpressionLambdaAggregatorFactory(
                    "a0",
                    ImmutableSet.of("dim1"),
                    "__acc",
                    "ARRAY<STRING>[]",
                    "ARRAY<STRING>[]",
                    true,
                    true,
                    false,
                    "array_set_add(\"__acc\", \"dim1\")",
                    "array_set_add_all(\"__acc\", \"a0\")",
                    null,
                    null,
                    ExpressionLambdaAggregatorFactory.DEFAULT_MAX_SIZE_BYTES,
                    TestExprMacroTable.INSTANCE)))
        .setContext(QUERY_CONTEXT_DEFAULT)
        .build();

    // Outer scan joins numfoo against the aggregated subquery on dim4 and
    // stringifies the aggregated array via a virtual column.
    final ScanQuery expectedScan = Druids.newScanQueryBuilder()
        .dataSource(
            join(
                new TableDataSource(CalciteTests.DATASOURCE3),
                new QueryDataSource(arrayAggQuery),
                "j0.",
                "(\"dim4\" == \"j0._d0\")",
                JoinType.INNER,
                null))
        .virtualColumns(expressionVirtualColumn("v0", "array_to_string(\"j0.a0\",',')", ColumnType.STRING))
        .intervals(querySegmentSpec(Filtration.eternity()))
        .columns("dim4", "j0.a0", "v0")
        .context(QUERY_CONTEXT_DEFAULT)
        .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
        .legacy(false)
        .build();

    testQuery(
        "SELECT numfoo.dim4, j.arr, ARRAY_TO_STRING(j.arr, ',') FROM numfoo INNER JOIN (SELECT dim4, ARRAY_AGG(DISTINCT dim1) as arr FROM numfoo WHERE dim1 is not null GROUP BY 1) as j ON numfoo.dim4 = j.dim4",
        ImmutableList.of(expectedScan),
        expectedResults);
}
Example usage of org.apache.druid.query.TableDataSource in the druid-io/druid project, taken from the class CalciteArraysQueryTest, method testArrayAggGroupByArrayAggFromSubquery.
@Test
public void testArrayAggGroupByArrayAggFromSubquery() throws Exception {
    cannotVectorize();

    // Inner subquery (LIMIT 5 turns it into a TopN): per-dim2 set-style
    // ARRAY_AGG of non-null dim1 values.
    final TopNQuery arrayAggTopN = new TopNQuery(
        new TableDataSource(CalciteTests.DATASOURCE1),
        null,
        new DefaultDimensionSpec("dim2", "d0", ColumnType.STRING),
        new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC),
        5,
        querySegmentSpec(Filtration.eternity()),
        new NotDimFilter(new SelectorDimFilter("dim1", null, null)),
        Granularities.ALL,
        aggregators(
            new ExpressionLambdaAggregatorFactory(
                "a0",
                ImmutableSet.of("dim1"),
                "__acc",
                "ARRAY<STRING>[]",
                "ARRAY<STRING>[]",
                true,
                true,
                false,
                "array_set_add(\"__acc\", \"dim1\")",
                "array_set_add_all(\"__acc\", \"a0\")",
                null,
                null,
                new HumanReadableBytes(1024),
                ExprMacroTable.nil())),
        null,
        QUERY_CONTEXT_NO_STRINGIFY_ARRAY);

    // Outer query: COUNT(*) grouped by (dim2, arr). The original chained a
    // redundant .setDimFilter(null) and a duplicate .setGranularity(...) call;
    // both were no-ops and have been removed.
    final GroupByQuery expectedQuery = GroupByQuery.builder()
        .setDataSource(arrayAggTopN)
        .setInterval(querySegmentSpec(Filtration.eternity()))
        .setGranularity(Granularities.ALL)
        .setDimensions(
            dimensions(
                new DefaultDimensionSpec("d0", "_d0", ColumnType.STRING),
                new DefaultDimensionSpec("a0", "_d1", ColumnType.STRING_ARRAY)))
        .setAggregatorSpecs(aggregators(new CountAggregatorFactory("_a0")))
        .setContext(QUERY_CONTEXT_NO_STRINGIFY_ARRAY)
        .build();

    // Expected rows depend on null-handling mode, as empty strings and nulls
    // group differently in default-value mode.
    final List<Object[]> expectedResults = useDefault
        ? ImmutableList.of(
            new Object[] { "", ImmutableList.of("10.1", "2", "abc"), 1L },
            new Object[] { "a", ImmutableList.of("1"), 1L },
            new Object[] { "abc", ImmutableList.of("def"), 1L })
        : ImmutableList.of(
            new Object[] { null, ImmutableList.of("10.1", "abc"), 1L },
            new Object[] { "", ImmutableList.of("2"), 1L },
            new Object[] { "a", ImmutableList.of("", "1"), 1L },
            new Object[] { "abc", ImmutableList.of("def"), 1L });

    testQuery(
        "SELECT dim2, arr, COUNT(*) FROM (SELECT dim2, ARRAY_AGG(DISTINCT dim1) as arr FROM foo WHERE dim1 is not null GROUP BY 1 LIMIT 5) GROUP BY 1,2",
        QUERY_CONTEXT_NO_STRINGIFY_ARRAY,
        ImmutableList.of(expectedQuery),
        expectedResults);
}
Example usage of org.apache.druid.query.TableDataSource in the druid-io/druid project, taken from the class InlineSegmentWranglerTest, method test_getSegmentsForIntervals_nonInline.
@Test
public void test_getSegmentsForIntervals_nonInline() {
    // The inline wrangler handles only inline data sources; passing a
    // TableDataSource is expected to fail with a ClassCastException.
    expectedException.expect(ClassCastException.class);
    expectedException.expectMessage("TableDataSource cannot be cast");

    // The return value is irrelevant — the call itself must throw.
    factory.getSegmentsForIntervals(new TableDataSource("foo"), Intervals.ONLY_ETERNITY);
}
Aggregations