Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
From the class DataSourceAnalysisTest, the method testJoinUnderTopLevelSubqueries:
@Test
public void testJoinUnderTopLevelSubqueries()
{
  // Two levels of subquery wrapped around an inner join of a table with a lookup.
  final QueryDataSource queryDataSource = subquery(
      subquery(join(TABLE_FOO, LOOKUP_LOOKYLOO, "1.", JoinType.INNER, TrueDimFilter.instance()))
  );
  final DataSourceAnalysis analysis = DataSourceAnalysis.forDataSource(queryDataSource);
  Assert.assertTrue(analysis.isConcreteBased());
  Assert.assertTrue(analysis.isConcreteTableBased());
  Assert.assertFalse(analysis.isGlobal());
  Assert.assertTrue(analysis.isQuery());
  Assert.assertEquals(queryDataSource, analysis.getDataSource());
  // The analysis digs through the subqueries: the base is the concrete table.
  Assert.assertEquals(TABLE_FOO, analysis.getBaseDataSource());
  Assert.assertEquals(TrueDimFilter.instance(), analysis.getJoinBaseTableFilter().orElse(null));
  Assert.assertEquals(Optional.of(TABLE_FOO), analysis.getBaseTableDataSource());
  Assert.assertEquals(Optional.empty(), analysis.getBaseUnionDataSource());
  Assert.assertEquals(
      Optional.of(subquery(join(TABLE_FOO, LOOKUP_LOOKYLOO, "1.", JoinType.INNER, TrueDimFilter.instance())).getQuery()),
      analysis.getBaseQuery()
  );
  // The base query's segment spec is surfaced by the analysis.
  Assert.assertEquals(
      Optional.of(new MultipleIntervalSegmentSpec(MILLENIUM_INTERVALS)),
      analysis.getBaseQuerySegmentSpec()
  );
  Assert.assertEquals(
      ImmutableList.of(new PreJoinableClause("1.", LOOKUP_LOOKYLOO, JoinType.INNER, joinClause("1."))),
      analysis.getPreJoinableClauses()
  );
  Assert.assertTrue(analysis.isJoin());
}
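For orientation, here is a minimal sketch of the same analysis entry point applied to a plain table, with none of the test's helpers; the table name is illustrative, and the behavior comments reflect the Druid version these tests target (the one exposing DataSourceAnalysis.forDataSource):

import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.planning.DataSourceAnalysis;

// A bare table analyzes to itself: it is its own base, with no join clauses
// and no base query to contribute a segment spec.
DataSourceAnalysis analysis = DataSourceAnalysis.forDataSource(new TableDataSource("foo"));
// analysis.isConcreteTableBased() -> true
// analysis.isJoin()               -> false
// analysis.getBaseQuerySegmentSpec() -> Optional.empty()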
Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
From the class ScanQueryQueryToolChestTest, the method test_resultArraySignature_columnsSpecified:
@Test
public void test_resultArraySignature_columnsSpecified() {
  final ScanQuery scanQuery = Druids.newScanQueryBuilder()
      .dataSource("foo")
      .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2000/3000"))))
      .columns("foo", "bar")
      .build();
  // Explicitly requested columns appear in the signature, with unknown (null) types.
  Assert.assertEquals(
      RowSignature.builder().add("foo", null).add("bar", null).build(),
      toolChest.resultArraySignature(scanQuery)
  );
}
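As a reminder of the spec's shape outside a builder, here is a hedged sketch of constructing a MultipleIntervalSegmentSpec directly; the variable name and interval strings are illustrative:

import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;

// The constructor takes a List<Interval>; in Druid's implementation, overlapping
// or abutting intervals are condensed (these two would merge into 2000/4000).
QuerySegmentSpec spec = new MultipleIntervalSegmentSpec(
    ImmutableList.of(Intervals.of("2000/3000"), Intervals.of("3000/4000"))
);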
Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
From the class NestedQueryPushDownTest, the method testSimpleDoubleAggregation:
@Test
public void testSimpleDoubleAggregation() {
  QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(Intervals.utc(1500000000000L, 1600000000000L))
  );
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("blah")
      .setQuerySegmentSpec(intervalSpec)
      .setDimensions(new DefaultDimensionSpec("dimA", "dimA"), new DefaultDimensionSpec("dimB", "dimB"))
      .setAggregatorSpecs(new LongSumAggregatorFactory("metASum", "metA"), new LongSumAggregatorFactory("metBSum", "metB"))
      .setGranularity(Granularities.ALL)
      .build();
  // The outer query re-aggregates the inner query's "metASum"; the context flag
  // forces the nested query to be pushed down to the data nodes.
  GroupByQuery nestedQuery = GroupByQuery.builder()
      .setDataSource(query)
      .setQuerySegmentSpec(intervalSpec)
      .setDimensions(new DefaultDimensionSpec("dimB", "dimB"))
      .setAggregatorSpecs(new LongSumAggregatorFactory("totalSum", "metASum"))
      .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_PUSH_DOWN_NESTED_QUERY, true))
      .setGranularity(Granularities.ALL)
      .build();
  Sequence<ResultRow> queryResult = runNestedQueryWithForcePushDown(nestedQuery);
  List<ResultRow> results = queryResult.toList();
  ResultRow expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow(nestedQuery, "2017-07-14T02:40:00.000Z", "dimB", "sour", "totalSum", 2000L);
  ResultRow expectedRow1 = GroupByQueryRunnerTestHelper.createExpectedRow(nestedQuery, "2017-07-14T02:40:00.000Z", "dimB", "sweet", "totalSum", 6000L);
  Assert.assertEquals(2, results.size());
  Assert.assertEquals(expectedRow0, results.get(0));
  Assert.assertEquals(expectedRow1, results.get(1));
}
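A note on the expected timestamps above: with Granularities.ALL every result row is bucketed to the start of the query interval, and 1500000000000 ms since the epoch is 2017-07-14T02:40:00Z. A small sketch, with an illustrative variable name:

import org.apache.druid.java.util.common.Intervals;
import org.joda.time.Interval;

Interval interval = Intervals.utc(1500000000000L, 1600000000000L);
// interval.getStart() -> 2017-07-14T02:40:00.000Z, which is why both expected
// rows in the test carry that timestamp.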
Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
From the class NestedQueryPushDownTest, the method testDimensionFilterOnOuterAndInnerQueries:
@Test
public void testDimensionFilterOnOuterAndInnerQueries() {
  QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(
      Collections.singletonList(Intervals.utc(1500000000000L, 1600000000000L))
  );
  GroupByQuery query = GroupByQuery.builder()
      .setDataSource("blah")
      .setDimensions(new DefaultDimensionSpec("dimA", "dimA"), new DefaultDimensionSpec("dimB", "dimB"))
      .setAggregatorSpecs(new LongSumAggregatorFactory("metASum", "metA"), new LongSumAggregatorFactory("metBSum", "metB"))
      .setGranularity(Granularities.ALL)
      .setQuerySegmentSpec(intervalSpec)
      .setDimFilter(new JavaScriptDimFilter("dimA", "function(dim){ return dim == 'mango' }", null, JavaScriptConfig.getEnabledInstance()))
      .build();
  // The inner query keeps only dimA == 'mango'; the outer query then filters the
  // same dimension for 'pomegranate', so the two filters can never both match.
  GroupByQuery nestedQuery = GroupByQuery.builder()
      .setDataSource(query)
      .setDimensions(new DefaultDimensionSpec("dimA", "newDimA"))
      .setAggregatorSpecs(new LongSumAggregatorFactory("finalSum", "metASum"))
      .setContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_PUSH_DOWN_NESTED_QUERY, true))
      .setGranularity(Granularities.ALL)
      .setDimFilter(new JavaScriptDimFilter("dimA", "function(dim){ return dim == 'pomegranate' }", null, JavaScriptConfig.getEnabledInstance()))
      .setQuerySegmentSpec(intervalSpec)
      .build();
  Sequence<ResultRow> queryResult = runNestedQueryWithForcePushDown(nestedQuery);
  List<ResultRow> results = queryResult.toList();
  Assert.assertEquals(0, results.size());
}
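The empty result is the point of the test: no value of dimA can satisfy both the inner 'mango' filter and the outer 'pomegranate' filter. As an aside, a deployment without JavaScript enabled could express the same inner predicate with a selector filter; this is a sketch, not the test's code:

import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;

// Equivalent to function(dim){ return dim == 'mango' }, without requiring
// druid.javascript.enabled=true.
DimFilter innerFilter = new SelectorDimFilter("dimA", "mango", null);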
Use of org.apache.druid.query.spec.MultipleIntervalSegmentSpec in project druid by druid-io.
From the class GroupByQueryRunnerTest, the method testMergeResultsWithLimitPushDownSortByAgg:
@Test
public void testMergeResultsWithLimitPushDownSortByAgg() {
  // Limit push-down is only supported by the v2 groupBy strategy.
  if (!config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V2)) {
    return;
  }
  GroupByQuery.Builder builder = makeQueryBuilder()
      .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
      .setInterval("2011-04-02/2011-04-04")
      .setDimensions(new DefaultDimensionSpec("quality", "alias"))
      .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
      .setLimitSpec(new DefaultLimitSpec(Collections.singletonList(new OrderByColumnSpec("idx", OrderByColumnSpec.Direction.DESCENDING)), 5))
      .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
      .setGranularity(Granularities.ALL);
  final GroupByQuery allGranQuery = builder.build();
  QueryRunner mergedRunner = factory.getToolchest().mergeResults(new QueryRunner<ResultRow>() {
    @Override
    public Sequence<ResultRow> run(QueryPlus<ResultRow> queryPlus, ResponseContext responseContext) {
      // Simulate two daily segments by narrowing the same query to each day.
      final QueryPlus<ResultRow> queryPlus1 = queryPlus.withQuery(
          queryPlus.getQuery().withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03"))))
      );
      final QueryPlus<ResultRow> queryPlus2 = queryPlus.withQuery(
          queryPlus.getQuery().withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-03/2011-04-04"))))
      );
      return factory.getToolchest()
                    .mergeResults((queryPlus3, responseContext1) -> new MergeSequence<>(
                        queryPlus3.getQuery().getResultOrdering(),
                        Sequences.simple(Arrays.asList(runner.run(queryPlus1, responseContext1), runner.run(queryPlus2, responseContext1)))
                    ))
                    .run(queryPlus, responseContext);
    }
  });
  List<ResultRow> allGranExpectedResults = Arrays.asList(
      makeRow(allGranQuery, "2011-04-02", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
      makeRow(allGranQuery, "2011-04-02", "alias", "premium", "rows", 6L, "idx", 4416L),
      makeRow(allGranQuery, "2011-04-02", "alias", "entertainment", "rows", 2L, "idx", 319L),
      makeRow(allGranQuery, "2011-04-02", "alias", "automotive", "rows", 2L, "idx", 269L),
      makeRow(allGranQuery, "2011-04-02", "alias", "travel", "rows", 2L, "idx", 243L)
  );
  Iterable<ResultRow> results = mergedRunner.run(QueryPlus.wrap(allGranQuery)).toList();
  TestHelper.assertExpectedObjects(allGranExpectedResults, results, "merged");
}
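The per-segment rewrite inside the anonymous QueryRunner is where MultipleIntervalSegmentSpec does its work: each simulated segment reruns the same query with a narrowed interval. A minimal sketch of that rewrite in isolation, assuming a Query<?> named query is in scope (the name is illustrative):

import java.util.Collections;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.Query;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;

// withQuerySegmentSpec returns a copy of the query restricted to the new spec;
// the original query is left untouched.
Query<?> day1 = query.withQuerySegmentSpec(
    new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.of("2011-04-02/2011-04-03")))
);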