Use of org.apache.druid.segment.join.filter.JoinFilterSplit in project druid by druid-io.
The class JoinFilterAnalyzerTest, method test_filterPushDown_factToRegionToCountryLeftFilterOnInvalidColumns.
@Test
public void test_filterPushDown_factToRegionToCountryLeftFilterOnInvalidColumns() {
  List<JoinableClause> joinableClauses = ImmutableList.of(factToRegion(JoinType.LEFT), regionToCountry(JoinType.LEFT));
  Filter originalFilter = new AndFilter(ImmutableList.of(
      new SelectorFilter("baseTableInvalidColumn", "abcd"),
      new SelectorFilter("baseTableInvalidColumn2", null),
      new SelectorFilter("rtc.invalidColumn", "abcd"),
      new SelectorFilter("r1.invalidColumn", "abcd")
  ));
  JoinFilterPreAnalysis joinFilterPreAnalysis = makeDefaultConfigPreAnalysis(originalFilter, joinableClauses, VirtualColumns.EMPTY);
  HashJoinSegmentStorageAdapter adapter = new HashJoinSegmentStorageAdapter(
      factSegment.asStorageAdapter(),
      joinableClauses,
      joinFilterPreAnalysis
  );
  JoinTestHelper.verifyCursors(
      adapter.makeCursors(originalFilter, Intervals.ETERNITY, VirtualColumns.EMPTY, Granularities.ALL, false, null),
      ImmutableList.of("page", FACT_TO_REGION_PREFIX + "regionName", REGION_TO_COUNTRY_PREFIX + "countryName"),
      ImmutableList.of()
  );
  JoinFilterSplit expectedFilterSplit = new JoinFilterSplit(
      new SelectorFilter("baseTableInvalidColumn", "abcd"),
      new AndFilter(ImmutableList.of(
          new SelectorFilter("baseTableInvalidColumn2", null),
          new SelectorFilter("rtc.invalidColumn", "abcd"),
          new SelectorFilter("r1.invalidColumn", "abcd")
      )),
      ImmutableSet.of()
  );
  JoinFilterSplit actualFilterSplit = JoinFilterAnalyzer.splitFilter(joinFilterPreAnalysis);
  Assert.assertEquals(expectedFilterSplit, actualFilterSplit);
}
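As the expected values above show, a JoinFilterSplit carries three parts: a filter that can be pushed down to the base (fact) table, a filter that must be evaluated after the join, and a set of push-down virtual columns. A minimal sketch of inspecting a split, reusing the joinFilterPreAnalysis built in the test above and assuming the accessors that HashJoinSegmentStorageAdapter.makeCursors (quoted further below) relies on:

  // Sketch only: split a filter using an existing pre-analysis and inspect the three parts.
  JoinFilterSplit split = JoinFilterAnalyzer.splitFilter(joinFilterPreAnalysis);

  // Portion of the original filter that can be evaluated directly against the base table, if any.
  Optional<Filter> baseTableFilter = split.getBaseTableFilter();

  // Portion that references joined columns and must run after the join, if any.
  Optional<Filter> joinTableFilter = split.getJoinTableFilter();

  // Virtual columns that must be added to the base cursor so rewritten filters can reference them
  // (assumed to be a Set<VirtualColumn>, matching the ImmutableSet.of() used in the expected splits).
  Set<VirtualColumn> pushDownVirtualColumns = split.getPushDownVirtualColumns();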
Use of org.apache.druid.segment.join.filter.JoinFilterSplit in project druid by druid-io.
The class JoinFilterAnalyzerTest, method test_filterPushDown_factToRegionOneColumnToTwoRHSColumnsAndFilterOnRHS.
@Test
public void test_filterPushDown_factToRegionOneColumnToTwoRHSColumnsAndFilterOnRHS() {
  JoinableClause factExprToRegon = new JoinableClause(
      FACT_TO_REGION_PREFIX,
      new IndexedTableJoinable(regionsTable),
      JoinType.LEFT,
      JoinConditionAnalysis.forExpression(
          StringUtils.format(
              "\"%sregionIsoCode\" == regionIsoCode && \"%scountryIsoCode\" == regionIsoCode",
              FACT_TO_REGION_PREFIX,
              FACT_TO_REGION_PREFIX
          ),
          FACT_TO_REGION_PREFIX,
          ExprMacroTable.nil()
      )
  );
  List<JoinableClause> joinableClauses = ImmutableList.of(factExprToRegon);
  Filter originalFilter = new OrFilter(ImmutableList.of(
      new SelectorFilter("r1.regionName", "Fourems Province"),
      new SelectorFilter("r1.regionIsoCode", "AAAA")
  ));
  JoinFilterPreAnalysis joinFilterPreAnalysis = makeDefaultConfigPreAnalysis(originalFilter, joinableClauses, VirtualColumns.EMPTY);
  HashJoinSegmentStorageAdapter adapter = new HashJoinSegmentStorageAdapter(
      factSegment.asStorageAdapter(),
      joinableClauses,
      joinFilterPreAnalysis
  );
  JoinTestHelper.verifyCursors(
      adapter.makeCursors(originalFilter, Intervals.ETERNITY, VirtualColumns.EMPTY, Granularities.ALL, false, null),
      ImmutableList.of("page", FACT_TO_REGION_PREFIX + "regionName"),
      ImmutableList.of(new Object[]{"History of Fourems", "Fourems Province"})
  );
  JoinFilterSplit expectedFilterSplit = new JoinFilterSplit(
      new OrFilter(ImmutableList.of(
          new InDimFilter("regionIsoCode", ImmutableSet.of("MMMM"), null, null).toFilter(),
          new SelectorFilter("regionIsoCode", "AAAA")
      )),
      originalFilter,
      ImmutableSet.of()
  );
  JoinFilterSplit actualFilterSplit = JoinFilterAnalyzer.splitFilter(joinFilterPreAnalysis);
  Assert.assertEquals(expectedFilterSplit, actualFilterSplit);
}
Use of org.apache.druid.segment.join.filter.JoinFilterSplit in project druid by druid-io.
The class JoinFilterAnalyzerTest, method test_filterPushDown_factToCountryRightWithFilterOnChannelAndJoinable.
@Test
public void test_filterPushDown_factToCountryRightWithFilterOnChannelAndJoinable() {
  List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.RIGHT));
  Filter originalFilter = new AndFilter(ImmutableList.of(
      new SelectorFilter("channel", "#de.wikipedia"),
      new SelectorFilter(FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryName", "Germany")
  ));
  JoinFilterPreAnalysis joinFilterPreAnalysis = makeDefaultConfigPreAnalysis(originalFilter, joinableClauses, VirtualColumns.EMPTY);
  HashJoinSegmentStorageAdapter adapter = new HashJoinSegmentStorageAdapter(
      factSegment.asStorageAdapter(),
      joinableClauses,
      joinFilterPreAnalysis
  );
  JoinTestHelper.verifyCursors(
      adapter.makeCursors(originalFilter, Intervals.ETERNITY, VirtualColumns.EMPTY, Granularities.ALL, false, null),
      ImmutableList.of(
          "page",
          "countryIsoCode",
          "countryNumber",
          FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryIsoCode",
          FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryName",
          FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryNumber"
      ),
      ImmutableList.of(new Object[]{"Diskussion:Sebastian Schulz", "DE", 3L, "DE", "Germany", 3L})
  );
  JoinFilterSplit expectedFilterSplit = new JoinFilterSplit(
      new AndFilter(ImmutableList.of(
          new SelectorFilter("channel", "#de.wikipedia"),
          new InDimFilter("countryIsoCode", ImmutableSet.of("DE"), null, null).toFilter()
      )),
      new SelectorFilter(FACT_TO_COUNTRY_ON_ISO_CODE_PREFIX + "countryName", "Germany"),
      ImmutableSet.of()
  );
  JoinFilterSplit actualFilterSplit = JoinFilterAnalyzer.splitFilter(joinFilterPreAnalysis);
  Assert.assertEquals(expectedFilterSplit, actualFilterSplit);
}
Use of org.apache.druid.segment.join.filter.JoinFilterSplit in project druid by druid-io.
The class JoinFilterAnalyzerTest, method test_filterPushDown_factToRegionToCountryLeftFilterOnChannel.
@Test
public void test_filterPushDown_factToRegionToCountryLeftFilterOnChannel() {
  Filter originalFilter = new SelectorFilter("channel", "#en.wikipedia");
  List<JoinableClause> joinableClauses = ImmutableList.of(factToRegion(JoinType.LEFT), regionToCountry(JoinType.LEFT));
  JoinFilterPreAnalysis joinFilterPreAnalysis = makeDefaultConfigPreAnalysis(originalFilter, joinableClauses, VirtualColumns.EMPTY);
  HashJoinSegmentStorageAdapter adapter = new HashJoinSegmentStorageAdapter(
      factSegment.asStorageAdapter(),
      joinableClauses,
      joinFilterPreAnalysis
  );
  JoinTestHelper.verifyCursors(
      adapter.makeCursors(originalFilter, Intervals.ETERNITY, VirtualColumns.EMPTY, Granularities.ALL, false, null),
      ImmutableList.of("page", FACT_TO_REGION_PREFIX + "regionName", REGION_TO_COUNTRY_PREFIX + "countryName"),
      ImmutableList.of(
          new Object[]{"Talk:Oswald Tilghman", null, null},
          new Object[]{"Peremptory norm", "New South Wales", "Australia"},
          new Object[]{"President of India", "California", "United States"},
          new Object[]{"Glasgow", "Kingston upon Hull", "United Kingdom"},
          new Object[]{"Otjiwarongo Airport", "California", "United States"},
          new Object[]{"Sarah Michelle Gellar", "Ontario", "Canada"},
          new Object[]{"DirecTV", "North Carolina", "United States"},
          new Object[]{"Carlo Curti", "California", "United States"},
          new Object[]{"Giusy Ferreri discography", "Provincia di Varese", "Italy"},
          new Object[]{"Roma-Bangkok", "Provincia di Varese", "Italy"},
          new Object[]{"Old Anatolian Turkish", "Virginia", "United States"},
          new Object[]{"Cream Soda", "Ainigriv", "States United"},
          new Object[]{"Orange Soda", null, null},
          new Object[]{"History of Fourems", "Fourems Province", "Fourems"}
      )
  );
  JoinFilterSplit expectedFilterSplit = new JoinFilterSplit(
      new SelectorFilter("channel", "#en.wikipedia"),
      null,
      ImmutableSet.of()
  );
  JoinFilterSplit actualFilterSplit = JoinFilterAnalyzer.splitFilter(joinFilterPreAnalysis);
  Assert.assertEquals(expectedFilterSplit, actualFilterSplit);
}
Use of org.apache.druid.segment.join.filter.JoinFilterSplit in project druid by druid-io.
The class HashJoinSegmentStorageAdapter, method makeCursors.
@Override
public Sequence<Cursor> makeCursors(
    @Nullable final Filter filter,
    @Nonnull final Interval interval,
    @Nonnull final VirtualColumns virtualColumns,
    @Nonnull final Granularity gran,
    final boolean descending,
    @Nullable final QueryMetrics<?> queryMetrics
) {
  final Filter combinedFilter = baseFilterAnd(filter);

  if (clauses.isEmpty()) {
    return baseAdapter.makeCursors(combinedFilter, interval, virtualColumns, gran, descending, queryMetrics);
  }

  // Filter pre-analysis key implied by the call to "makeCursors". We need to sanity-check that it matches
  // the actual pre-analysis that was done. Note: we can't infer a rewrite config from the "makeCursors" call
  // (it requires access to the query context), so we skip sanity-checking it by re-using the one present
  // in the cached key.
  final JoinFilterPreAnalysisKey keyIn = new JoinFilterPreAnalysisKey(
      joinFilterPreAnalysis.getKey().getRewriteConfig(),
      clauses,
      virtualColumns,
      combinedFilter
  );
  final JoinFilterPreAnalysisKey keyCached = joinFilterPreAnalysis.getKey();

  if (!keyIn.equals(keyCached)) {
    // It is a bug if this happens. The implied key and the cached key should always match.
    throw new ISE("Pre-analysis mismatch, cannot execute query");
  }

  final List<VirtualColumn> preJoinVirtualColumns = new ArrayList<>();
  final List<VirtualColumn> postJoinVirtualColumns = new ArrayList<>();
  determineBaseColumnsWithPreAndPostJoinVirtualColumns(virtualColumns, preJoinVirtualColumns, postJoinVirtualColumns);

  // Merge the base-table filter specified by the user with the base-table filter pushed down from the join.
  JoinFilterSplit joinFilterSplit = JoinFilterAnalyzer.splitFilter(joinFilterPreAnalysis, baseFilter);
  preJoinVirtualColumns.addAll(joinFilterSplit.getPushDownVirtualColumns());

  final Sequence<Cursor> baseCursorSequence = baseAdapter.makeCursors(
      joinFilterSplit.getBaseTableFilter().isPresent() ? joinFilterSplit.getBaseTableFilter().get() : null,
      interval,
      VirtualColumns.create(preJoinVirtualColumns),
      gran,
      descending,
      queryMetrics
  );

  Closer joinablesCloser = Closer.create();
  return Sequences.<Cursor, Cursor>map(baseCursorSequence, cursor -> {
    assert cursor != null;
    Cursor retVal = cursor;

    // Wrap the base cursor with one join cursor per clause, in order.
    for (JoinableClause clause : clauses) {
      retVal = HashJoinEngine.makeJoinCursor(retVal, clause, descending, joinablesCloser);
    }

    // Apply post-join virtual columns and the join-table filter on top of the joined cursor.
    return PostJoinCursor.wrap(
        retVal,
        VirtualColumns.create(postJoinVirtualColumns),
        joinFilterSplit.getJoinTableFilter().orElse(null)
    );
  }).withBaggage(joinablesCloser);
}
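Note how the split is consumed here: the base-table filter (if present) is handed to baseAdapter.makeCursors so rows are pruned before any join work happens, the push-down virtual columns are added to the pre-join virtual column list so rewritten filters can reference them, and the join-table filter is deferred to PostJoinCursor.wrap, where the joined columns are available.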