Use of org.apache.druid.query.planning.PreJoinableClause in project druid by druid-io.
The class JoinableFactoryWrapper, method computeJoinDataSourceCacheKey.
/**
 * Compute a cache key prefix for a join data source. This includes the data sources that participate in the RHS of a
 * join as well as any query-specific constructs associated with the join data source, such as a base table filter.
 * This key prefix can be used in the segment-level cache or the result-level cache. The function can return the
 * following, wrapped in an Optional:
 * - Non-empty byte array - a join data source is involved and caching is possible. The result includes the
 * join condition expression, the join type, and the cache key returned by the joinable factory for each
 * {@link PreJoinableClause}.
 * - Empty - there is a join but caching is not possible, which can happen if one of the participating data sources
 * in the JOIN is not cacheable.
 *
 * @param dataSourceAnalysis analysis for the join data source
 *
 * @return the optional cache key to be used as part of the query cache key
 *
 * @throws {@link IAE} if this operation is called on a non-join data source
 */
public Optional<byte[]> computeJoinDataSourceCacheKey(final DataSourceAnalysis dataSourceAnalysis) {
  final List<PreJoinableClause> clauses = dataSourceAnalysis.getPreJoinableClauses();
  if (clauses.isEmpty()) {
    throw new IAE("No join clauses to build the cache key for data source [%s]", dataSourceAnalysis.getDataSource());
  }

  final CacheKeyBuilder keyBuilder = new CacheKeyBuilder(JOIN_OPERATION);
  if (dataSourceAnalysis.getJoinBaseTableFilter().isPresent()) {
    keyBuilder.appendCacheable(dataSourceAnalysis.getJoinBaseTableFilter().get());
  }
  for (PreJoinableClause clause : clauses) {
    Optional<byte[]> bytes = joinableFactory.computeJoinCacheKey(clause.getDataSource(), clause.getCondition());
    if (!bytes.isPresent()) {
      // Encountered a data source that does not support caching yet; give up on caching the whole join.
      log.debug("skipping caching for join since [%s] does not support caching", clause.getDataSource());
      return Optional.empty();
    }
    keyBuilder.appendByteArray(bytes.get());
    keyBuilder.appendString(clause.getCondition().getOriginalExpression());
    keyBuilder.appendString(clause.getPrefix());
    keyBuilder.appendString(clause.getJoinType().name());
  }
  return Optional.of(keyBuilder.build());
}
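A minimal caller-side sketch of the Optional contract described above. The method name isCacheableJoinQuery and its wiring are assumptions for illustration, not Druid API; only computeJoinDataSourceCacheKey itself is taken from the snippet above.
// Hypothetical caller, shown only to illustrate the contract: a present key can be folded into
// the query cache key, while an empty Optional means caching must be skipped for this query.
static boolean isCacheableJoinQuery(JoinableFactoryWrapper wrapper, DataSourceAnalysis analysis) {
  return analysis.isJoin() && wrapper.computeJoinDataSourceCacheKey(analysis).isPresent();
}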
Use of org.apache.druid.query.planning.PreJoinableClause in project druid by druid-io.
The class JoinableClauses, method createClauses.
/**
* Builds a list of {@link JoinableClause} corresponding to a list of {@link PreJoinableClause}. This will call
* {@link JoinableFactory#build} on each one and therefore may be an expensive operation.
*/
public static JoinableClauses createClauses(final List<PreJoinableClause> preClauses, final JoinableFactory joinableFactory) {
  // Since building a JoinableClause can be expensive, check for prefix conflicts before building
  checkPreJoinableClausesForDuplicatesAndShadowing(preClauses);
  List<JoinableClause> joinableClauses = preClauses.stream().map(preJoinableClause -> {
    final Optional<Joinable> joinable = joinableFactory.build(preJoinableClause.getDataSource(), preJoinableClause.getCondition());
    return new JoinableClause(
        preJoinableClause.getPrefix(),
        joinable.orElseThrow(() -> new ISE("dataSource is not joinable: %s", preJoinableClause.getDataSource())),
        preJoinableClause.getJoinType(),
        preJoinableClause.getCondition()
    );
  }).collect(Collectors.toList());
  return new JoinableClauses(joinableClauses);
}
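A brief usage sketch, assuming a planner-provided list of PreJoinableClause and a JoinableFactory are already in hand; the variable names are illustrative only.
// Build fully resolved JoinableClause objects from the pre-joinable clauses. Per the method
// above, this throws ISE if any clause's data source is not joinable.
JoinableClauses clauses = JoinableClauses.createClauses(preClauses, joinableFactory);
for (JoinableClause clause : clauses.getJoinableClauses()) {
  // Each clause now carries a built Joinable alongside its prefix, join type, and condition.
}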
Use of org.apache.druid.query.planning.PreJoinableClause in project druid by druid-io.
The class JoinableFactoryWrapperTest, method test_computeJoinDataSourceCacheKey_keyChangesWithExpression.
@Test
public void test_computeJoinDataSourceCacheKey_keyChangesWithExpression() {
  DataSourceAnalysis analysis = EasyMock.mock(DataSourceAnalysis.class);
  EasyMock.expect(analysis.getJoinBaseTableFilter()).andReturn(Optional.empty()).anyTimes();
  JoinableFactoryWrapper joinableFactoryWrapper = new JoinableFactoryWrapper(new JoinableFactoryWithCacheKey());

  PreJoinableClause clause1 = makeGlobalPreJoinableClause("dataSource_1", "y == \"j.y\"", "j.");
  EasyMock.expect(analysis.getPreJoinableClauses()).andReturn(Collections.singletonList(clause1)).anyTimes();
  EasyMock.replay(analysis);
  Optional<byte[]> cacheKey1 = joinableFactoryWrapper.computeJoinDataSourceCacheKey(analysis);
  Assert.assertTrue(cacheKey1.isPresent());
  Assert.assertNotEquals(0, cacheKey1.get().length);

  PreJoinableClause clause2 = makeGlobalPreJoinableClause("dataSource_1", "x == \"j.x\"", "j.");
  EasyMock.reset(analysis);
  EasyMock.expect(analysis.getPreJoinableClauses()).andReturn(Collections.singletonList(clause2)).anyTimes();
  EasyMock.expect(analysis.getJoinBaseTableFilter()).andReturn(Optional.empty()).anyTimes();
  EasyMock.replay(analysis);
  Optional<byte[]> cacheKey2 = joinableFactoryWrapper.computeJoinDataSourceCacheKey(analysis);
  Assert.assertTrue(cacheKey2.isPresent());
  Assert.assertFalse(Arrays.equals(cacheKey1.get(), cacheKey2.get()));
}
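The helper makeGlobalPreJoinableClause is defined elsewhere in JoinableFactoryWrapperTest and is not shown on this page; the following is a plausible sketch of what it does, assuming it wraps a GlobalTableDataSource in a LEFT-join clause built the same way as the explicit PreJoinableClause construction later on this page.
// Assumed sketch of the test helper (not the actual Druid source): build a LEFT-join
// PreJoinableClause over a global table, parsing the condition with the nil macro table.
private PreJoinableClause makeGlobalPreJoinableClause(String tableName, String expression, String prefix) {
  GlobalTableDataSource dataSource = new GlobalTableDataSource(tableName);
  JoinConditionAnalysis condition = JoinConditionAnalysis.forExpression(expression, prefix, ExprMacroTable.nil());
  return new PreJoinableClause(prefix, dataSource, JoinType.LEFT, condition);
}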
Use of org.apache.druid.query.planning.PreJoinableClause in project druid by druid-io.
The class JoinableFactoryWrapperTest, method test_createSegmentMapFn_usableClause.
@Test
public void test_createSegmentMapFn_usableClause() {
  final LookupDataSource lookupDataSource = new LookupDataSource("lookyloo");
  final JoinConditionAnalysis conditionAnalysis = JoinConditionAnalysis.forExpression("x == \"j.x\"", "j.", ExprMacroTable.nil());
  final PreJoinableClause clause = new PreJoinableClause("j.", lookupDataSource, JoinType.LEFT, conditionAnalysis);

  JoinableFactoryWrapper joinableFactoryWrapper = new JoinableFactoryWrapper(new JoinableFactory() {
    @Override
    public boolean isDirectlyJoinable(DataSource dataSource) {
      return dataSource.equals(lookupDataSource);
    }

    @Override
    public Optional<Joinable> build(DataSource dataSource, JoinConditionAnalysis condition) {
      if (dataSource.equals(lookupDataSource) && condition.equals(conditionAnalysis)) {
        return Optional.of(LookupJoinable.wrap(new MapLookupExtractor(ImmutableMap.of("k", "v"), false)));
      } else {
        return Optional.empty();
      }
    }
  });

  final Function<SegmentReference, SegmentReference> segmentMapFn = joinableFactoryWrapper.createSegmentMapFn(
      null,
      ImmutableList.of(clause),
      new AtomicLong(),
      new TestQuery(
          new TableDataSource("test"),
          new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))),
          false,
          new HashMap()
      )
  );
  Assert.assertNotSame(Function.identity(), segmentMapFn);
}
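For context, the function returned by createSegmentMapFn is meant to wrap each segment so the join is applied at scan time; a one-line sketch, where baseSegment is a hypothetical placeholder for a SegmentReference obtained from the timeline.
// Illustrative only: baseSegment is an assumed SegmentReference; the mapping returns a
// segment that exposes the joined data when queried.
SegmentReference joinableSegment = segmentMapFn.apply(baseSegment);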
Use of org.apache.druid.query.planning.PreJoinableClause in project druid by druid-io.
The class JoinableFactoryWrapperTest, method test_computeJoinDataSourceCacheKey_noHashJoin.
@Test
public void test_computeJoinDataSourceCacheKey_noHashJoin() {
  PreJoinableClause clause1 = makeGlobalPreJoinableClause("dataSource_1", "x == \"j.x\"", "j.");
  PreJoinableClause clause2 = makeGlobalPreJoinableClause("dataSource_2", "x != \"h.x\"", "h.");

  DataSourceAnalysis analysis = EasyMock.mock(DataSourceAnalysis.class);
  EasyMock.expect(analysis.getPreJoinableClauses()).andReturn(Arrays.asList(clause1, clause2)).anyTimes();
  EasyMock.expect(analysis.getJoinBaseTableFilter()).andReturn(Optional.of(TrueDimFilter.instance())).anyTimes();
  EasyMock.replay(analysis);

  JoinableFactoryWrapper joinableFactoryWrapper = new JoinableFactoryWrapper(new JoinableFactoryWithCacheKey());
  Optional<byte[]> cacheKey = joinableFactoryWrapper.computeJoinDataSourceCacheKey(analysis);
  Assert.assertFalse(cacheKey.isPresent());
}
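JoinableFactoryWithCacheKey is another helper from the test class that is not shown here. A plausible sketch follows (an assumption, not the actual Druid source): it returns a cache key only for hash-joinable conditions on global tables, which is why the non-equality condition x != "h.x" above makes the overall key absent.
// Assumed sketch of the test helper: only global tables joined on hash-joinable (equality)
// conditions produce a cache key; anything else reports "not cacheable" by returning empty.
private static class JoinableFactoryWithCacheKey implements JoinableFactory {
  @Override
  public boolean isDirectlyJoinable(DataSource dataSource) {
    return false;
  }

  @Override
  public Optional<Joinable> build(DataSource dataSource, JoinConditionAnalysis condition) {
    return Optional.empty();
  }

  @Override
  public Optional<byte[]> computeJoinCacheKey(DataSource dataSource, JoinConditionAnalysis condition) {
    if (dataSource instanceof GlobalTableDataSource && condition.canHashJoin()) {
      // Use the RHS table name as the per-clause cache key.
      String tableName = Iterators.getOnlyElement(dataSource.getTableNames().iterator());
      return Optional.of(StringUtils.toUtf8(tableName));
    }
    return Optional.empty();
  }
}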