Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache: the matches method of the JDBCJoinPushDownRule class.
@Override
public boolean matches(RelOptRuleCall call) {
  final HiveJoin join = call.rel(0);
  final RexNode cond = join.getCondition();
  final HiveJdbcConverter converter1 = call.rel(1);
  final HiveJdbcConverter converter2 = call.rel(2);
  // Cheapest rejection first: a cross join (always-true condition) is never
  // pushed down, so bail out before doing any string comparisons.
  if (cond.isAlwaysTrue()) {
    return false;
  }
  // Both join inputs must target the same JDBC source; otherwise the join
  // cannot be executed remotely as a single query.
  // 1) Same JDBC convention (compared by name).
  if (!converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())) {
    return false;
  }
  // 2) Same connection URL.
  if (!converter1.getConnectionUrl().equals(converter2.getConnectionUrl())) {
    return false;
  }
  // 3) Same connection user.
  if (!converter1.getConnectionUser().equals(converter2.getConnectionUser())) {
    return false;
  }
  // Finally, the join condition itself must be expressible in the target
  // database's SQL dialect.
  return JDBCRexCallValidator.isValidJdbcOperation(cond, converter1.getJdbcDialect());
}
Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache: the onMatch method of the JDBCUnionPushDownRule class.
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCUnionPushDown has been called");
  final HiveUnion union = call.rel(0);
  final HiveJdbcConverter leftConv = call.rel(1);
  final HiveJdbcConverter rightConv = call.rel(2);
  // Compute the union remotely: wrap the converters' inputs in a JdbcUnion
  // stamped with the JDBC convention trait.
  final List<RelNode> branches = Arrays.asList(leftConv.getInput(), rightConv.getInput());
  final JdbcUnion pushedUnion = new JdbcUnion(union.getCluster(),
      union.getTraitSet().replace(leftConv.getJdbcConvention()), branches, union.all);
  // Re-attach a single converter on top so upstream operators still see a
  // Hive-side node.
  call.transformTo(leftConv.copy(leftConv.getTraitSet(), pushedUnion));
}
Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache: the onMatch method of the JDBCAggregationPushDownRule class.
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCAggregationPushDownRule has been called");
  final HiveAggregate aggregate = call.rel(0);
  final HiveJdbcConverter converter = call.rel(1);
  JdbcAggregate jdbcAggregate;
  try {
    // Rebuild the aggregate as a JdbcAggregate over the converter's input,
    // carrying the JDBC convention so it executes on the remote database.
    jdbcAggregate = new JdbcAggregate(aggregate.getCluster(),
        aggregate.getTraitSet().replace(converter.getJdbcConvention()),
        converter.getInput(), aggregate.indicator, aggregate.getGroupSet(),
        aggregate.getGroupSets(), aggregate.getAggCallList());
  } catch (InvalidRelException e) {
    // The aggregate cannot be expressed on the JDBC side. Pass the exception
    // as the SLF4J throwable argument so the stack trace is preserved
    // (LOG.warn(e.toString()) would drop it), then skip the transformation.
    LOG.warn("Failed to create JdbcAggregate; skipping push-down", e);
    return;
  }
  call.transformTo(converter.copy(converter.getTraitSet(), jdbcAggregate));
}
Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache: the onMatch method of the JDBCFilterPushDownRule class.
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCFilterPushDown has been called");
  final HiveFilter filter = call.rel(0);
  final HiveJdbcConverter converter = call.rel(1);
  // Re-root the Hive filter on the converter's input, then let Calcite's
  // JdbcFilterRule translate it into a JDBC-side filter.
  final Filter rebasedFilter = filter.copy(filter.getTraitSet(), converter.getInput(), filter.getCondition());
  final JdbcFilter pushedFilter =
      (JdbcFilter) JdbcFilterRule.create(converter.getJdbcConvention()).convert(rebasedFilter);
  // The conversion may fail (null); only transform when it succeeded.
  if (pushedFilter != null) {
    call.transformTo(converter.copy(converter.getTraitSet(), Arrays.asList(pushedFilter)));
  }
}
Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache: the onMatch method of the JDBCProjectPushDownRule class.
@Override
public void onMatch(RelOptRuleCall call) {
  LOG.debug("JDBCProjectPushDownRule has been called");
  final HiveProject project = call.rel(0);
  final HiveJdbcConverter converter = call.rel(1);
  // Rebuild the projection as a JdbcProject over the converter's input,
  // stamped with the JDBC convention so it executes remotely.
  final JdbcProject pushedProject = new JdbcProject(
      project.getCluster(),
      project.getTraitSet().replace(converter.getJdbcConvention()),
      converter.getInput(),
      project.getProjects(),
      project.getRowType());
  // Keep the converter on top so upstream operators still see a Hive node.
  call.transformTo(converter.copy(converter.getTraitSet(), pushedProject));
}
Aggregations