Example 1 with HiveJdbcConverter

Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache.

From the class ASTBuilder, the method table:

public static ASTNode table(final RelNode scan) {
    HiveTableScan hts = null;
    if (scan instanceof HiveJdbcConverter) {
        hts = ((HiveJdbcConverter) scan).getTableScan().getHiveTableScan();
    } else if (scan instanceof DruidQuery) {
        hts = (HiveTableScan) ((DruidQuery) scan).getTableScan();
    } else {
        hts = (HiveTableScan) scan;
    }
    assert hts != null;
    RelOptHiveTable hTbl = (RelOptHiveTable) hts.getTable();
    ASTBuilder tableNameBuilder = ASTBuilder.construct(HiveParser.TOK_TABNAME, "TOK_TABNAME")
        .add(HiveParser.Identifier, hTbl.getHiveTableMD().getDbName())
        .add(HiveParser.Identifier, hTbl.getHiveTableMD().getTableName());
    if (hTbl.getHiveTableMD().getMetaTable() != null) {
        tableNameBuilder.add(HiveParser.Identifier, hTbl.getHiveTableMD().getMetaTable());
    }
    ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_TABREF, "TOK_TABREF").add(tableNameBuilder);
    if (hTbl.getHiveTableMD().getAsOfTimestamp() != null) {
        ASTBuilder asOfBuilder = ASTBuilder.construct(HiveParser.TOK_AS_OF_TIME, "TOK_AS_OF_TIME")
            .add(HiveParser.StringLiteral, hTbl.getHiveTableMD().getAsOfTimestamp());
        b.add(asOfBuilder);
    }
    if (hTbl.getHiveTableMD().getAsOfVersion() != null) {
        ASTBuilder asOfBuilder = ASTBuilder.construct(HiveParser.TOK_AS_OF_VERSION, "TOK_AS_OF_VERSION")
            .add(HiveParser.Number, hTbl.getHiveTableMD().getAsOfVersion());
        b.add(asOfBuilder);
    }
    ASTBuilder propList = ASTBuilder.construct(HiveParser.TOK_TABLEPROPLIST, "TOK_TABLEPROPLIST");
    if (scan instanceof DruidQuery) {
        // Passing query spec, column names and column types to be used as part of Hive physical execution
        DruidQuery dq = (DruidQuery) scan;
        // Adding query spec to be used by org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"" + Constants.DRUID_QUERY_JSON + "\"")
            .add(HiveParser.StringLiteral, "\"" + SemanticAnalyzer.escapeSQLString(dq.getQueryString()) + "\""));
        // Adding column names used later by org.apache.hadoop.hive.druid.serde.DruidSerDe
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"" + Constants.DRUID_QUERY_FIELD_NAMES + "\"")
            .add(HiveParser.StringLiteral, "\"" + dq.getRowType().getFieldNames().stream()
                .map(Object::toString).collect(Collectors.joining(",")) + "\""));
        // Adding column types used later by org.apache.hadoop.hive.druid.serde.DruidSerDe
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"" + Constants.DRUID_QUERY_FIELD_TYPES + "\"")
            .add(HiveParser.StringLiteral, "\"" + dq.getRowType().getFieldList().stream()
                .map(e -> TypeConverter.convert(e.getType()).getTypeName())
                .collect(Collectors.joining(",")) + "\""));
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"" + Constants.DRUID_QUERY_TYPE + "\"")
            .add(HiveParser.StringLiteral, "\"" + dq.getQueryType().getQueryName() + "\""));
    } else if (scan instanceof HiveJdbcConverter) {
        HiveJdbcConverter jdbcConverter = (HiveJdbcConverter) scan;
        final String query = jdbcConverter.generateSql();
        LOGGER.debug("Generated SQL query: " + System.lineSeparator() + query);
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"" + Constants.JDBC_QUERY + "\"")
            .add(HiveParser.StringLiteral, "\"" + SemanticAnalyzer.escapeSQLString(query) + "\""));
        // Whether we can split the query
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"" + Constants.JDBC_SPLIT_QUERY + "\"")
            .add(HiveParser.StringLiteral, "\"" + jdbcConverter.splittingAllowed() + "\""));
        // Adding column names used later by the JDBC storage handler
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"" + Constants.JDBC_QUERY_FIELD_NAMES + "\"")
            .add(HiveParser.StringLiteral, "\"" + scan.getRowType().getFieldNames().stream()
                .map(Object::toString).collect(Collectors.joining(",")) + "\""));
        // Adding column types used later by the JDBC storage handler
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"" + Constants.JDBC_QUERY_FIELD_TYPES + "\"")
            .add(HiveParser.StringLiteral, "\"" + scan.getRowType().getFieldList().stream()
                .map(e -> TypeConverter.convert(e.getType()).getTypeName())
                .collect(Collectors.joining(",")) + "\""));
    }
    if (hts.isInsideView()) {
        // We need to carry the insideView information from Calcite into the AST.
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, "\"insideView\"")
            .add(HiveParser.StringLiteral, "\"TRUE\""));
    }
    if (hts.getTableScanTrait() != null) {
        // We need to carry the fetchDeletedRows information from Calcite into the AST.
        propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
            .add(HiveParser.StringLiteral, String.format("\"%s\"", hts.getTableScanTrait().getPropertyKey()))
            .add(HiveParser.StringLiteral, "\"TRUE\""));
    }
    b.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTIES, "TOK_TABLEPROPERTIES").add(propList));
    // NOTE: Calcite considers tables to be equal if their names are the same. Hence
    // we need to provide Calcite the fully qualified table name (dbname.tblname)
    // and not the user-provided aliases.
    // However, in Hive a DB name cannot appear in the select list; in a join
    // where table names differ only in DB name, Hive requires the user to
    // introduce explicit aliases for the tables.
    b.add(HiveParser.Identifier, hts.getTableAlias());
    return b.node();
}
Also used : SemanticAnalyzer(org.apache.hadoop.hive.ql.parse.SemanticAnalyzer) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) LoggerFactory(org.slf4j.LoggerFactory) BaseSemanticAnalyzer(org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer) HiveTableScan(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan) BigDecimal(java.math.BigDecimal) TimeString(org.apache.calcite.util.TimeString) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) DruidQuery(org.apache.calcite.adapter.druid.DruidQuery) Constants(org.apache.hadoop.hive.conf.Constants) Logger(org.slf4j.Logger) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) DateString(org.apache.calcite.util.DateString) RexLiteral(org.apache.calcite.rex.RexLiteral) HiveParser(org.apache.hadoop.hive.ql.parse.HiveParser) TimestampString(org.apache.calcite.util.TimestampString) RelOptHiveTable(org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable) HiveJdbcConverter(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) RelNode(org.apache.calcite.rel.RelNode) Collectors(java.util.stream.Collectors) ParseDriver(org.apache.hadoop.hive.ql.parse.ParseDriver) RexDynamicParam(org.apache.calcite.rex.RexDynamicParam) TimestampTZUtil(org.apache.hadoop.hive.common.type.TimestampTZUtil) JoinRelType(org.apache.calcite.rel.core.JoinRelType)
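
To make the fluent API concrete, here is a minimal usage sketch that builds a bare TOK_TABREF using only the construct/add/node calls seen above; the database and table names ("default", "t") are invented for illustration:

public static ASTNode exampleTabRef() {
    // Hypothetical names; mirrors the chaining in table(RelNode) above,
    // minus the Druid/JDBC table-property handling.
    ASTBuilder tableNameBuilder = ASTBuilder.construct(HiveParser.TOK_TABNAME, "TOK_TABNAME")
        .add(HiveParser.Identifier, "default")
        .add(HiveParser.Identifier, "t");
    // Resulting tree: (TOK_TABREF (TOK_TABNAME default t))
    return ASTBuilder.construct(HiveParser.TOK_TABREF, "TOK_TABREF")
        .add(tableNameBuilder)
        .node();
}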

Example 2 with HiveJdbcConverter

Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache.

From the class ASTConverter, the method convertSource:

private QueryBlockInfo convertSource(RelNode r) throws CalciteSemanticException {
    Schema s = null;
    ASTNode ast = null;
    if (r instanceof TableScan) {
        TableScan f = (TableScan) r;
        s = new Schema(f);
        ast = ASTBuilder.table(f);
        planMapper.link(ast, f);
    } else if (r instanceof HiveJdbcConverter) {
        HiveJdbcConverter f = (HiveJdbcConverter) r;
        s = new Schema(f);
        ast = ASTBuilder.table(f);
    } else if (r instanceof DruidQuery) {
        DruidQuery f = (DruidQuery) r;
        s = new Schema(f);
        ast = ASTBuilder.table(f);
    } else if (r instanceof Join) {
        Join join = (Join) r;
        QueryBlockInfo left = convertSource(join.getLeft());
        QueryBlockInfo right = convertSource(join.getRight());
        s = new Schema(left.schema, right.schema);
        ASTNode cond = join.getCondition().accept(new RexVisitor(s, false, r.getCluster().getRexBuilder()));
        boolean semiJoin = join.isSemiJoin() || join.getJoinType() == JoinRelType.ANTI;
        if (join.getRight() instanceof Join && !semiJoin) {
            // Invert the join inputs; otherwise the SemanticAnalyzer methods that
            // merge joins will not kick in. Do not do this for a semijoin, since
            // swapping the inputs would change its semantics.
            JoinRelType type;
            if (join.getJoinType() == JoinRelType.LEFT) {
                type = JoinRelType.RIGHT;
            } else if (join.getJoinType() == JoinRelType.RIGHT) {
                type = JoinRelType.LEFT;
            } else {
                type = join.getJoinType();
            }
            ast = ASTBuilder.join(right.ast, left.ast, type, cond);
            addPkFkInfoToAST(ast, join, true);
        } else {
            ast = ASTBuilder.join(left.ast, right.ast, join.getJoinType(), cond);
            addPkFkInfoToAST(ast, join, false);
        }
        if (semiJoin) {
            s = left.schema;
        }
    } else if (r instanceof Union) {
        Union u = ((Union) r);
        ASTNode left = new ASTConverter(((Union) r).getInput(0), this.derivedTableCount, planMapper).convert();
        for (int ind = 1; ind < u.getInputs().size(); ind++) {
            left = getUnionAllAST(left, new ASTConverter(((Union) r).getInput(ind), this.derivedTableCount, planMapper).convert());
            String sqAlias = nextAlias();
            ast = ASTBuilder.subQuery(left, sqAlias);
            s = new Schema((Union) r, sqAlias);
        }
    } else {
        ASTConverter src = new ASTConverter(r, this.derivedTableCount, planMapper);
        ASTNode srcAST = src.convert();
        String sqAlias = nextAlias();
        s = src.getRowSchema(sqAlias);
        ast = ASTBuilder.subQuery(srcAST, sqAlias);
    }
    return new QueryBlockInfo(s, ast);
}
Also used : HiveTableScan(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan) TableScan(org.apache.calcite.rel.core.TableScan) JdbcHiveTableScan(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.JdbcHiveTableScan) DruidQuery(org.apache.calcite.adapter.druid.DruidQuery) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Join(org.apache.calcite.rel.core.Join) Union(org.apache.calcite.rel.core.Union) JoinRelType(org.apache.calcite.rel.core.JoinRelType) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) HiveJdbcConverter(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter)
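
The LEFT/RIGHT flip in the Join branch is the subtle part; isolated as a minimal sketch (the helper name is invented for illustration):

// When the join inputs are swapped so that SemanticAnalyzer's join merging
// kicks in, the outer-join direction must swap with them; symmetric types
// pass through unchanged (semijoins never reach this path).
static JoinRelType invertForSwappedInputs(JoinRelType type) {
    switch (type) {
        case LEFT:  return JoinRelType.RIGHT;
        case RIGHT: return JoinRelType.LEFT;
        default:    return type;
    }
}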

Example 3 with HiveJdbcConverter

Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache.

From the class JDBCJoinPushDownRule, the method onMatch:

@Override
public void onMatch(RelOptRuleCall call) {
    LOG.debug("JDBCJoinPushDownRule has been called");
    final HiveJoin join = call.rel(0);
    final HiveJdbcConverter converter1 = call.rel(1);
    final RelNode input1 = converter1.getInput();
    final HiveJdbcConverter converter2 = call.rel(2);
    final RelNode input2 = converter2.getInput();
    JdbcJoin jdbcJoin;
    try {
        jdbcJoin = new JdbcJoin(join.getCluster(),
            join.getTraitSet().replace(converter1.getJdbcConvention()),
            input1, input2, join.getCondition(), join.getVariablesSet(), join.getJoinType());
    } catch (InvalidRelException e) {
        LOG.warn(e.toString());
        return;
    }
    call.transformTo(converter1.copy(converter1.getTraitSet(), jdbcJoin));
}
Also used : InvalidRelException(org.apache.calcite.rel.InvalidRelException) JdbcJoin(org.apache.calcite.adapter.jdbc.JdbcRules.JdbcJoin) RelNode(org.apache.calcite.rel.RelNode) HiveJdbcConverter(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter) HiveJoin(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin)
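
onMatch assumes matches() has already verified that both converters target the same JDBC source. That method is not reproduced on this page; a plausible sketch, reusing only the accessors shown in Example 4 below:

@Override
public boolean matches(RelOptRuleCall call) {
    final HiveJdbcConverter converter1 = call.rel(1);
    final HiveJdbcConverter converter2 = call.rel(2);
    // A join is only pushed down when both sides run against the same
    // JDBC convention, connection URL, and connection user.
    return converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())
        && converter1.getConnectionUrl().equals(converter2.getConnectionUrl())
        && converter1.getConnectionUser().equals(converter2.getConnectionUser());
}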

Example 4 with HiveJdbcConverter

Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache.

From the class JDBCUnionPushDownRule, the method matches:

@Override
public boolean matches(RelOptRuleCall call) {
    final HiveUnion union = call.rel(0);
    final HiveJdbcConverter converter1 = call.rel(1);
    final HiveJdbcConverter converter2 = call.rel(2);
    // First we compare the convention
    if (!converter1.getJdbcConvention().getName().equals(converter2.getJdbcConvention().getName())) {
        return false;
    }
    // Second, we compare the connection string
    if (!converter1.getConnectionUrl().equals(converter2.getConnectionUrl())) {
        return false;
    }
    // Third, we compare the connection user
    if (!converter1.getConnectionUser().equals(converter2.getConnectionUser())) {
        return false;
    }
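    // Finally, we only handle binary unions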
    return union.getInputs().size() == 2;
}
Also used : HiveJdbcConverter(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter) HiveUnion(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveUnion)
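
The corresponding onMatch is not shown on this page. By analogy with the join rule in Example 3, it plausibly rebuilds the union inside the JDBC convention and re-wraps it in the first converter; a sketch, assuming Calcite's JdbcRules.JdbcUnion constructor takes (cluster, traits, inputs, all) and using Guava's ImmutableList:

@Override
public void onMatch(RelOptRuleCall call) {
    final HiveUnion union = call.rel(0);
    final HiveJdbcConverter converter1 = call.rel(1);
    final HiveJdbcConverter converter2 = call.rel(2);
    // Rebuild the union in the JDBC convention so it executes on the remote side.
    JdbcUnion jdbcUnion = new JdbcUnion(union.getCluster(),
        union.getTraitSet().replace(converter1.getJdbcConvention()),
        ImmutableList.of(converter1.getInput(), converter2.getInput()),
        union.all);
    call.transformTo(converter1.copy(converter1.getTraitSet(), jdbcUnion));
}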

Example 5 with HiveJdbcConverter

Use of org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter in project hive by apache.

From the class JDBCAggregationPushDownRule, the method matches:

@Override
public boolean matches(RelOptRuleCall call) {
    final HiveAggregate agg = call.rel(0);
    final HiveJdbcConverter converter = call.rel(1);
    if (agg.getGroupType() != Group.SIMPLE) {
        // TODO: Grouping sets not supported yet
        return false;
    }
    for (AggregateCall aggCall : agg.getAggCallList()) {
        SqlAggFunction f = aggCall.getAggregation();
        if (f instanceof HiveSqlCountAggFunction) {
            // count distinct with more than one argument is not supported
            HiveSqlCountAggFunction countAgg = (HiveSqlCountAggFunction) f;
            if (countAgg.isDistinct() && aggCall.getArgList().size() > 1) {
                return false;
            }
        }
        SqlKind kind = f.getKind();
        if (!converter.getJdbcDialect().supportsAggregateFunction(kind)) {
            return false;
        }
    }
    return true;
}
Also used : AggregateCall(org.apache.calcite.rel.core.AggregateCall) HiveAggregate(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate) HiveSqlCountAggFunction(org.apache.hadoop.hive.ql.optimizer.calcite.functions.HiveSqlCountAggFunction) HiveJdbcConverter(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter) SqlAggFunction(org.apache.calcite.sql.SqlAggFunction) SqlKind(org.apache.calcite.sql.SqlKind)
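
Once matches() accepts, the companion onMatch (not reproduced on this page) plausibly follows the same shape as the join rule in Example 3, rebuilding the aggregate as a JdbcAggregate (which does appear in the Aggregations list below); a sketch, with the constructor arguments assumed:

@Override
public void onMatch(RelOptRuleCall call) {
    final HiveAggregate agg = call.rel(0);
    final HiveJdbcConverter converter = call.rel(1);
    JdbcAggregate jdbcAggregate;
    try {
        // Re-create the aggregate in the JDBC convention over the converter's input.
        jdbcAggregate = new JdbcAggregate(agg.getCluster(),
            agg.getTraitSet().replace(converter.getJdbcConvention()),
            converter.getInput(), agg.getGroupSet(), agg.getGroupSets(),
            agg.getAggCallList());
    } catch (InvalidRelException e) {
        LOG.warn(e.toString());
        return;
    }
    call.transformTo(converter.copy(converter.getTraitSet(), jdbcAggregate));
}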

Aggregations

HiveJdbcConverter (org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter): 16 usages
RelNode (org.apache.calcite.rel.RelNode): 5 usages
RexNode (org.apache.calcite.rex.RexNode): 4 usages
HiveJoin (org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin): 4 usages
Join (org.apache.calcite.rel.core.Join): 3 usages
HiveFilter (org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter): 3 usages
DruidQuery (org.apache.calcite.adapter.druid.DruidQuery): 2 usages
JdbcFilter (org.apache.calcite.adapter.jdbc.JdbcRules.JdbcFilter): 2 usages
JdbcSort (org.apache.calcite.adapter.jdbc.JdbcRules.JdbcSort): 2 usages
InvalidRelException (org.apache.calcite.rel.InvalidRelException): 2 usages
Filter (org.apache.calcite.rel.core.Filter): 2 usages
JoinRelType (org.apache.calcite.rel.core.JoinRelType): 2 usages
HiveAggregate (org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate): 2 usages
HiveProject (org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject): 2 usages
HiveTableScan (org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan): 2 usages
HiveUnion (org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveUnion): 2 usages
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 2 usages
BigDecimal (java.math.BigDecimal): 1 usage
Collectors (java.util.stream.Collectors): 1 usage
JdbcAggregate (org.apache.calcite.adapter.jdbc.JdbcRules.JdbcAggregate): 1 usage