Search in sources :

Example 1 with HiveLoadDataStatement

use of com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement in project druid by alibaba.

In the class HiveStatementParser, the method parseStatementListDialect:

/**
 * Parses Hive-dialect statements that the generic SQL statement parser does
 * not handle (FROM-first inserts, LOAD, ANALYZE, EXPORT/IMPORT, MSCK, and
 * several SHOW variants).
 *
 * <p>Returns {@code true} when a statement was consumed and appended to
 * {@code statementList}; returns {@code false} so the caller can fall back to
 * the generic parsing path. Throws {@link ParserException} for SHOW variants
 * this parser does not yet support.
 *
 * @param statementList output list that parsed statements are appended to
 * @return true if a dialect-specific statement was parsed, false otherwise
 */
public boolean parseStatementListDialect(List<SQLStatement> statementList) {
    // A leading hint (e.g. /*+ ... */) is attached as head hints to the
    // following statement, but only when the next token can start a statement.
    if (lexer.token() == Token.HINT) {
        List<SQLCommentHint> hints = this.exprParser.parseHints();
        boolean acceptHint = false;
        switch (lexer.token()) {
            case SELECT:
            case WITH:
            case DELETE:
            case UPDATE:
            case INSERT:
            case SHOW:
            case REPLACE:
            case TRUNCATE:
            case DROP:
            case ALTER:
            case CREATE:
            case CHECK:
            case SET:
            case DESC:
            case OPTIMIZE:
            case ANALYZE:
            case KILL:
            case EXPLAIN:
            case LPAREN:
                acceptHint = true;
                break;
            default:
                break;
        }
        if (lexer.identifierEquals("MSCK")) {
            acceptHint = true;
        }
        if (acceptHint) {
            SQLStatementImpl stmt = (SQLStatementImpl) this.parseStatement();
            stmt.setHeadHints(hints);
            statementList.add(stmt);
            return true;
        }
        // NOTE(review): if the hint is not followed by a statement keyword,
        // the parsed hints are silently discarded here — confirm intended.
    }
    // Hive multi-insert shorthand: FROM <source> INSERT ...
    if (lexer.token() == Token.FROM) {
        SQLStatement stmt = this.parseInsert();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.LOAD)) {
        HiveLoadDataStatement stmt = parseLoad();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.ANALYZE)) {
        SQLStatement stmt = parseAnalyze();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.EXPORT)) {
        SQLStatement stmt = parseExport();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.IMPORT)) {
        SQLStatement stmt = parseImport();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals("MSCK")) {
        SQLStatement stmt = parseMsck();
        statementList.add(stmt);
        return true;
    }
    // SHOW <variant>: dispatch on the token after SHOW.
    if (lexer.identifierEquals(FnvHash.Constants.SHOW)) {
        Lexer.SavePoint savePoint = this.lexer.mark();
        lexer.nextToken();
        if (lexer.identifierEquals(FnvHash.Constants.VIEWS)) {
            lexer.nextToken();
            SQLShowViewsStatement stmt = new SQLShowViewsStatement();
            if (lexer.token() == Token.IN) {
                lexer.nextToken();
                SQLName db = this.exprParser.name();
                stmt.setDatabase(db);
            }
            if (lexer.token() == Token.LIKE) {
                lexer.nextToken();
                SQLExpr pattern = this.exprParser.expr();
                stmt.setLike(pattern);
            }
            statementList.add(stmt);
            return true;
        }
        if (lexer.identifierEquals(FnvHash.Constants.TABLES)) {
            // parseShowTables expects to re-read the SHOW keyword itself.
            lexer.reset(savePoint);
            SQLStatement stmt = this.parseShowTables();
            statementList.add(stmt);
            return true;
        }
        if (lexer.identifierEquals(FnvHash.Constants.DATABASES)) {
            lexer.nextToken();
            SQLShowDatabasesStatement stmt = parseShowDatabases(false);
            statementList.add(stmt);
            return true;
        }
        if (lexer.token() == Token.INDEX) {
            lexer.nextToken();
            SQLShowIndexesStatement stmt = new SQLShowIndexesStatement();
            stmt.setType("INDEX");
            if (lexer.token() == Token.ON) {
                lexer.nextToken();
                SQLName table = exprParser.name();
                stmt.setTable(table);
            }
            if (lexer.token() == Token.HINT) {
                stmt.setHints(this.exprParser.parseHints());
            }
            statementList.add(stmt);
            return true;
        }
        if (lexer.token() == Token.CREATE) {
            SQLShowCreateTableStatement stmt = parseShowCreateTable();
            statementList.add(stmt);
            return true;
        }
        if (lexer.identifierEquals(FnvHash.Constants.PARTITIONS)) {
            lexer.nextToken();
            SQLShowPartitionsStmt stmt = new SQLShowPartitionsStmt();
            if (lexer.token() == Token.FROM) {
                lexer.nextToken();
            }
            SQLExpr expr = this.exprParser.expr();
            stmt.setTableSource(new SQLExprTableSource(expr));
            if (lexer.token() == Token.PARTITION) {
                lexer.nextToken();
                accept(Token.LPAREN);
                parseAssignItems(stmt.getPartition(), stmt, false);
                accept(Token.RPAREN);
            }
            if (lexer.token() == Token.WHERE) {
                lexer.nextToken();
                stmt.setWhere(this.exprParser.expr());
            }
            statementList.add(stmt);
            return true;
        }
        if (lexer.identifierEquals(FnvHash.Constants.COLUMNS)) {
            lexer.nextToken();
            SQLShowColumnsStatement stmt = new SQLShowColumnsStatement();
            if (lexer.token() == Token.FROM || lexer.token() == Token.IN) {
                lexer.nextToken();
                SQLName table = exprParser.name();
                // Re-join a table name the lexer split on '-' (e.g. my-table)
                // back into a single identifier.
                if (lexer.token() == Token.SUB && table instanceof SQLIdentifierExpr) {
                    lexer.nextToken();
                    String strVal = lexer.stringVal();
                    lexer.nextToken();
                    table = new SQLIdentifierExpr(((SQLIdentifierExpr) table).getName() + "-" + strVal);
                }
                stmt.setTable(table);
            }
            if (lexer.token() == Token.LIKE) {
                lexer.nextToken();
                SQLExpr like = exprParser.expr();
                stmt.setLike(like);
            }
            if (lexer.token() == Token.WHERE) {
                lexer.nextToken();
                SQLExpr where = exprParser.expr();
                stmt.setWhere(where);
            }
            statementList.add(stmt);
            return true;
        }
        throw new ParserException("TODO " + lexer.info());
    }
    return false;
}
Also used : HiveLoadDataStatement(com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement) SQLIdentifierExpr(com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr)

Example 2 with HiveLoadDataStatement

Use of com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement in the project druid by alibaba.

In the class OdpsStatementParser, the method parseLoad:

/**
 * Parses an ODPS LOAD statement: LOAD [OVERWRITE | INTO] TABLE <name>
 * [PARTITION (...)] [LOCAL] FROM LOCATION <expr> with optional
 * STORED BY/AS, ROW FORMAT SERDE, WITH SERDEPROPERTIES and USING clauses.
 *
 * @return the populated load-data statement
 */
protected HiveLoadDataStatement parseLoad() {
    acceptIdentifier("LOAD");
    HiveLoadDataStatement stmt = new HiveLoadDataStatement();

    // OVERWRITE replaces existing data; INTO appends. Either prefix is optional.
    if (lexer.token() == OVERWRITE) {
        stmt.setOverwrite(true);
        lexer.nextToken();
    } else if (lexer.token() == Token.INTO) {
        lexer.nextToken();
    }

    accept(Token.TABLE);
    stmt.setInto(this.exprParser.expr());

    // Optional target partition spec.
    if (lexer.token() == Token.PARTITION) {
        lexer.nextToken();
        accept(Token.LPAREN);
        this.exprParser.exprList(stmt.getPartition(), stmt);
        accept(Token.RPAREN);
    }

    if (lexer.identifierEquals(FnvHash.Constants.LOCAL)) {
        lexer.nextToken();
        stmt.setLocal(true);
    }

    // Mandatory data source.
    accept(Token.FROM);
    acceptIdentifier("LOCATION");
    stmt.setInpath(this.exprParser.expr());

    // STORED BY <handler> or STORED AS <format>.
    if (lexer.identifierEquals("STORED")) {
        lexer.nextToken();
        if (lexer.token() != Token.BY) {
            accept(Token.AS);
            stmt.setStoredAs(this.exprParser.expr());
        } else {
            lexer.nextToken();
            stmt.setStoredBy(this.exprParser.expr());
        }
    }

    if (lexer.identifierEquals("ROW")) {
        lexer.nextToken();
        acceptIdentifier("FORMAT");
        acceptIdentifier("SERDE");
        stmt.setRowFormat(this.exprParser.expr());
    }

    // WITH SERDEPROPERTIES ( key = value [, ...] )
    if (lexer.token() == Token.WITH) {
        lexer.nextToken();
        acceptIdentifier("SERDEPROPERTIES");
        accept(Token.LPAREN);
        do {
            String key = lexer.stringVal();
            lexer.nextToken();
            accept(Token.EQ);
            stmt.getSerdeProperties().put(key, this.exprParser.primary());
            if (lexer.token() != Token.COMMA) {
                break;
            }
            lexer.nextToken();
        } while (true);
        accept(Token.RPAREN);
    }

    // A trailing STORED AS may also appear after SERDEPROPERTIES.
    if (lexer.identifierEquals("STORED")) {
        lexer.nextToken();
        accept(Token.AS);
        stmt.setStoredAs(this.exprParser.expr());
    }

    if (lexer.identifierEquals(FnvHash.Constants.USING)) {
        lexer.nextToken();
        stmt.setUsing(this.exprParser.expr());
    }

    return stmt;
}
Also used : HiveLoadDataStatement(com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement)

Example 3 with HiveLoadDataStatement

use of com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement in project druid by alibaba.

In the class OdpsStatementParser, the method parseStatementListDialect:

/**
 * Parses ODPS (MaxCompute) dialect statements that the base statement parser
 * does not recognize: ADD/REMOVE resources, READ, LIST, COUNT, EXSTORE,
 * INSTALL PACKAGE, LOAD, MERGE, CLONE, UNLOAD, RESTORE, UNDO, declared
 * functions, {@code @variable} declarations/assignments, and more.
 *
 * <p>Returns {@code true} when a statement was consumed and appended to
 * {@code statementList}; returns {@code false} so the caller can fall back to
 * the generic parsing path. Throws {@link ParserException} for ADD/REMOVE
 * variants that are not supported.
 *
 * @param statementList output list that parsed statements are appended to
 * @return true if a dialect-specific statement was parsed, false otherwise
 */
public boolean parseStatementListDialect(List<SQLStatement> statementList) {
    // Multi-insert shorthand: FROM <source> INSERT ...
    if (lexer.token() == Token.FROM) {
        SQLStatement stmt = this.parseInsert();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals("ANALYZE")) {
        SQLStatement stmt = parseAnalyze();
        statementList.add(stmt);
        return true;
    }
    // ADD STATISTIC | USER | ACCOUNTPROVIDER | TABLE | FILE/JAR/PY/ARCHIVE
    if (lexer.identifierEquals("ADD")) {
        lexer.nextToken();
        if (lexer.identifierEquals("STATISTIC")) {
            lexer.nextToken();
            OdpsAddStatisticStatement stmt = new OdpsAddStatisticStatement();
            stmt.setTable(this.exprParser.name());
            stmt.setStatisticClause(parseStaticClause());
            statementList.add(stmt);
            return true;
        }
        if (lexer.token() == Token.USER) {
            lexer.nextToken();
            OdpsAddUserStatement stmt = new OdpsAddUserStatement();
            stmt.setUser(this.exprParser.name());
            statementList.add(stmt);
            return true;
        }
        if (lexer.identifierEquals("ACCOUNTPROVIDER")) {
            lexer.nextToken();
            OdpsAddAccountProviderStatement stmt = new OdpsAddAccountProviderStatement();
            stmt.setProvider(this.exprParser.name());
            statementList.add(stmt);
            return true;
        }
        if (lexer.token() == Token.TABLE) {
            lexer.nextToken();
            OdpsAddTableStatement stmt = new OdpsAddTableStatement();
            stmt.setTable(this.exprParser.name());
            if (lexer.token() == Token.PARTITION) {
                lexer.nextToken();
                this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
            }
            if (lexer.token() == Token.AS) {
                lexer.nextToken();
                SQLName name = this.exprParser.name();
                stmt.getTable().setAlias(name.toString());
            }
            if (lexer.token() == Token.COMMENT) {
                lexer.nextToken();
                stmt.setComment(this.exprParser.primary());
            }
            // "-f" force flag is lexed as SUB followed by identifier 'f'.
            if (lexer.token() == Token.SUB) {
                lexer.nextToken();
                acceptIdentifier("f");
                stmt.setForce(true);
            }
            if (lexer.token() == Token.TO) {
                lexer.nextToken();
                acceptIdentifier("PACKAGE");
                SQLName packageName = this.exprParser.name();
                stmt.setToPackage(packageName);
                if (lexer.token() == Token.WITH) {
                    lexer.nextToken();
                    acceptIdentifier("PRIVILEGES");
                    parsePrivileages(stmt.getPrivileges(), stmt);
                }
            }
            statementList.add(stmt);
            return true;
        }
        if (lexer.identifierEquals(FnvHash.Constants.FILE) || lexer.identifierEquals(FnvHash.Constants.JAR) || lexer.identifierEquals(FnvHash.Constants.PY) || lexer.identifierEquals(FnvHash.Constants.ARCHIVE)) {
            OdpsAddFileStatement stmt = new OdpsAddFileStatement();
            long hash = lexer.hash_lower();
            if (hash == FnvHash.Constants.JAR) {
                stmt.setType(OdpsAddFileStatement.FileType.JAR);
            } else if (hash == FnvHash.Constants.PY) {
                stmt.setType(OdpsAddFileStatement.FileType.PY);
            } else if (hash == FnvHash.Constants.ARCHIVE) {
                stmt.setType(OdpsAddFileStatement.FileType.ARCHIVE);
            }
            // File paths are not regular tokens; read the raw path text.
            lexer.nextPath();
            String path = lexer.stringVal();
            lexer.nextToken();
            stmt.setFile(path);
            if (lexer.token() == Token.AS) {
                lexer.nextToken();
                SQLName name = this.exprParser.name();
                stmt.setAlias(name.toString());
            }
            if (lexer.token() == Token.COMMENT) {
                lexer.nextToken();
                stmt.setComment(this.exprParser.primary());
            }
            if (lexer.token() == Token.SUB) {
                lexer.nextToken();
                acceptIdentifier("f");
                stmt.setForce(true);
            }
            statementList.add(stmt);
            return true;
        }
        throw new ParserException("TODO " + lexer.info());
    }
    // REMOVE STATISTIC | USER
    if (lexer.identifierEquals("REMOVE")) {
        lexer.nextToken();
        if (lexer.identifierEquals("STATISTIC")) {
            lexer.nextToken();
            OdpsRemoveStatisticStatement stmt = new OdpsRemoveStatisticStatement();
            stmt.setTable(this.exprParser.name());
            stmt.setStatisticClause(parseStaticClause());
            statementList.add(stmt);
            return true;
        }
        if (lexer.token() == Token.USER) {
            lexer.nextToken();
            OdpsRemoveUserStatement stmt = new OdpsRemoveUserStatement();
            stmt.setUser((SQLIdentifierExpr) this.exprParser.name());
            statementList.add(stmt);
            return true;
        }
        throw new ParserException("TODO " + lexer.info());
    }
    // READ <table> [(cols)] [PARTITION (...)] [<rowCount>]
    if (lexer.identifierEquals("READ")) {
        OdpsReadStatement stmt = new OdpsReadStatement();
        if (lexer.hasComment() && lexer.isKeepComments()) {
            stmt.addBeforeComment(lexer.readAndResetComments());
        }
        lexer.nextToken();
        stmt.setTable(this.exprParser.name());
        if (lexer.token() == Token.LPAREN) {
            lexer.nextToken();
            this.exprParser.names(stmt.getColumns(), stmt);
            accept(Token.RPAREN);
        }
        if (lexer.token() == Token.PARTITION) {
            lexer.nextToken();
            accept(Token.LPAREN);
            parseAssignItems(stmt.getPartition(), stmt);
            accept(Token.RPAREN);
        }
        if (lexer.token() == Token.LITERAL_INT) {
            stmt.setRowCount(this.exprParser.primary());
        }
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals("LIST")) {
        OdpsListStmt stmt = new OdpsListStmt();
        lexer.nextToken();
        stmt.setObject(this.exprParser.expr());
        // Fold two-word objects (TENANT ROLES, TEMPORARY OUTPUT) into one identifier.
        if (lexer.identifierEquals("ROLES") && stmt.getObject() instanceof SQLIdentifierExpr && ((SQLIdentifierExpr) stmt.getObject()).nameEquals("TENANT")) {
            lexer.nextToken();
            stmt.setObject(new SQLIdentifierExpr("TENANT ROLES"));
        } else if (lexer.identifierEquals("OUTPUT") && stmt.getObject() instanceof SQLIdentifierExpr && ((SQLIdentifierExpr) stmt.getObject()).nameEquals("TEMPORARY")) {
            lexer.nextToken();
            stmt.setObject(new SQLIdentifierExpr("TEMPORARY OUTPUT"));
        }
        statementList.add(stmt);
        return true;
    }
    if (lexer.token() == Token.DESC || lexer.identifierEquals("DESCRIBE")) {
        SQLStatement stmt = parseDescribe();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals("WHOAMI")) {
        lexer.nextToken();
        SQLWhoamiStatement stmt = new SQLWhoamiStatement();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals("COUNT")) {
        lexer.nextToken();
        OdpsCountStatement stmt = new OdpsCountStatement();
        stmt.setTable(this.exprParser.name());
        if (lexer.token() == Token.PARTITION) {
            lexer.nextToken();
            this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
        }
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals("MSCK")) {
        SQLStatement stmt = parseMsck();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals("EXSTORE")) {
        lexer.nextToken();
        OdpsExstoreStatement stmt = new OdpsExstoreStatement();
        SQLExpr table = this.exprParser.expr();
        stmt.setTable(new SQLExprTableSource(table));
        accept(Token.PARTITION);
        this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals("INSTALL")) {
        lexer.nextToken();
        acceptIdentifier("PACKAGE");
        OdpsInstallPackageStatement stmt = new OdpsInstallPackageStatement();
        stmt.setPackageName(this.exprParser.name());
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.KILL)) {
        SQLStatement stmt = parseKill();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.LOAD)) {
        HiveLoadDataStatement stmt = parseLoad();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.MERGE)) {
        SQLStatement stmt = parseMerge();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.CLONE)) {
        SQLStatement stmt = parseClone();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.UNLOAD)) {
        SQLStatement stmt = parseUnload();
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.BEGIN)) {
        SQLStatement stmt = parseBlock();
        statementList.add(stmt);
        return true;
    }
    // RESTORE TABLE <name> [(props)] [PARTITION (...)] [TO LSN <expr>] [AS <alias>]
    if (lexer.identifierEquals(FnvHash.Constants.RESTORE)) {
        lexer.nextToken();
        accept(Token.TABLE);
        OdpsRestoreStatement stmt = new OdpsRestoreStatement();
        stmt.setTable(this.exprParser.name());
        if (lexer.token() == Token.LPAREN) {
            this.exprParser.parseAssignItem(stmt.getProperties(), stmt);
        }
        if (lexer.token() == Token.PARTITION) {
            lexer.nextToken();
            this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
        }
        if (lexer.token() == Token.TO) {
            lexer.nextToken();
            acceptIdentifier("LSN");
            stmt.setTo(this.exprParser.expr());
        }
        if (lexer.token() == Token.AS) {
            lexer.nextToken();
            stmt.setAlias(this.alias());
        }
        statementList.add(stmt);
        return true;
    }
    if (lexer.identifierEquals(FnvHash.Constants.UNDO)) {
        lexer.nextToken();
        accept(Token.TABLE);
        OdpsUndoTableStatement stmt = new OdpsUndoTableStatement();
        stmt.setTable(new SQLExprTableSource(this.exprParser.name()));
        if (lexer.token() == Token.PARTITION) {
            lexer.nextToken();
            this.exprParser.parseAssignItem(stmt.getPartitions(), stmt);
        }
        accept(Token.TO);
        stmt.setTo(this.exprParser.expr());
        statementList.add(stmt);
        return true;
    }
    if (lexer.token() == Token.FUNCTION) {
        HiveCreateFunctionStatement stmt = (HiveCreateFunctionStatement) parseHiveCreateFunction();
        stmt.setDeclare(true);
        statementList.add(stmt);
        return true;
    }
    // @variable: either an assignment (@v := <expr or query>) or a declaration.
    if (lexer.token() == Token.VARIANT && lexer.stringVal().startsWith("@")) {
        // NOTE(review): this savepoint is captured but never restored on this
        // path — confirm whether a reset was intentionally dropped.
        Lexer.SavePoint mark = lexer.mark();
        String variant = lexer.stringVal();
        lexer.nextToken();
        if (lexer.token() == Token.COLONEQ) {
            lexer.nextToken();
            boolean cache = false;
            if (lexer.identifierEquals(FnvHash.Constants.CACHE)) {
                lexer.nextToken();
                accept(Token.ON);
                cache = true;
            }
            // Peek past a possible '(' to decide between expression and query.
            Lexer.SavePoint lpMark = null;
            if (lexer.token() == Token.LPAREN) {
                lpMark = lexer.mark();
                lexer.nextToken();
            }
            switch (lexer.token()) {
                case LITERAL_INT:
                case LITERAL_FLOAT:
                case LITERAL_CHARS:
                case LITERAL_ALIAS:
                case IDENTIFIER:
                case CASE:
                case CAST:
                case IF:
                case VARIANT:
                case REPLACE:
                case NEW:
                case SUB:
                case TRUE:
                case FALSE:
                    {
                        // RHS is a plain expression: @v := <expr>
                        if (lpMark != null) {
                            lexer.reset(lpMark);
                        }
                        SQLExpr expr = this.exprParser.expr();
                        SQLExprStatement stmt = new SQLExprStatement(new SQLAssignItem(new SQLIdentifierExpr(variant), expr));
                        statementList.add(stmt);
                        return true;
                    }
                default:
                    // Otherwise try parsing the RHS as a query.
                    if (lpMark != null) {
                        lexer.reset(lpMark);
                    }
                    boolean paren = lexer.token() == Token.LPAREN;
                    Lexer.SavePoint parenMark = lexer.mark();
                    SQLSelect select;
                    try {
                        select = new OdpsSelectParser(this.exprParser).select();
                    } catch (ParserException error) {
                        // A parenthesized RHS that is not a query falls back
                        // to plain-expression parsing.
                        if (paren) {
                            lexer.reset(parenMark);
                            SQLExpr expr = this.exprParser.expr();
                            SQLExprStatement stmt = new SQLExprStatement(new SQLAssignItem(new SQLIdentifierExpr(variant), expr));
                            statementList.add(stmt);
                            return true;
                        }
                        throw error;
                    }
                    // Comparison after the query: the whole thing is an
                    // expression assignment, e.g. @v := (SELECT ...) > 0.
                    switch (lexer.token()) {
                        case GT:
                        case GTEQ:
                        case EQ:
                        case LT:
                        case LTEQ:
                            statementList.add(new SQLExprStatement(new SQLAssignItem(new SQLIdentifierExpr(variant), this.exprParser.exprRest(new SQLQueryExpr(select)))));
                            return true;
                        default:
                            break;
                    }
                    SQLSelectStatement stmt = new SQLSelectStatement(select, dbType);
                    OdpsQueryAliasStatement aliasQueryStatement = new OdpsQueryAliasStatement(variant, stmt);
                    aliasQueryStatement.setCache(cache);
                    statementList.add(aliasQueryStatement);
                    return true;
            }
        }
        // No ':=' — this is a variable declaration with optional type/init.
        OdpsDeclareVariableStatement stmt = new OdpsDeclareVariableStatement();
        if (lexer.token() != Token.EQ && lexer.token() != Token.SEMI && lexer.token() != Token.EOF) {
            stmt.setDataType(this.exprParser.parseDataType());
        }
        if (lexer.token() == Token.EQ || lexer.token() == Token.COLONEQ) {
            lexer.nextToken();
            stmt.setInitValue(this.exprParser.expr());
        }
        if (lexer.token() == Token.SEMI) {
            lexer.nextToken();
        }
        statementList.add(stmt);
        return true;
    }
    if (lexer.token() == Token.IF) {
        SQLStatement stmt = parseIf();
        statementList.add(stmt);
        return true;
    }
    // An empty CODE token (followed by ';' or EOF) is consumed as a no-op.
    if (lexer.token() == Token.CODE) {
        Lexer.SavePoint mark = lexer.mark();
        lexer.nextToken();
        if (lexer.token() == Token.EOF || lexer.token() == Token.SEMI) {
            return true;
        }
        lexer.reset(mark);
    }
    return false;
}
Also used : HiveLoadDataStatement(com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement) HiveCreateFunctionStatement(com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateFunctionStatement) SQLIdentifierExpr(com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr) SQLQueryExpr(com.alibaba.druid.sql.ast.expr.SQLQueryExpr)

Example 4 with HiveLoadDataStatement

use of com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement in project druid by alibaba.

In the class HiveStatementParser, the method parseLoad:

/**
 * Parses a Hive LOAD DATA statement:
 * LOAD DATA [LOCAL] INPATH <path> [OVERWRITE] INTO TABLE <table>
 * [PARTITION (col = val, ...)].
 *
 * @return the populated load-data statement
 */
protected HiveLoadDataStatement parseLoad() {
    acceptIdentifier("LOAD");
    acceptIdentifier("DATA");
    HiveLoadDataStatement stmt = new HiveLoadDataStatement();

    // LOCAL means the path refers to the client-side filesystem.
    if (lexer.identifierEquals(FnvHash.Constants.LOCAL)) {
        lexer.nextToken();
        stmt.setLocal(true);
    }

    acceptIdentifier("INPATH");
    stmt.setInpath(this.exprParser.expr());

    if (lexer.token() == Token.OVERWRITE) {
        lexer.nextToken();
        stmt.setOverwrite(true);
    }

    accept(Token.INTO);
    accept(Token.TABLE);
    stmt.setInto(this.exprParser.expr());

    // Optional target partition spec.
    if (lexer.token() == Token.PARTITION) {
        lexer.nextToken();
        accept(Token.LPAREN);
        this.exprParser.exprList(stmt.getPartition(), stmt);
        accept(Token.RPAREN);
    }
    return stmt;
}
Also used : HiveLoadDataStatement(com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement)

Aggregations

HiveLoadDataStatement (com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement)4 SQLIdentifierExpr (com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr)2 SQLQueryExpr (com.alibaba.druid.sql.ast.expr.SQLQueryExpr)1 HiveCreateFunctionStatement (com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateFunctionStatement)1