
Example 1 with HiveMultiInsertStatement

Use of com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement in project druid by alibaba.

From class HiveStatementParser, method parseInsert:

public SQLStatement parseInsert() {
    // A statement that starts with FROM is Hive's multi-insert form:
    // one shared source feeding one or more INSERT ... SELECT branches.
    if (lexer.token() == Token.FROM) {
        lexer.nextToken();
        HiveMultiInsertStatement stmt = new HiveMultiInsertStatement();
        if (lexer.token() == Token.IDENTIFIER) {
            SQLName tableName = this.exprParser.name();
            SQLExprTableSource from = new SQLExprTableSource(tableName);
            SQLTableSource tableSource = createSQLSelectParser().parseTableSourceRest(from);
            stmt.setFrom(tableSource);
            if (lexer.token() == Token.IDENTIFIER) {
                from.setAlias(lexer.stringVal());
                lexer.nextToken();
            }
        } else {
            accept(Token.LPAREN);
            SQLSelectParser selectParser = createSQLSelectParser();
            SQLSelect select = selectParser.select();
            accept(Token.RPAREN);
            String alias = lexer.stringVal();
            accept(Token.IDENTIFIER);
            SQLTableSource from = new SQLSubqueryTableSource(select, alias);
            switch(lexer.token()) {
                case LEFT:
                case RIGHT:
                case FULL:
                case JOIN:
                    from = selectParser.parseTableSourceRest(from);
                    break;
                default:
                    break;
            }
            stmt.setFrom(from);
        }
        // Consume the INSERT ... SELECT branches that share the FROM source.
        for (;;) {
            HiveInsert insert = parseHiveInsert();
            stmt.addItem(insert);
            if (lexer.token() != Token.INSERT) {
                break;
            }
        }
        return stmt;
    }
    return parseHiveInsertStmt();
}
Also used: HiveInsert (com.alibaba.druid.sql.dialect.hive.ast.HiveInsert), HiveMultiInsertStatement (com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement)
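
A minimal, hedged sketch of driving this parse path through the public API. The table and column names are invented for illustration; the call pattern mirrors the HiveSelectTest_43_bug test in Example 5.

import java.util.List;

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement;

public class HiveMultiInsertParseSketch {
    public static void main(String[] args) {
        // Hypothetical multi-insert: one shared FROM source, two INSERT branches.
        String sql = "FROM src_table s\n"
                + "INSERT OVERWRITE TABLE t1 SELECT s.id, s.name WHERE s.kind = 1\n"
                + "INSERT OVERWRITE TABLE t2 SELECT s.id, s.name WHERE s.kind = 2";

        // For DbType.hive, SQLUtils.parseStatements dispatches to HiveStatementParser,
        // and the leading FROM routes into the parseInsert() branch shown above.
        List<SQLStatement> statements = SQLUtils.parseStatements(sql, DbType.hive);
        HiveMultiInsertStatement stmt = (HiveMultiInsertStatement) statements.get(0);
        System.out.println(stmt);
    }
}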

Example 2 with HiveMultiInsertStatement

Use of com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement in project druid by alibaba.

From class SQLStatementParser, method parseWith:

public SQLStatement parseWith() {
    SQLWithSubqueryClause with = this.parseWithQuery();
    if (lexer.token == Token.SELECT || lexer.token == Token.LPAREN) {
        SQLSelectParser selectParser = createSQLSelectParser();
        SQLSelect select = selectParser.select();
        select.setWithSubQuery(with);
        return new SQLSelectStatement(select, dbType);
    } else if (lexer.token == Token.INSERT) {
        SQLInsertStatement insert = (SQLInsertStatement) this.parseInsert();
        insert.setWith(with);
        return insert;
    } else if (lexer.token == Token.FROM) {
        // WITH ... FROM ... is the Hive/ODPS multi-insert form; parseInsert() is
        // overridden by HiveStatementParser and OdpsStatementParser (see the other examples).
        HiveMultiInsertStatement insert = (HiveMultiInsertStatement) this.parseInsert();
        insert.setWith(with);
        return insert;
    } else if (lexer.token == Token.UPDATE) {
        SQLUpdateStatement update = this.parseUpdateStatement();
        update.setWith(with);
        return update;
    }
    throw new ParserException("TODO. " + lexer.info());
}
Also used: HiveMultiInsertStatement (com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement)
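
Only the FROM branch above yields a HiveMultiInsertStatement: a WITH clause feeding Hive's multi-insert form. A hedged sketch of that shape, reusing the imports from the Example 1 sketch (the SQL is invented and not verified against a specific Druid version):

// Hypothetical CTE feeding a multi-insert; parseWith() attaches the WITH clause via setWith(...).
String sql = "WITH src AS (SELECT id, kind FROM raw_events)\n"
        + "FROM src\n"
        + "INSERT OVERWRITE TABLE t1 SELECT id WHERE kind = 1\n"
        + "INSERT OVERWRITE TABLE t2 SELECT id WHERE kind = 2";

SQLStatement parsed = SQLUtils.parseStatements(sql, DbType.hive).get(0);
if (parsed instanceof HiveMultiInsertStatement) {
    System.out.println(parsed);
}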

Example 3 with HiveMultiInsertStatement

Use of com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement in project druid by alibaba.

From class OdpsStatementParser, method parseInsert:

public SQLStatement parseInsert() {
    if (lexer.token() == Token.FROM) {
        lexer.nextToken();
        HiveMultiInsertStatement stmt = new HiveMultiInsertStatement();
        if (lexer.token() == Token.IDENTIFIER || lexer.token() == Token.VARIANT) {
            Lexer.SavePoint mark = lexer.mark();
            SQLExpr tableName = this.exprParser.name();
            if (lexer.token() == Token.LPAREN) {
                lexer.reset(mark);
                tableName = this.exprParser.primary();
            }
            SQLTableSource from = new SQLExprTableSource(tableName);
            if (lexer.token() == Token.IDENTIFIER) {
                String alias = alias();
                from.setAlias(alias);
            }
            SQLSelectParser selectParser = createSQLSelectParser();
            from = selectParser.parseTableSourceRest(from);
            if (lexer.token() == Token.WHERE) {
                lexer.nextToken();
                SQLExpr where = this.exprParser.expr();
                SQLSelectQueryBlock queryBlock = new SQLSelectQueryBlock();
                queryBlock.addSelectItem(new SQLAllColumnExpr());
                queryBlock.setFrom(from);
                queryBlock.setWhere(where);
                if (lexer.token() == Token.GROUP) {
                    selectParser.parseGroupBy(queryBlock);
                }
                stmt.setFrom(new SQLSubqueryTableSource(queryBlock));
            } else {
                stmt.setFrom(from);
            }
        } else {
            SQLCommentHint hint = null;
            if (lexer.token() == Token.HINT) {
                hint = this.exprParser.parseHint();
            }
            accept(Token.LPAREN);
            boolean paren2 = lexer.token() == Token.LPAREN;
            SQLSelectParser selectParser = createSQLSelectParser();
            SQLSelect select = selectParser.select();
            SQLTableSource from = null;
            if (paren2 && lexer.token() != Token.RPAREN) {
                String subQueryAs = null;
                if (lexer.token() == Token.AS) {
                    lexer.nextToken();
                    subQueryAs = tableAlias(true);
                } else {
                    subQueryAs = tableAlias(false);
                }
                SQLSubqueryTableSource subQuery = new SQLSubqueryTableSource(select, subQueryAs);
                from = selectParser.parseTableSourceRest(subQuery);
            }
            accept(Token.RPAREN);
            String alias;
            if (lexer.token() == Token.INSERT) {
                alias = null;
            } else if (lexer.token() == Token.SELECT) {
                // skip
                alias = null;
            } else {
                if (lexer.token() == Token.AS) {
                    lexer.nextToken();
                }
                alias = lexer.stringVal();
                accept(Token.IDENTIFIER);
            }
            if (from == null) {
                from = new SQLSubqueryTableSource(select, alias);
            } else {
                if (alias != null) {
                    from.setAlias(alias);
                }
            }
            SQLTableSource tableSource = selectParser.parseTableSourceRest(from);
            if (hint != null) {
                if (tableSource instanceof SQLJoinTableSource) {
                    ((SQLJoinTableSource) tableSource).setHint(hint);
                }
            }
            stmt.setFrom(tableSource);
        }
        if (lexer.token() == Token.SELECT) {
            SQLSelectParser selectParser = createSQLSelectParser();
            SQLSelect query = selectParser.select();
            HiveInsert insert = new HiveInsert();
            insert.setQuery(query);
            stmt.addItem(insert);
            return stmt;
        }
        // Same branch loop as the Hive parser: consume the INSERT ... SELECT branches.
        for (;;) {
            HiveInsert insert = parseHiveInsert();
            stmt.addItem(insert);
            if (lexer.token() != Token.INSERT) {
                break;
            }
        }
        return stmt;
    }
    return parseHiveInsertStmt();
}
Also used: HiveInsert (com.alibaba.druid.sql.dialect.hive.ast.HiveInsert), HiveMultiInsertStatement (com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement), SQLAllColumnExpr (com.alibaba.druid.sql.ast.expr.SQLAllColumnExpr)
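
The ODPS override accepts a few extra shapes: VARIANT tokens and parenthesized table expressions as the source, an optional hint before a parenthesized subquery, and a bare WHERE (with optional GROUP BY) directly after the source. A hedged sketch of the plain identifier path, reusing the imports from the Example 1 sketch (names invented; DbType.odps is assumed to route to OdpsStatementParser):

String sql = "FROM ods_events t\n"
        + "INSERT OVERWRITE TABLE ads_a SELECT t.id WHERE t.kind = 1\n"
        + "INSERT OVERWRITE TABLE ads_b SELECT t.id WHERE t.kind = 2";

List<SQLStatement> statements = SQLUtils.parseStatements(sql, DbType.odps);
HiveMultiInsertStatement stmt = (HiveMultiInsertStatement) statements.get(0);
System.out.println(stmt);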

Example 4 with HiveMultiInsertStatement

Use of com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement in project druid by alibaba.

From class SchemaStatVisitor, method visit:

public boolean visit(SQLSelectQueryBlock x) {
    SQLTableSource from = x.getFrom();
    setMode(x, Mode.Select);
    boolean isHiveMultiInsert = false;
    if (from == null) {
        isHiveMultiInsert = x.getParent() != null && x.getParent().getParent() instanceof HiveInsert && x.getParent().getParent().getParent() instanceof HiveMultiInsertStatement;
        if (isHiveMultiInsert) {
            from = ((HiveMultiInsertStatement) x.getParent().getParent().getParent()).getFrom();
        }
    }
    if (from == null) {
        for (SQLSelectItem selectItem : x.getSelectList()) {
            statExpr(selectItem.getExpr());
        }
        return false;
    }
    if (from != null) {
        // run this first to obtain the aliasMap
        from.accept(this);
    }
    SQLExprTableSource into = x.getInto();
    if (into != null && into.getExpr() instanceof SQLName) {
        SQLName intoExpr = (SQLName) into.getExpr();
        boolean isParam = intoExpr instanceof SQLIdentifierExpr && isParam((SQLIdentifierExpr) intoExpr);
        if (!isParam) {
            TableStat stat = getTableStat(intoExpr);
            if (stat != null) {
                stat.incrementInsertCount();
            }
        }
        into.accept(this);
    }
    for (SQLSelectItem selectItem : x.getSelectList()) {
        if (selectItem.getClass() == SQLSelectItem.class) {
            statExpr(selectItem.getExpr());
        } else {
            selectItem.accept(this);
        }
    }
    SQLExpr where = x.getWhere();
    if (where != null) {
        statExpr(where);
    }
    SQLExpr startWith = x.getStartWith();
    if (startWith != null) {
        statExpr(startWith);
    }
    SQLExpr connectBy = x.getConnectBy();
    if (connectBy != null) {
        statExpr(connectBy);
    }
    SQLSelectGroupByClause groupBy = x.getGroupBy();
    if (groupBy != null) {
        for (SQLExpr expr : groupBy.getItems()) {
            statExpr(expr);
        }
    }
    List<SQLWindow> windows = x.getWindows();
    if (windows != null && windows.size() > 0) {
        for (SQLWindow window : windows) {
            window.accept(this);
        }
    }
    SQLOrderBy orderBy = x.getOrderBy();
    if (orderBy != null) {
        this.visit(orderBy);
    }
    SQLExpr first = x.getFirst();
    if (first != null) {
        statExpr(first);
    }
    List<SQLSelectOrderByItem> distributeBy = x.getDistributeBy();
    if (distributeBy != null) {
        for (SQLSelectOrderByItem item : distributeBy) {
            statExpr(item.getExpr());
        }
    }
    List<SQLSelectOrderByItem> sortBy = x.getSortBy();
    if (sortBy != null) {
        for (SQLSelectOrderByItem orderByItem : sortBy) {
            statExpr(orderByItem.getExpr());
        }
    }
    for (SQLExpr expr : x.getForUpdateOf()) {
        statExpr(expr);
    }
    return false;
}
Also used: HiveInsert (com.alibaba.druid.sql.dialect.hive.ast.HiveInsert), TableStat (com.alibaba.druid.stat.TableStat), HiveMultiInsertStatement (com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement)
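
The fallback above lets table and column usage inside each INSERT branch be attributed to the shared FROM source even though the branch's query block has no FROM of its own. A minimal sketch of collecting stats over a multi-insert statement, reusing the imports from the Example 1 sketch; it uses the base SchemaStatVisitor directly and assumes its usual accessors (getTables(), getColumns()), so treat it as illustrative rather than the canonical way to obtain a dialect-aware visitor:

import com.alibaba.druid.sql.visitor.SchemaStatVisitor;

String sql = "FROM src_table s\n"
        + "INSERT OVERWRITE TABLE t1 SELECT s.id WHERE s.kind = 1\n"
        + "INSERT OVERWRITE TABLE t2 SELECT s.name WHERE s.kind = 2";

SQLStatement stmt = SQLUtils.parseStatements(sql, DbType.hive).get(0);
SchemaStatVisitor visitor = new SchemaStatVisitor();
stmt.accept(visitor);

// Expected (not verified here): src_table is read, t1/t2 are insert targets,
// and s.id / s.name / s.kind are attributed to src_table via the fallback above.
System.out.println(visitor.getTables());
System.out.println(visitor.getColumns());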

Example 5 with HiveMultiInsertStatement

Use of com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement in project druid by alibaba.

From the test class HiveSelectTest_43_bug, method test_0:

public void test_0() throws Exception {
    String sql = "from (select a.ddate\n" + "        ,a.game_id\n" + "        ,a.plat_id\n" + "        ,a.channel_group_id\n" + "        ,a.channel_id\n" + "        ,a.zone_id\n" + "        ,a.player_id\n" + "        ,pay_amt_1d\n" + "        ,pay_cnt_1d\n" + "        ,CASE WHEN actv_day_num_td = 0 THEN 0 \n" + "              WHEN actv_day_num_td<=3 THEN 1\n" + "              WHEN actv_day_num_td<=6 THEN 2 \n" + "              WHEN actv_day_num_td<=13 THEN 3\n" + "              WHEN actv_day_num_td<=21 THEN 4\n" + "              WHEN actv_day_num_td<=30 THEN 5\n" + "              WHEN actv_day_num_td<=60 THEN 6\n" + "              WHEN actv_day_num_td<=90 THEN 7\n" + "              WHEN actv_day_num_td>90 THEN 99 \n" + "              ELSE NULL END AS actv_day_num_td_segment_id     \n" + "        ,CASE WHEN reg_day_num_td = 0 THEN 0 \n" + "              WHEN reg_day_num_td<=3 THEN 1\n" + "              WHEN reg_day_num_td<=6 THEN 2 \n" + "              WHEN reg_day_num_td<=13 THEN 3\n" + "              WHEN reg_day_num_td<=21 THEN 4\n" + "              WHEN reg_day_num_td<=30 THEN 5\n" + "              WHEN reg_day_num_td<=60 THEN 6\n" + "              WHEN reg_day_num_td<=90 THEN 7\n" + "              WHEN reg_day_num_td>90 THEN 99 \n" + "              ELSE NULL END AS reg_day_num_td_segment_id \n" + "  from   (select ddate\n" + "                ,game_id\n" + "                ,plat_id\n" + "                ,channel_group_id\n" + "                ,channel_id\n" + "                ,zone_id\n" + "                ,player_id\n" + "                ,pay_amt_1d\n" + "                ,pay_cnt_1d\n" + "          from   dws_game_sdk_base.dws_game_sdk_pay_user_base_d a\n" + "          where  a.ddate >= '20200512'\n" + "          and    a.ddate < '20200513'\n" + "          and    a.first_pay_flag = 1) a\n" + "  inner  join (select ddate\n" + "                    ,game_id\n" + "                    ,plat_id\n" + "                    ,channel_group_id\n" + "                    ,channel_id\n" + "                    ,zone_id\n" + "                    ,player_id\n" + "                    ,actv_day_num_td\n" + "                    ,udf.datediff(ddate, reg_date) as reg_day_num_td\n" + "              from   dws_game_sdk_base.dws_game_sdk_user_actv_base_d a\n" + "              where  a.ddate >= '20200512'\n" + "              and    a.ddate < '20200513') b\n" + "  on     (a.ddate = b.ddate and a.game_id = b.game_id and\n" + "         a.plat_id = b.plat_id and a.channel_group_id = b.channel_group_id and\n" + "         a.channel_id = b.channel_id and a.zone_id = b.zone_id and\n" + "         a.player_id = b.player_id) ) d \n" + "INSERT OVERWRITE TABLE ads_game_sdk_base.ads_rpt_game_sdk_user_segment_d PARTITION(ddate,segment_type,user_type_id)\n" + "SELECT  game_id\n" + "    ,plat_id\n" + "    ,channel_group_id\n" + "    ,channel_id\n" + "    ,zone_id\n" + "    ,actv_day_num_td_segment_id AS segment_id\n" + "    ,CASE    WHEN actv_day_num_td_segment_id = 0 THEN '0天'\n" + "             WHEN actv_day_num_td_segment_id = 1 THEN '1~3天'\n" + "             WHEN actv_day_num_td_segment_id = 2 THEN '4~6天'\n" + "             WHEN actv_day_num_td_segment_id = 3 THEN '7~13天'\n" + "             WHEN actv_day_num_td_segment_id = 4 THEN '14~21天'\n" + "             WHEN actv_day_num_td_segment_id = 5 THEN '21~30天'\n" + "             WHEN actv_day_num_td_segment_id = 6 THEN '30~60天'\n" + "             WHEN actv_day_num_td_segment_id = 7 THEN '60~90天'\n" + "             WHEN actv_day_num_td_segment_id = 99 THEN '>90天' \n" + "     END 
AS segment_name\n" + "    ,COUNT(1) AS segment_user_num_1d\n" + "    ,CAST(NULL AS BIGINT) AS segment_value_sum_1d\n" + "    ,SUM(pay_cnt_1d) AS segment_ext_int_value1_1d\n" + "    ,CAST(NULL AS BIGINT) AS segment_ext_int_value2_1d\n" + "    ,CAST(NULL AS BIGINT) AS segment_ext_int_value3_1d\n" + "    ,CAST(NULL AS BIGINT) AS segment_ext_int_value1_4d\n" + "    ,SUM(pay_amt_1d) AS segment_ext_int_value4_1d\n" + "    ,CAST(NULL AS DOUBLE) AS segment_ext_double_value2_1d\n" + "    ,CAST(NULL AS DOUBLE) AS segment_ext_double_value3_1d\n" + "    ,CAST(NULL AS DOUBLE) AS segment_ext_double_value4_1d\n" + "    ,ddate\n" + "    ,'first_pay_user_actv_day_num' AS segment_type\n" + "    ,3 AS user_type_id\n" + "WHERE   actv_day_num_td_segment_id IS NOT NULL\n" + "GROUP BY ddate\n" + "     ,game_id\n" + "     ,plat_id\n" + "     ,channel_group_id\n" + "     ,channel_id\n" + "     ,zone_id\n" + "     ,actv_day_num_td_segment_id\n" + "INSERT OVERWRITE TABLE ads_game_sdk_base.ads_rpt_game_sdk_user_segment_d PARTITION(ddate,segment_type,user_type_id)\n" + "SELECT  game_id\n" + "    ,plat_id\n" + "    ,channel_group_id\n" + "    ,channel_id\n" + "    ,zone_id\n" + "    ,reg_day_num_td_segment_id AS segment_id\n" + "    ,CASE    WHEN reg_day_num_td_segment_id = 0 THEN '0天'\n" + "             WHEN reg_day_num_td_segment_id = 1 THEN '1~3天'\n" + "             WHEN reg_day_num_td_segment_id = 2 THEN '4~6天'\n" + "             WHEN reg_day_num_td_segment_id = 3 THEN '7~13天'\n" + "             WHEN reg_day_num_td_segment_id = 4 THEN '14~21天'\n" + "             WHEN reg_day_num_td_segment_id = 5 THEN '21~30天'\n" + "             WHEN reg_day_num_td_segment_id = 6 THEN '30~60天'\n" + "             WHEN reg_day_num_td_segment_id = 7 THEN '60~90天'\n" + "             WHEN reg_day_num_td_segment_id = 99 THEN '>90天' \n" + "     END AS segment_name\n" + "    ,COUNT(1) AS segment_user_num_1d\n" + "    ,CAST(NULL AS BIGINT) AS segment_value_sum_1d\n" + "    ,SUM(pay_cnt_1d) AS segment_ext_int_value1_1d\n" + "    ,CAST(NULL AS BIGINT) AS segment_ext_int_value2_1d\n" + "    ,CAST(NULL AS BIGINT) AS segment_ext_int_value3_1d\n" + "    ,CAST(NULL AS BIGINT) AS segment_ext_int_value1_4d\n" + "    ,SUM(pay_amt_1d) AS segment_ext_int_value4_1d\n" + "    ,CAST(NULL AS DOUBLE) AS segment_ext_double_value2_1d\n" + "    ,CAST(NULL AS DOUBLE) AS segment_ext_double_value3_1d\n" + "    ,CAST(NULL AS DOUBLE) AS segment_ext_double_value4_1d\n" + "    ,ddate\n" + "    ,'first_pay_user_reg_day_num' AS segment_type\n" + "    ,3 AS user_type_id\n" + "WHERE   reg_day_num_td_segment_id IS NOT NULL\n" + "GROUP BY ddate\n" + "     ,game_id\n" + "     ,plat_id\n" + "     ,channel_group_id\n" + "     ,channel_id\n" + "     ,zone_id\n" + "     ,reg_day_num_td_segment_id\n" + ";";
    List<SQLStatement> statementList = SQLUtils.parseStatements(sql, DbType.hive);
    HiveMultiInsertStatement stmt = (HiveMultiInsertStatement) statementList.get(0);
    assertEquals("FROM (\n" + "\tSELECT a.ddate, a.game_id, a.plat_id, a.channel_group_id, a.channel_id\n" + "\t\t, a.zone_id, a.player_id, pay_amt_1d, pay_cnt_1d\n" + "\t\t, CASE \n" + "\t\t\tWHEN actv_day_num_td = 0 THEN 0\n" + "\t\t\tWHEN actv_day_num_td <= 3 THEN 1\n" + "\t\t\tWHEN actv_day_num_td <= 6 THEN 2\n" + "\t\t\tWHEN actv_day_num_td <= 13 THEN 3\n" + "\t\t\tWHEN actv_day_num_td <= 21 THEN 4\n" + "\t\t\tWHEN actv_day_num_td <= 30 THEN 5\n" + "\t\t\tWHEN actv_day_num_td <= 60 THEN 6\n" + "\t\t\tWHEN actv_day_num_td <= 90 THEN 7\n" + "\t\t\tWHEN actv_day_num_td > 90 THEN 99\n" + "\t\t\tELSE NULL\n" + "\t\tEND AS actv_day_num_td_segment_id\n" + "\t\t, CASE \n" + "\t\t\tWHEN reg_day_num_td = 0 THEN 0\n" + "\t\t\tWHEN reg_day_num_td <= 3 THEN 1\n" + "\t\t\tWHEN reg_day_num_td <= 6 THEN 2\n" + "\t\t\tWHEN reg_day_num_td <= 13 THEN 3\n" + "\t\t\tWHEN reg_day_num_td <= 21 THEN 4\n" + "\t\t\tWHEN reg_day_num_td <= 30 THEN 5\n" + "\t\t\tWHEN reg_day_num_td <= 60 THEN 6\n" + "\t\t\tWHEN reg_day_num_td <= 90 THEN 7\n" + "\t\t\tWHEN reg_day_num_td > 90 THEN 99\n" + "\t\t\tELSE NULL\n" + "\t\tEND AS reg_day_num_td_segment_id\n" + "\tFROM (\n" + "\t\tSELECT ddate, game_id, plat_id, channel_group_id, channel_id\n" + "\t\t\t, zone_id, player_id, pay_amt_1d, pay_cnt_1d\n" + "\t\tFROM dws_game_sdk_base.dws_game_sdk_pay_user_base_d a\n" + "\t\tWHERE a.ddate >= '20200512'\n" + "\t\t\tAND a.ddate < '20200513'\n" + "\t\t\tAND a.first_pay_flag = 1\n" + "\t) a\n" + "\t\tINNER JOIN (\n" + "\t\t\tSELECT ddate, game_id, plat_id, channel_group_id, channel_id\n" + "\t\t\t\t, zone_id, player_id, actv_day_num_td\n" + "\t\t\t\t, udf.datediff(ddate, reg_date) AS reg_day_num_td\n" + "\t\t\tFROM dws_game_sdk_base.dws_game_sdk_user_actv_base_d a\n" + "\t\t\tWHERE a.ddate >= '20200512'\n" + "\t\t\t\tAND a.ddate < '20200513'\n" + "\t\t) b\n" + "\t\tON a.ddate = b.ddate\n" + "\t\t\tAND a.game_id = b.game_id\n" + "\t\t\tAND a.plat_id = b.plat_id\n" + "\t\t\tAND a.channel_group_id = b.channel_group_id\n" + "\t\t\tAND a.channel_id = b.channel_id\n" + "\t\t\tAND a.zone_id = b.zone_id\n" + "\t\t\tAND a.player_id = b.player_id\n" + ") d\n" + "INSERT OVERWRITE TABLE ads_game_sdk_base.ads_rpt_game_sdk_user_segment_d PARTITION (ddate, segment_type, user_type_id)\n" + "SELECT game_id, plat_id, channel_group_id, channel_id, zone_id\n" + "\t, actv_day_num_td_segment_id AS segment_id\n" + "\t, CASE \n" + "\t\tWHEN actv_day_num_td_segment_id = 0 THEN '0天'\n" + "\t\tWHEN actv_day_num_td_segment_id = 1 THEN '1~3天'\n" + "\t\tWHEN actv_day_num_td_segment_id = 2 THEN '4~6天'\n" + "\t\tWHEN actv_day_num_td_segment_id = 3 THEN '7~13天'\n" + "\t\tWHEN actv_day_num_td_segment_id = 4 THEN '14~21天'\n" + "\t\tWHEN actv_day_num_td_segment_id = 5 THEN '21~30天'\n" + "\t\tWHEN actv_day_num_td_segment_id = 6 THEN '30~60天'\n" + "\t\tWHEN actv_day_num_td_segment_id = 7 THEN '60~90天'\n" + "\t\tWHEN actv_day_num_td_segment_id = 99 THEN '>90天'\n" + "\tEND AS segment_name, COUNT(1) AS segment_user_num_1d, CAST(NULL AS BIGINT) AS segment_value_sum_1d\n" + "\t, SUM(pay_cnt_1d) AS segment_ext_int_value1_1d, CAST(NULL AS BIGINT) AS segment_ext_int_value2_1d, CAST(NULL AS BIGINT) AS segment_ext_int_value3_1d\n" + "\t, CAST(NULL AS BIGINT) AS segment_ext_int_value1_4d, SUM(pay_amt_1d) AS segment_ext_int_value4_1d, CAST(NULL AS DOUBLE) AS segment_ext_double_value2_1d, CAST(NULL AS DOUBLE) AS segment_ext_double_value3_1d\n" + "\t, CAST(NULL AS DOUBLE) AS segment_ext_double_value4_1d, ddate\n" + "\t, 'first_pay_user_actv_day_num' AS segment_type, 3 AS 
user_type_id\n" + "WHERE actv_day_num_td_segment_id IS NOT NULL\n" + "GROUP BY ddate, game_id, plat_id, channel_group_id, channel_id, zone_id, actv_day_num_td_segment_id\n" + "INSERT OVERWRITE TABLE ads_game_sdk_base.ads_rpt_game_sdk_user_segment_d PARTITION (ddate, segment_type, user_type_id)\n" + "SELECT game_id, plat_id, channel_group_id, channel_id, zone_id\n" + "\t, reg_day_num_td_segment_id AS segment_id\n" + "\t, CASE \n" + "\t\tWHEN reg_day_num_td_segment_id = 0 THEN '0天'\n" + "\t\tWHEN reg_day_num_td_segment_id = 1 THEN '1~3天'\n" + "\t\tWHEN reg_day_num_td_segment_id = 2 THEN '4~6天'\n" + "\t\tWHEN reg_day_num_td_segment_id = 3 THEN '7~13天'\n" + "\t\tWHEN reg_day_num_td_segment_id = 4 THEN '14~21天'\n" + "\t\tWHEN reg_day_num_td_segment_id = 5 THEN '21~30天'\n" + "\t\tWHEN reg_day_num_td_segment_id = 6 THEN '30~60天'\n" + "\t\tWHEN reg_day_num_td_segment_id = 7 THEN '60~90天'\n" + "\t\tWHEN reg_day_num_td_segment_id = 99 THEN '>90天'\n" + "\tEND AS segment_name, COUNT(1) AS segment_user_num_1d, CAST(NULL AS BIGINT) AS segment_value_sum_1d\n" + "\t, SUM(pay_cnt_1d) AS segment_ext_int_value1_1d, CAST(NULL AS BIGINT) AS segment_ext_int_value2_1d, CAST(NULL AS BIGINT) AS segment_ext_int_value3_1d\n" + "\t, CAST(NULL AS BIGINT) AS segment_ext_int_value1_4d, SUM(pay_amt_1d) AS segment_ext_int_value4_1d, CAST(NULL AS DOUBLE) AS segment_ext_double_value2_1d, CAST(NULL AS DOUBLE) AS segment_ext_double_value3_1d\n" + "\t, CAST(NULL AS DOUBLE) AS segment_ext_double_value4_1d, ddate\n" + "\t, 'first_pay_user_reg_day_num' AS segment_type, 3 AS user_type_id\n" + "WHERE reg_day_num_td_segment_id IS NOT NULL\n" + "GROUP BY ddate, game_id, plat_id, channel_group_id, channel_id, zone_id, reg_day_num_td_segment_id;", stmt.toString());
// SQLUtils.toSQLString(stmt, DbType.hive)
}
Also used: HiveMultiInsertStatement (com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement), SQLStatement (com.alibaba.druid.sql.ast.SQLStatement)
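
A short follow-up to the test above: once parsed, the statement can be walked branch by branch. getItems() is assumed to pair with the addItem(...) calls in the parser examples, and getTableName() is assumed to come from the SQLInsertInto base of HiveInsert; neither accessor is verified here.

HiveMultiInsertStatement multi = (HiveMultiInsertStatement) statementList.get(0);
// getFrom() pairs with the setFrom(...) calls in the parser examples.
System.out.println(multi.getFrom());
for (HiveInsert insert : multi.getItems()) {          // assumed accessor for the addItem(...) list
    System.out.println(insert.getTableName());        // assumed: the branch's insert target
}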

Aggregations

HiveMultiInsertStatement (com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement): 5 usages
HiveInsert (com.alibaba.druid.sql.dialect.hive.ast.HiveInsert): 3 usages
SQLStatement (com.alibaba.druid.sql.ast.SQLStatement): 1 usage
SQLAllColumnExpr (com.alibaba.druid.sql.ast.expr.SQLAllColumnExpr): 1 usage
TableStat (com.alibaba.druid.stat.TableStat): 1 usage