
Example 86 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class HiveAuthorizationTaskFactoryImpl, method analyzeGrantRevokeRole:

private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) {
    List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(0));
    // check if admin option has been specified
    int rolesStartPos = 1;
    ASTNode wAdminOption = (ASTNode) ast.getChild(1);
    boolean isAdmin = false;
    if ((isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION) || (!isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_ADMIN_OPTION_FOR)) {
        // start reading role names from next position
        rolesStartPos = 2;
        isAdmin = true;
    }
    List<String> roles = new ArrayList<String>();
    for (int i = rolesStartPos; i < ast.getChildCount(); i++) {
        roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
    }
    String roleOwnerName = SessionState.getUserFromAuthenticator();
    // With V2 authorization the admin option defaults to false, until a change is made to use it
    GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant, roles, principalDesc, roleOwnerName, PrincipalType.USER, isAdmin);
    return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL));
}
Also used: PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), GrantRevokeRoleDDL (org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL), ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), ArrayList (java.util.ArrayList)
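
The subtle part above is the child-offset handling: when the optional admin-option token is present at child 1, the role names start at child 2 instead of 1. Here is a minimal, self-contained sketch of the same pattern; the token name is taken from the snippet, but the list-of-strings stand-in for Hive's ASTNode tree is an assumption for illustration, not Hive API.

import java.util.ArrayList;
import java.util.List;

public class RoleListOffsetSketch {
    public static void main(String[] args) {
        // Simulated AST children: principal list at 0, then an optional
        // admin-option token, then one or more role names.
        List<String> children = List.of(
            "TOK_PRINCIPAL_NAME", "TOK_GRANT_WITH_ADMIN_OPTION", "role1", "role2");
        int rolesStartPos = 1;
        boolean isAdmin = false;
        if ("TOK_GRANT_WITH_ADMIN_OPTION".equals(children.get(1))) {
            // The modifier occupies child 1, so role names shift to child 2.
            rolesStartPos = 2;
            isAdmin = true;
        }
        List<String> roles = new ArrayList<>(children.subList(rolesStartPos, children.size()));
        System.out.println("isAdmin=" + isAdmin + " roles=" + roles);
        // Prints: isAdmin=true roles=[role1, role2]
    }
}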

Example 87 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeDropPool:

private void analyzeDropPool(ASTNode ast) throws SemanticException {
    if (ast.getChildCount() != 2) {
        throw new SemanticException("Invalid syntax for drop pool.");
    }
    String rpName = unescapeIdentifier(ast.getChild(0).getText());
    String poolPath = poolPath(ast.getChild(1));
    DropWMPoolDesc desc = new DropWMPoolDesc(rpName, poolPath);
    addServiceOutput();
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: DropWMPoolDesc (org.apache.hadoop.hive.ql.plan.DropWMPoolDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)
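
As a rough sketch of the shape analyzeDropPool expects, the two AST children map to a resource-plan name and a pool path. The standalone snippet below mirrors only the arity check; the example names are invented for illustration and are not Hive's grammar.

public class DropPoolShapeSketch {
    // Mirrors analyzeDropPool's arity check on a plain array of child texts.
    static void checkDropPool(String[] children) {
        if (children.length != 2) {
            throw new IllegalArgumentException("Invalid syntax for drop pool.");
        }
        String rpName = children[0];   // resource plan name (child 0)
        String poolPath = children[1]; // pool path within the plan (child 1)
        System.out.println("drop pool " + poolPath + " from plan " + rpName);
    }

    public static void main(String[] args) {
        checkDropPool(new String[] { "daily_plan", "etl.ingest" });
    }
}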

Example 88 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeLockTable:

/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "LOCK TABLE ..;".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeLockTable(ASTNode ast) throws SemanticException {
    String tableName = getUnescapedName((ASTNode) ast.getChild(0)).toLowerCase();
    String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
    List<Map<String, String>> partSpecs = getPartitionSpecs(getTable(tableName), ast);
    // We can only have a single partition spec
    assert (partSpecs.size() <= 1);
    Map<String, String> partSpec = null;
    if (partSpecs.size() > 0) {
        partSpec = partSpecs.get(0);
    }
    LockTableDesc lockTblDesc = new LockTableDesc(tableName, mode, partSpec, HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
    lockTblDesc.setQueryStr(this.ctx.getCmd());
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), lockTblDesc)));
    // Need to initialize the lock manager
    ctx.setNeedLockMgr(true);
}
Also used: LockTableDesc (org.apache.hadoop.hive.ql.plan.LockTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), Map (java.util.Map), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap)
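
The partition handling above reduces a list of at most one spec to a nullable map, and the table name is normalized to lower case. A minimal standalone sketch of that reduction, using plain Java collections in place of Hive's parser types:

import java.util.List;
import java.util.Map;

public class LockTableSpecSketch {
    public static void main(String[] args) {
        // At most one partition spec is expected for LOCK TABLE.
        List<Map<String, String>> partSpecs = List.of(Map.of("ds", "2024-01-01"));
        assert partSpecs.size() <= 1;
        // Reduce the list to a single nullable spec, as analyzeLockTable does.
        Map<String, String> partSpec = partSpecs.isEmpty() ? null : partSpecs.get(0);
        String tableName = "Sales_Data".toLowerCase(); // names are lower-cased
        System.out.println("lock " + tableName + " partition=" + partSpec);
    }
}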

Example 89 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeAlterTableCompact:

private void analyzeAlterTableCompact(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
    String type = unescapeSQLString(ast.getChild(0).getText()).toLowerCase();
    if (!type.equals("minor") && !type.equals("major")) {
        throw new SemanticException(ErrorMsg.INVALID_COMPACTION_TYPE.getMsg());
    }
    LinkedHashMap<String, String> newPartSpec = null;
    if (partSpec != null) {
        newPartSpec = new LinkedHashMap<String, String>(partSpec);
    }
    HashMap<String, String> mapProp = null;
    boolean isBlocking = false;
    for (int i = 0; i < ast.getChildCount(); i++) {
        switch(ast.getChild(i).getType()) {
            case HiveParser.TOK_TABLEPROPERTIES:
                mapProp = getProps((ASTNode) (ast.getChild(i)).getChild(0));
                break;
            case HiveParser.TOK_BLOCKING:
                isBlocking = true;
                break;
        }
    }
    AlterTableSimpleDesc desc = new AlterTableSimpleDesc(tableName, newPartSpec, type, isBlocking);
    desc.setProps(mapProp);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: AlterTableSimpleDesc (org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint), DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
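
The option scan above validates the compaction type first, then tolerates the optional children (table properties, blocking flag) in any order. A self-contained sketch of the same scan, with string tokens standing in for HiveParser token types (an assumption for illustration):

import java.util.List;

public class CompactOptionScanSketch {
    public static void main(String[] args) {
        // Only "minor" and "major" are accepted compaction types.
        String type = "MAJOR".toLowerCase();
        if (!type.equals("minor") && !type.equals("major")) {
            throw new IllegalArgumentException("invalid compaction type: " + type);
        }
        // Optional children may appear after the type in any order.
        List<String> children = List.of("MAJOR", "TOK_BLOCKING");
        boolean isBlocking = false;
        for (String child : children) {
            switch (child) {
                case "TOK_TABLEPROPERTIES":
                    // table properties would be collected here
                    break;
                case "TOK_BLOCKING":
                    isBlocking = true; // block until the compaction finishes
                    break;
                default:
                    break; // the type token itself, and anything unrecognized
            }
        }
        System.out.println("type=" + type + " blocking=" + isBlocking);
    }
}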

Example 90 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeAlterTableRenameCol:

private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast, HashMap<String, String> partSpec) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
    ASTNode constraintChild = null;
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        ASTNode child = (ASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveParser.StringLiteral:
                newComment = unescapeSQLString(child.getText());
                break;
            case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveParser.KW_FIRST:
                first = true;
                break;
            case HiveParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveParser.TOK_RESTRICT:
                break;
            default:
                constraintChild = child;
        }
    }
    List<SQLPrimaryKey> primaryKeys = null;
    List<SQLForeignKey> foreignKeys = null;
    List<SQLUniqueConstraint> uniqueConstraints = null;
    List<SQLNotNullConstraint> notNullConstraints = null;
    List<SQLDefaultConstraint> defaultConstraints = null;
    List<SQLCheckConstraint> checkConstraints = null;
    if (constraintChild != null) {
        // Process column constraint
        switch(constraintChild.getToken().getType()) {
            case HiveParser.TOK_CHECK_CONSTRAINT:
                checkConstraints = new ArrayList<>();
                processCheckConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), checkConstraints, (ASTNode) ast.getChild(2), this.ctx.getTokenRewriteStream());
                break;
            case HiveParser.TOK_DEFAULT_VALUE:
                defaultConstraints = new ArrayList<>();
                processDefaultConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), defaultConstraints, (ASTNode) ast.getChild(2));
                break;
            case HiveParser.TOK_NOT_NULL:
                notNullConstraints = new ArrayList<>();
                processNotNullConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), notNullConstraints);
                break;
            case HiveParser.TOK_UNIQUE:
                uniqueConstraints = new ArrayList<>();
                processUniqueConstraints(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), uniqueConstraints);
                break;
            case HiveParser.TOK_PRIMARY_KEY:
                primaryKeys = new ArrayList<>();
                processPrimaryKeys(qualified[0], qualified[1], constraintChild, ImmutableList.of(newColName), primaryKeys);
                break;
            case HiveParser.TOK_FOREIGN_KEY:
                foreignKeys = new ArrayList<>();
                processForeignKeys(qualified[0], qualified[1], constraintChild, foreignKeys);
                break;
            default:
                throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintChild.getToken().getText()));
        }
    }
    /* Validate the operation of renaming a column name. */
    Table tab = getTable(qualified);
    if (checkConstraints != null && !checkConstraints.isEmpty()) {
        validateCheckConstraint(tab.getCols(), checkConstraints, ctx.getConf());
    }
    if (tab.getTableType() == TableType.EXTERNAL_TABLE && hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraints are disallowed with External tables. " + "Only RELY is allowed."));
    }
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new SemanticException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = getDotName(qualified);
    AlterTableDesc alterTblDesc;
    if (primaryKeys == null && foreignKeys == null && uniqueConstraints == null && notNullConstraints == null && defaultConstraints == null && checkConstraints == null) {
        alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade);
    } else {
        alterTblDesc = new AlterTableDesc(tblName, partSpec, unescapeIdentifier(oldColName), unescapeIdentifier(newColName), newType, newComment, first, flagCol, isCascade, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
    }
    addInputsOutputsAlterTable(tblName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used: SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), Table (org.apache.hadoop.hive.ql.metadata.Table), SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint), DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)
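
The constraint dispatch above initializes exactly one of six lists, and which list ends up non-null later selects the AlterTableDesc constructor. A stripped-down sketch of that dispatch, showing two of the branches with plain strings in place of Hive token types (illustrative names, not Hive API):

import java.util.ArrayList;
import java.util.List;

public class ConstraintDispatchSketch {
    public static void main(String[] args) {
        // The single trailing constraint child, if any, decides the branch.
        String constraintToken = "TOK_NOT_NULL";
        List<String> notNullConstraints = null;
        List<String> uniqueConstraints = null;
        switch (constraintToken) {
            case "TOK_NOT_NULL":
                notNullConstraints = new ArrayList<>();
                notNullConstraints.add("new_col");
                break;
            case "TOK_UNIQUE":
                uniqueConstraints = new ArrayList<>();
                uniqueConstraints.add("new_col");
                break;
            default:
                throw new IllegalArgumentException(
                    "unrecognized constraint: " + constraintToken);
        }
        // Downstream code branches on which lists are non-null, exactly as
        // analyzeAlterTableRenameCol picks between the two constructors.
        System.out.println("notNull=" + notNullConstraints + " unique=" + uniqueConstraints);
    }
}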

Aggregations

DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 141 uses
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 26 uses
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 24 uses
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 24 uses
Table (org.apache.hadoop.hive.ql.metadata.Table): 22 uses
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 20 uses
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 20 uses
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 20 uses
PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc): 20 uses
Test (org.junit.Test): 20 uses
ArrayList (java.util.ArrayList): 19 uses
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 19 uses
NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint): 19 uses
HashMap (java.util.HashMap): 17 uses
LinkedHashMap (java.util.LinkedHashMap): 16 uses
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 14 uses
Task (org.apache.hadoop.hive.ql.exec.Task): 11 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 11 uses
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 10 uses
Serializable (java.io.Serializable): 9 uses