Search in sources:

Example 96 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class ReplLoadTask, the method createViewTask.

/**
 * Builds a task that recreates a view during replication load.
 *
 * @param metaData        dumped table metadata describing the view
 * @param dbNameToLoadIn  target database override; when {@code null}, the DB recorded in the dump is used
 * @param conf            Hive configuration used to construct the task
 * @param dumpDirectory   dump location, recorded on the {@link DDLWork} for metric tracking
 * @param metricCollector collector that receives replication metrics for this task
 * @return a CREATE VIEW task wrapped in {@link DDLWork}
 * @throws SemanticException if the qualified view name cannot be formed
 */
public static Task<?> createViewTask(MetaData metaData, String dbNameToLoadIn, HiveConf conf, String dumpDirectory, ReplicationMetricCollector metricCollector) throws SemanticException {
    Table table = new Table(metaData.getTable());
    // Prefer the explicitly requested target DB; fall back to the DB stored in the dump.
    final String targetDb = (dbNameToLoadIn != null) ? dbNameToLoadIn : table.getDbName();
    TableName qualifiedName = HiveTableName.ofNullable(table.getTableName(), targetDb);
    String dbDotView = qualifiedName.getNotEmptyDbTable();
    String originalText = table.getViewOriginalText();
    String expandedText = table.getViewExpandedText();
    if (!targetDb.equals(table.getDbName())) {
    // TODO: If the DB name doesn't match with the metadata from dump, then need to rewrite the original and expanded
    // texts using new DB name. Currently it refers to the source database name.
    }
    CreateViewDesc desc = new CreateViewDesc(dbDotView, table.getCols(), null, table.getParameters(), table.getPartColNames(), false, false, originalText, expandedText, table.getPartCols());
    desc.setReplicationSpec(metaData.getReplicationSpec());
    desc.setOwnerName(table.getOwner());
    // Replication-load DDLWork: no read/write entities yet, flagged for metric collection.
    return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc, true, dumpDirectory, metricCollector), conf);
}
Also used : TableName(org.apache.hadoop.hive.common.TableName) HiveTableName(org.apache.hadoop.hive.ql.parse.HiveTableName) CreateViewDesc(org.apache.hadoop.hive.ql.ddl.view.create.CreateViewDesc) LoadTable(org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.table.LoadTable) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) HashSet(java.util.HashSet)

Example 97 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class HiveAuthorizationTaskFactoryImpl, the method createShowGrantTask.

/**
 * Creates a task for SHOW GRANT: an optional principal followed by an
 * optional privilege object, both parsed from the AST children.
 *
 * @param ast        SHOW GRANT parse node; child 0 may be a principal, the next child a privilege object
 * @param resultFile location where the task writes its output
 * @param inputs     read entities recorded on the resulting {@link DDLWork}
 * @param outputs    write entities recorded on the resulting {@link DDLWork}
 * @return the SHOW GRANT task
 * @throws SemanticException if the privilege object cannot be parsed
 */
@Override
public Task<?> createShowGrantTask(ASTNode ast, Path resultFile, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
    PrincipalDesc principal = null;
    PrivilegeObjectDesc privilegeObject = null;
    ASTNode node = null;
    if (ast.getChildCount() > 0) {
        node = (ASTNode) ast.getChild(0);
        principal = AuthorizationParseUtils.getPrincipalDesc(node);
        if (principal != null) {
            // First child was the principal; the privilege object (if any) follows it.
            node = (ASTNode) ast.getChild(1);
        }
    }
    if (node != null) {
        switch (node.getType()) {
            case HiveParser.TOK_RESOURCE_ALL:
                privilegeObject = new PrivilegeObjectDesc(true, null, null, null);
                break;
            case HiveParser.TOK_PRIV_OBJECT_COL:
                privilegeObject = parsePrivObject(node);
                break;
            default:
                // Any other token leaves the privilege object unset, matching original behavior.
                break;
        }
    }
    ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principal, privilegeObject);
    return TaskFactory.get(new DDLWork(inputs, outputs, showGrant));
}
Also used : PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) PrivilegeObjectDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ShowGrantDesc(org.apache.hadoop.hive.ql.ddl.privilege.show.grant.ShowGrantDesc)

Example 98 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class HiveAuthorizationTaskFactoryImpl, the method createShowRolePrincipalsTask.

/**
 * Creates a task for SHOW ROLE PRINCIPALS &lt;role&gt;.
 *
 * @param ast     parse node expected to carry exactly one child: the role name
 * @param resFile location where the task writes its output
 * @param inputs  read entities recorded on the resulting {@link DDLWork}
 * @param outputs write entities recorded on the resulting {@link DDLWork}
 * @return the SHOW ROLE PRINCIPALS task
 * @throws SemanticException declared for interface compatibility
 */
@Override
public Task<?> createShowRolePrincipalsTask(ASTNode ast, Path resFile, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
    if (ast.getChildCount() != 1) {
        // the parser should not allow this
        throw new AssertionError("Unexpected Tokens in SHOW ROLE PRINCIPALS");
    }
    String roleName = ast.getChild(0).getText();
    ShowPrincipalsDesc desc = new ShowPrincipalsDesc(roleName, resFile.toString());
    return TaskFactory.get(new DDLWork(inputs, outputs, desc));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ShowPrincipalsDesc(org.apache.hadoop.hive.ql.ddl.privilege.show.principals.ShowPrincipalsDesc)

Example 99 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class HiveAuthorizationTaskFactoryImpl, the method analyzeGrantRevokeRole.

/**
 * Shared analysis for GRANT ROLE / REVOKE ROLE statements.
 * Child 0 holds the principal list; child 1 may be the admin-option token
 * (WITH ADMIN OPTION for grant, ADMIN OPTION FOR for revoke); remaining
 * children are the role names.
 *
 * @param isGrant {@code true} to build a grant-role task, {@code false} for revoke
 * @param ast     the GRANT/REVOKE ROLE parse node
 * @param inputs  read entities recorded on the resulting {@link DDLWork}
 * @param outputs write entities recorded on the resulting {@link DDLWork}
 * @return the grant or revoke role task
 */
private Task<?> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast, Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
    List<PrincipalDesc> principals = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(0));
    // Detect the optional admin-option token; its presence shifts where role names begin.
    int optionToken = ((ASTNode) ast.getChild(1)).getToken().getType();
    boolean withAdmin = isGrant
        ? optionToken == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION
        : optionToken == HiveParser.TOK_ADMIN_OPTION_FOR;
    int firstRoleIdx = withAdmin ? 2 : 1;
    List<String> roleNames = new ArrayList<String>();
    for (int idx = firstRoleIdx; idx < ast.getChildCount(); idx++) {
        roleNames.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(idx).getText()));
    }
    String grantor = SessionState.getUserFromAuthenticator();
    if (isGrant) {
        GrantRoleDesc grantDesc = new GrantRoleDesc(roleNames, principals, grantor, withAdmin);
        return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc));
    }
    RevokeRoleDesc revokeDesc = new RevokeRoleDesc(roleNames, principals, grantor, withAdmin);
    return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc));
}
Also used : PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) RevokeRoleDesc(org.apache.hadoop.hive.ql.ddl.privilege.role.revoke.RevokeRoleDesc) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ArrayList(java.util.ArrayList) GrantRoleDesc(org.apache.hadoop.hive.ql.ddl.privilege.role.grant.GrantRoleDesc)

Example 100 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class HiveAuthorizationTaskFactoryImpl, the method createCreateRoleTask.

/**
 * Creates a task for CREATE ROLE &lt;name&gt;; the role name is read from the
 * first AST child and unescaped before use.
 *
 * @param ast     CREATE ROLE parse node whose child 0 is the role identifier
 * @param inputs  read entities recorded on the resulting {@link DDLWork}
 * @param outputs write entities recorded on the resulting {@link DDLWork}
 * @return the CREATE ROLE task
 */
@Override
public Task<?> createCreateRoleTask(ASTNode ast, Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
    final String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    return TaskFactory.get(new DDLWork(inputs, outputs, new CreateRoleDesc(roleName)));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) CreateRoleDesc(org.apache.hadoop.hive.ql.ddl.privilege.role.create.CreateRoleDesc)

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)153 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)61 Table (org.apache.hadoop.hive.ql.metadata.Table)34 ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity)31 ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)24 TableName (org.apache.hadoop.hive.common.TableName)23 Test (org.junit.Test)23 WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)22 PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc)21 ArrayList (java.util.ArrayList)18 Path (org.apache.hadoop.fs.Path)15 HashMap (java.util.HashMap)14 Database (org.apache.hadoop.hive.metastore.api.Database)12 Task (org.apache.hadoop.hive.ql.exec.Task)12 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)11 Tree (org.antlr.runtime.tree.Tree)10 HashSet (java.util.HashSet)9 Context (org.apache.hadoop.hive.ql.Context)9 PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc)9 ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc)8