Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
From class TestHiveAuthorizationTaskFactory, method testRevokeGroupTable:
/**
 * REVOKE ... ON TABLE ... FROM GROUP ...
 */
@Test
public void testRevokeGroupTable() throws Exception {
  DDLWork work = analyze("REVOKE " + SELECT + " ON TABLE " + TABLE + " FROM GROUP " + GROUP);
  RevokeDesc revokeDesc = work.getRevokeDesc();
  Assert.assertNotNull("Revoke should not be null", revokeDesc);
  for (PrincipalDesc principal : ListSizeMatcher.inList(revokeDesc.getPrincipals()).ofSize(1)) {
    Assert.assertEquals(PrincipalType.GROUP, principal.getType());
    Assert.assertEquals(GROUP, principal.getName());
  }
  for (PrivilegeDesc privilege : ListSizeMatcher.inList(revokeDesc.getPrivileges()).ofSize(1)) {
    Assert.assertEquals(Privilege.SELECT, privilege.getPrivilege());
  }
  Assert.assertTrue("Expected table", revokeDesc.getPrivilegeSubjectDesc().getTable());
  Assert.assertEquals(TABLE_QNAME, revokeDesc.getPrivilegeSubjectDesc().getObject());
}
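ListSizeMatcher here is a small fluent test helper. A minimal sketch of one way it could be implemented, assuming its only job is to assert the list size before handing the list back for iteration (this reconstruction is illustrative, not the Hive source):

import java.util.List;
import org.junit.Assert;

final class ListSizeMatcher<T> {
  private final List<T> list;

  private ListSizeMatcher(List<T> list) {
    this.list = list;
  }

  // Wrap a list so its size can be asserted fluently.
  static <T> ListSizeMatcher<T> inList(List<T> list) {
    return new ListSizeMatcher<T>(list);
  }

  // Assert the expected size, then return the list for iteration.
  List<T> ofSize(int expectedSize) {
    Assert.assertNotNull("list should not be null", list);
    Assert.assertEquals("unexpected list size", expectedSize, list.size());
    return list;
  }
}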
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
From class TestHiveDecimalParse, method getColumnType:
private String getColumnType(String query) {
  Driver driver = createDriver();
  int rc = driver.compile(query);
  if (rc != 0) {
    return null;
  }
  QueryPlan plan = driver.getPlan();
  DDLTask task = (DDLTask) plan.getRootTasks().get(0);
  DDLWork work = task.getWork();
  CreateTableDesc spec = work.getCreateTblDesc();
  FieldSchema fs = spec.getCols().get(0);
  return fs.getType();
}
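A hedged usage sketch for this helper, in the style of the tests in TestHiveDecimalParse (the table name and the expected normalized type string are illustrative assumptions):

@Test
public void testDecimalDeclaration() throws Exception {
  // Assumes Hive normalizes the declared type to "decimal(10,2)";
  // the table name dec_test is illustrative.
  String columnType = getColumnType("create table dec_test(d decimal(10,2))");
  Assert.assertEquals("decimal(10,2)", columnType);
}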
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeAlterTablePartColType:
private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast) throws SemanticException {
  // check that the table exists
  Table tab = getTable(qualified);
  inputs.add(new ReadEntity(tab));
  // validate that this DDL is a valid operation on the table
  validateAlterTableType(tab, AlterTableTypes.ALTERPARTITION, false);
  // ALTER TABLE ... PARTITION COLUMN (column newtype) takes only one column
  // at a time: a column name followed by a type
  ASTNode colAst = (ASTNode) ast.getChild(0);
  FieldSchema newCol = new FieldSchema();
  // get the column name
  String name = colAst.getChild(0).getText().toLowerCase();
  newCol.setName(unescapeIdentifier(name));
  // get the column type
  ASTNode typeChild = (ASTNode) colAst.getChild(1);
  newCol.setType(getTypeStringFromAST(typeChild));
  if (colAst.getChildCount() == 3) {
    newCol.setComment(unescapeSQLString(colAst.getChild(2).getText()));
  }
  // check whether the column is defined as a partition key
  boolean foundColumn = false;
  for (FieldSchema col : tab.getTTable().getPartitionKeys()) {
    if (col.getName().compareTo(newCol.getName()) == 0) {
      foundColumn = true;
    }
  }
  // raise an error if the column was not found
  if (!foundColumn) {
    throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(newCol.getName()));
  }
  AlterTableAlterPartDesc alterTblAlterPartDesc = new AlterTableAlterPartDesc(getDotName(qualified), newCol);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblAlterPartDesc), conf));
}
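For context, a hedged sketch of the statement shape this analyzer handles, compiled through a Driver as in the decimal test above (page_view and dt are illustrative names, and createDriver() is assumed from that test class):

@Test
public void testAlterPartitionColumnType() throws Exception {
  Driver driver = createDriver();
  // Changing the type of the existing partition column dt routes through
  // analyzeAlterTablePartColType; a column that is not a partition key
  // would raise ErrorMsg.INVALID_COLUMN instead.
  int rc = driver.compile("ALTER TABLE page_view PARTITION COLUMN (dt date)");
  Assert.assertEquals("expected successful compile", 0, rc);
}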
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
From class DDLSemanticAnalyzer, method analyzeShowColumns:
private void analyzeShowColumns(ASTNode ast) throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  if (ast.getChildCount() > 1) {
    if (tableName.contains(".")) {
      throw new SemanticException("Duplicates declaration for database name");
    }
    tableName = getUnescapedName((ASTNode) ast.getChild(1)) + "." + tableName;
  }
  Table tab = getTable(tableName);
  inputs.add(new ReadEntity(tab));
  ShowColumnsDesc showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showColumnsDesc), conf));
  setFetchTask(createFetchTask(showColumnsDesc.getSchema()));
}
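A hedged sketch of the statement forms this analyzer accepts (page_view, db1, and createDriver() are illustrative assumptions): the one-child form resolves against the current database, the two-child form qualifies the table via the IN clause, and an already-qualified name combined with IN trips the duplicate-database check above:

@Test
public void testShowColumnsForms() throws Exception {
  Driver driver = createDriver();
  // Unqualified: resolved against the current database.
  Assert.assertEquals(0, driver.compile("SHOW COLUMNS FROM page_view"));
  // Qualified through the IN clause: becomes db1.page_view.
  Assert.assertEquals(0, driver.compile("SHOW COLUMNS FROM page_view IN db1"));
  // Already-qualified name plus an IN clause hits the duplicate check and fails.
  Assert.assertTrue(driver.compile("SHOW COLUMNS FROM db1.page_view IN db1") != 0);
}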
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
From class HiveAuthorizationTaskFactoryImpl, method analyzeGrantRevokeRole:
private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) {
  List<PrincipalDesc> principalDesc =
      AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(0));
  // check whether the admin option has been specified
  int rolesStartPos = 1;
  ASTNode wAdminOption = (ASTNode) ast.getChild(1);
  boolean isAdmin = false;
  if ((isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION)
      || (!isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_ADMIN_OPTION_FOR)) {
    // start reading role names from the next position
    rolesStartPos = 2;
    isAdmin = true;
  }
  List<String> roles = new ArrayList<String>();
  for (int i = rolesStartPos; i < ast.getChildCount(); i++) {
    roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
  }
  String roleOwnerName = SessionState.getUserFromAuthenticator();
  // until a change is made to use the admin option; defaults to false with V2 authorization
  GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant, roles, principalDesc,
      roleOwnerName, PrincipalType.USER, isAdmin);
  return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL), conf);
}
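Hedged examples of statements that reach this method and the flags derived from each; role and principal names are illustrative, createDriver() is assumed from the earlier tests, and the exact GRANT/REVOKE ROLE grammar varies across Hive versions:

@Test
public void testGrantRevokeRoleForms() throws Exception {
  Driver driver = createDriver();
  // isGrant = true, isAdmin = true: TOK_GRANT_WITH_ADMIN_OPTION at child 1,
  // so role names start at child 2.
  driver.compile("GRANT ROLE analyst TO USER alice WITH ADMIN OPTION");
  // isGrant = false, isAdmin = true: TOK_ADMIN_OPTION_FOR at child 1.
  driver.compile("REVOKE ADMIN OPTION FOR ROLE analyst FROM USER alice");
  // isGrant = false, isAdmin = false: role names start at child 1.
  driver.compile("REVOKE ROLE analyst FROM GROUP hr");
}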