Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class TaskCompiler, method patchUpAfterCTASorMaterializedView:
private void patchUpAfterCTASorMaterializedView(final List<Task<? extends Serializable>> rootTasks,
    final HashSet<WriteEntity> outputs, Task<? extends Serializable> createTask) {
  // clear the mapredWork output file from outputs for CTAS;
  // the DDLWork at the tail of the chain will have the output
  Iterator<WriteEntity> outIter = outputs.iterator();
  while (outIter.hasNext()) {
    switch (outIter.next().getType()) {
      case DFS_DIR:
      case LOCAL_DIR:
        outIter.remove();
        break;
      default:
        break;
    }
  }
  // find all leaf tasks and make the DDLTask a dependent task of all of them
  HashSet<Task<? extends Serializable>> leaves = new LinkedHashSet<>();
  getLeafTasks(rootTasks, leaves);
  assert (leaves.size() > 0);
  Task<? extends Serializable> targetTask = createTask;
  for (Task<? extends Serializable> task : leaves) {
    if (task instanceof StatsTask) {
      // StatsTask requires the table to already exist
      for (Task<? extends Serializable> parentOfStatsTask : task.getParentTasks()) {
        parentOfStatsTask.addDependentTask(createTask);
      }
      for (Task<? extends Serializable> parentOfCrtTblTask : createTask.getParentTasks()) {
        parentOfCrtTblTask.removeDependentTask(task);
      }
      createTask.addDependentTask(task);
      targetTask = task;
    } else {
      task.addDependentTask(createTask);
    }
  }
  // Add a task to insert / delete the materialized view from the registry if needed
  if (createTask instanceof DDLTask) {
    DDLTask ddlTask = (DDLTask) createTask;
    DDLWork work = ddlTask.getWork();
    String tableName = null;
    boolean retrieveAndInclude = false;
    boolean disableRewrite = false;
    if (work.getCreateViewDesc() != null && work.getCreateViewDesc().isMaterialized()) {
      tableName = work.getCreateViewDesc().getViewName();
      retrieveAndInclude = work.getCreateViewDesc().isRewriteEnabled();
    } else if (work.getAlterMaterializedViewDesc() != null) {
      tableName = work.getAlterMaterializedViewDesc().getMaterializedViewName();
      if (work.getAlterMaterializedViewDesc().isRewriteEnable()) {
        retrieveAndInclude = true;
      } else {
        disableRewrite = true;
      }
    } else {
      return;
    }
    targetTask.addDependentTask(
        TaskFactory.get(new MaterializedViewDesc(tableName, retrieveAndInclude, disableRewrite, false), conf));
  }
}
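The method above relies on getLeafTasks(rootTasks, leaves) to collect the tasks with no dependents, but that helper is not part of this snippet. Below is a minimal sketch of such a traversal, assuming Task exposes getDependentTasks(); the helper name collectLeaves and the exact null/empty check are illustrative, not copied from the Hive source.

// Sketch only: recursively walk the task DAG and keep the tasks that have no dependents.
private void getLeafTasks(List<Task<? extends Serializable>> rootTasks,
    HashSet<Task<? extends Serializable>> leaves) {
  for (Task<? extends Serializable> root : rootTasks) {
    collectLeaves(root, leaves);
  }
}

// Hypothetical helper: a task without dependent tasks is a leaf of the compiled plan.
private void collectLeaves(Task<? extends Serializable> task,
    HashSet<Task<? extends Serializable>> leaves) {
  if (task.getDependentTasks() == null || task.getDependentTasks().isEmpty()) {
    leaves.add(task);
  } else {
    for (Task<? extends Serializable> child : task.getDependentTasks()) {
      collectLeaves(child, leaves);
    }
  }
}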
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createRevokeTask:
@Override
public Task<? extends Serializable> createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
  PrivilegeObjectDesc hiveObj = null;
  boolean grantOption = false;
  if (ast.getChildCount() > 2) {
    ASTNode astChild = (ASTNode) ast.getChild(2);
    hiveObj = analyzePrivilegeObject(astChild, outputs);
    if (null != ast.getFirstChildWithType(HiveParser.TOK_GRANT_OPTION_FOR)) {
      grantOption = true;
    }
  }
  RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj, grantOption);
  return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc));
}
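For context, a rough usage sketch of this factory method: the returned task wraps a DDLWork carrying the RevokeDesc and is meant to be added to the compiled plan so the Driver executes it. The names factory, revokeAst, and rootTasks below are hypothetical and only illustrate the calling pattern.

// Hypothetical caller; the factory and AST would come from semantic analysis of a REVOKE statement.
HashSet<ReadEntity> inputs = new HashSet<>();
HashSet<WriteEntity> outputs = new HashSet<>();
Task<? extends Serializable> revokeTask = factory.createRevokeTask(revokeAst, inputs, outputs);
// Adding the task to the root tasks lets the Driver run the DDLWork built above.
rootTasks.add(revokeTask);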
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createShowRolesTask:
@Override
public Task<? extends Serializable> createShowRolesTask(ASTNode ast, Path resFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  RoleDDLDesc showRolesDesc = new RoleDDLDesc(null, null, RoleDDLDesc.RoleOperation.SHOW_ROLES, null);
  showRolesDesc.setResFile(resFile.toString());
  return TaskFactory.get(new DDLWork(inputs, outputs, showRolesDesc));
}
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createShowCurrentRoleTask:
@Override
public Task<? extends Serializable> createShowCurrentRoleTask(HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs, Path resFile) throws SemanticException {
  RoleDDLDesc ddlDesc = new RoleDDLDesc(null, RoleDDLDesc.RoleOperation.SHOW_CURRENT_ROLE);
  ddlDesc.setResFile(resFile.toString());
  return TaskFactory.get(new DDLWork(inputs, outputs, ddlDesc));
}
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createShowGrantTask:
@Override
public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  PrincipalDesc principalDesc = null;
  PrivilegeObjectDesc privHiveObj = null;
  ASTNode param = null;
  if (ast.getChildCount() > 0) {
    param = (ASTNode) ast.getChild(0);
    principalDesc = AuthorizationParseUtils.getPrincipalDesc(param);
    if (principalDesc != null) {
      // shift one
      param = (ASTNode) ast.getChild(1);
    }
  }
  if (param != null) {
    if (param.getType() == HiveParser.TOK_RESOURCE_ALL) {
      privHiveObj = new PrivilegeObjectDesc();
    } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
      privHiveObj = parsePrivObject(param);
    }
  }
  ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj);
  return TaskFactory.get(new DDLWork(inputs, outputs, showGrant));
}
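A hedged reading of the index-shifting logic above, shown as annotated statement forms; the concrete SHOW GRANT syntax used here is an assumption meant only to illustrate which branch each form takes.

// Illustrative only; statement forms are assumed, not taken from the Hive grammar.
// SHOW GRANT USER some_user ON TABLE some_table
//   child 0 parses as a principal, so param shifts to child 1 (TOK_PRIV_OBJECT_COL) and parsePrivObject runs.
// SHOW GRANT ON TABLE some_table
//   child 0 does not parse as a principal, so param stays at child 0 and is treated as the privilege object.
// SHOW GRANT USER some_user ON ALL
//   the object token is TOK_RESOURCE_ALL, so an empty PrivilegeObjectDesc stands for "all resources".
// SHOW GRANT
//   no children: both principalDesc and privHiveObj remain null and the ShowGrantDesc is unfiltered.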