Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
The class LineageInfo, method getLineageInfo:
/**
 * Parses the given query and extracts the lineage info.
 *
 * @param query the query to analyze
 * @throws ParseException
 * @throws SemanticException
 */
public void getLineageInfo(String query) throws ParseException, SemanticException {
  /*
   * Get the AST tree
   */
  ASTNode tree = ParseUtils.parse(query, null);
  while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
    tree = (ASTNode) tree.getChild(0);
  }
  /*
   * initialize Event Processor and dispatcher.
   */
  inputTableList.clear();
  OutputTableList.clear();
  // create a walker which walks the tree in a DFS manner while maintaining
  // the operator stack. The dispatcher generates the plan from the operator tree.
  Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along.
  Dispatcher disp = new DefaultRuleDispatcher(this, rules, null);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  // Create a list of topop nodes
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.add(tree);
  ogw.startWalking(topNodes, null);
}
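Because the dispatcher is built with this LineageInfo instance as its default processor, the walk repopulates the two table sets as a side effect. A minimal driver sketch, assuming the class also exposes getInputTableList() and getOutputTableList() accessors over the sets cleared at the start of the method (imports for LineageInfo itself depend on its package and are omitted):

import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class LineageInfoDriver {
  public static void main(String[] args) throws ParseException, SemanticException {
    LineageInfo lineage = new LineageInfo();
    // Parse a query and walk its AST; the processor fills the table sets as a side effect.
    lineage.getLineageInfo("INSERT OVERWRITE TABLE dest SELECT a.key, a.value FROM src a");
    // getInputTableList()/getOutputTableList() are assumed accessors over the
    // inputTableList and OutputTableList fields cleared in getLineageInfo.
    System.out.println("input tables:  " + lineage.getInputTableList());
    System.out.println("output tables: " + lineage.getOutputTableList());
  }
}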
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
The class HiveAuthorizationTaskFactoryImpl, method createGrantTask:
@Override
public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
  boolean grantOption = false;
  PrivilegeObjectDesc privilegeObj = null;
  if (ast.getChildCount() > 2) {
    for (int i = 2; i < ast.getChildCount(); i++) {
      ASTNode astChild = (ASTNode) ast.getChild(i);
      if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
        grantOption = true;
      } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
        privilegeObj = analyzePrivilegeObject(astChild, outputs);
      }
    }
  }
  String userName = SessionState.getUserFromAuthenticator();
  GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc, principalDesc, userName, PrincipalType.USER, grantOption);
  return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc));
}
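The method only inspects the immediate children of the grant statement's AST node: child 0 carries the privilege list, child 1 the principal list, and any remaining children are the optional ON clause (TOK_PRIV_OBJECT) and WITH GRANT OPTION marker (TOK_GRANT_WITH_OPTION). A small diagnostic sketch for dumping that layout while debugging; this is a hypothetical helper, not part of the factory:

import org.antlr.runtime.tree.Tree;
import org.apache.hadoop.hive.ql.parse.ASTNode;

final class GrantAstDump {
  private GrantAstDump() {
  }

  // Prints each child's token type and text, e.g. for
  //   GRANT SELECT ON TABLE t TO USER alice WITH GRANT OPTION
  // one child holds the privilege list, one the principal list, and the optional
  // ON clause and WITH GRANT OPTION show up as TOK_PRIV_OBJECT / TOK_GRANT_WITH_OPTION.
  static void dump(ASTNode ast) {
    for (int i = 0; i < ast.getChildCount(); i++) {
      Tree child = ast.getChild(i);
      System.out.println(i + ": type=" + child.getType() + ", text=" + child.getText());
    }
  }
}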
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
The class HiveAuthorizationTaskFactoryImpl, method createShowRoleGrantTask:
@Override
public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) {
  ASTNode child = (ASTNode) ast.getChild(0);
  PrincipalType principalType = PrincipalType.USER;
  switch (child.getType()) {
    case HiveParser.TOK_USER:
      principalType = PrincipalType.USER;
      break;
    case HiveParser.TOK_GROUP:
      principalType = PrincipalType.GROUP;
      break;
    case HiveParser.TOK_ROLE:
      principalType = PrincipalType.ROLE;
      break;
  }
  String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
  RoleDDLDesc roleDesc = new RoleDDLDesc(principalName, principalType, RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT, null);
  roleDesc.setResFile(resultFile.toString());
  return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc));
}
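The switch falls back to PrincipalType.USER when the token is none of TOK_USER, TOK_GROUP, or TOK_ROLE. That token-to-principal mapping can be isolated into a standalone sketch (a hypothetical utility, not part of the Hive sources):

import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.ql.parse.HiveParser;

final class PrincipalTypes {
  private PrincipalTypes() {
  }

  // Mirrors the switch in createShowRoleGrantTask: unknown token types fall back to USER.
  static PrincipalType fromTokenType(int tokenType) {
    switch (tokenType) {
      case HiveParser.TOK_GROUP:
        return PrincipalType.GROUP;
      case HiveParser.TOK_ROLE:
        return PrincipalType.ROLE;
      case HiveParser.TOK_USER:
      default:
        return PrincipalType.USER;
    }
  }
}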
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
The class HiveAuthorizationTaskFactoryImpl, method analyzeGrantRevokeRole:
private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) {
  List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(0));
  // check if admin option has been specified
  int rolesStartPos = 1;
  ASTNode wAdminOption = (ASTNode) ast.getChild(1);
  boolean isAdmin = false;
  if ((isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION) ||
      (!isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_ADMIN_OPTION_FOR)) {
    // start reading role names from the next position
    rolesStartPos = 2;
    isAdmin = true;
  }
  List<String> roles = new ArrayList<String>();
  for (int i = rolesStartPos; i < ast.getChildCount(); i++) {
    roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
  }
  String roleOwnerName = SessionState.getUserFromAuthenticator();
  // until change is made to use the admin option. Default to false with V2 authorization
  GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant, roles, principalDesc, roleOwnerName, PrincipalType.USER, isAdmin);
  return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL));
}
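For a statement such as GRANT ROLE r1, r2 TO USER alice WITH ADMIN OPTION, the principal list sits at child 0, the TOK_GRANT_WITH_ADMIN_OPTION marker at child 1, and the role identifiers follow; on the revoke side the marker token is TOK_ADMIN_OPTION_FOR instead. A sketch isolating just that marker check (a hypothetical helper, not from the Hive sources):

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;

final class AdminOptionCheck {
  private AdminOptionCheck() {
  }

  // Returns true when the admin-option marker is present at child 1, in which case
  // the role names start at child 2 rather than child 1.
  static boolean requested(ASTNode ast, boolean isGrant) {
    if (ast.getChildCount() < 2) {
      return false;
    }
    int type = ((ASTNode) ast.getChild(1)).getToken().getType();
    return isGrant
        ? type == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION
        : type == HiveParser.TOK_ADMIN_OPTION_FOR;
  }
}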
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani.
The class WindowFunctionTranslation, method translate:
public static WindowFunctionDef translate(QueryDef qDef, TableFuncDef windowTableFnDef, WindowFunctionSpec wFnSpec) throws WindowingException {
  QueryTranslationInfo tInfo = qDef.getTranslationInfo();
  InputInfo iInfo = tInfo.getInputInfo(windowTableFnDef.getInput());
  WindowFunctionDef wFnDef = new WindowFunctionDef();
  wFnDef.setSpec(wFnSpec);
  /*
   * translate args
   */
  ArrayList<ASTNode> args = wFnSpec.getArgs();
  if (args != null) {
    for (ASTNode expr : args) {
      ArgDef argDef = translateWindowFunctionArg(qDef, windowTableFnDef, iInfo, expr);
      wFnDef.addArg(argDef);
    }
  }
  if (RANKING_FUNCS.contains(wFnSpec.getName())) {
    setupRankingArgs(qDef, windowTableFnDef, wFnDef, wFnSpec);
  }
  WindowDef wDef = translateWindowSpec(qDef, iInfo, wFnSpec);
  wFnDef.setWindow(wDef);
  validateWindowDefForWFn(windowTableFnDef, wFnDef);
  setupEvaluator(wFnDef);
  return wFnDef;
}