
Example 61 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

the class LineageInfo method getLineageInfo.

/**
 * Parses the given query and collects its lineage information.
 *
 * @param query the HiveQL query to analyze
 * @throws ParseException if the query cannot be parsed
 * @throws SemanticException if walking the AST fails
 */
public void getLineageInfo(String query) throws ParseException, SemanticException {
    /*
     * Get the AST tree
     */
    ASTNode tree = ParseUtils.parse(query, null);
    while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
        tree = (ASTNode) tree.getChild(0);
    }
    /*
     * initialize Event Processor and dispatcher.
     */
    inputTableList.clear();
    OutputTableList.clear();
    // Create a walker that traverses the AST in DFS order. With an empty rule map,
    // the dispatcher fires this object (the default processor) on every node.
    Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    Dispatcher disp = new DefaultRuleDispatcher(this, rules, null);
    GraphWalker ogw = new DefaultGraphWalker(disp);
    // Create a list of top-level nodes to seed the walk
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.add(tree);
    ogw.startWalking(topNodes, null);
}
Also used : NodeProcessor(org.apache.hadoop.hive.ql.lib.NodeProcessor) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Node(org.apache.hadoop.hive.ql.lib.Node) ArrayList(java.util.ArrayList) Rule(org.apache.hadoop.hive.ql.lib.Rule) Dispatcher(org.apache.hadoop.hive.ql.lib.Dispatcher) GraphWalker(org.apache.hadoop.hive.ql.lib.GraphWalker) LinkedHashMap(java.util.LinkedHashMap)
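
A minimal caller for this method could look like the sketch below. It assumes the surrounding class is the org.apache.hadoop.hive.ql.tools.LineageInfo utility and that the two table sets cleared above (inputTableList / OutputTableList) are exposed through getInputTableList()/getOutputTableList(); the query string is purely illustrative.

import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.tools.LineageInfo;

public class LineageExample {
    public static void main(String[] args) throws ParseException, SemanticException {
        // Illustrative query; any statement that reads and writes tables works.
        String query = "INSERT OVERWRITE TABLE dest SELECT a.key, b.value "
                + "FROM src a JOIN src2 b ON a.key = b.key";
        LineageInfo lep = new LineageInfo();
        lep.getLineageInfo(query);
        // The walk above fired this object as the default processor on every AST node,
        // accumulating source and destination table names into the two sets.
        System.out.println("Input tables:  " + lep.getInputTableList());
        System.out.println("Output tables: " + lep.getOutputTableList());
    }
}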

Example 62 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

the class HiveAuthorizationTaskFactoryImpl method createGrantTask.

@Override
public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
    List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
    List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
    boolean grantOption = false;
    PrivilegeObjectDesc privilegeObj = null;
    if (ast.getChildCount() > 2) {
        for (int i = 2; i < ast.getChildCount(); i++) {
            ASTNode astChild = (ASTNode) ast.getChild(i);
            if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
                grantOption = true;
            } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
                privilegeObj = analyzePrivilegeObject(astChild, outputs);
            }
        }
    }
    String userName = SessionState.getUserFromAuthenticator();
    GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc, principalDesc, userName, PrincipalType.USER, grantOption);
    return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc));
}
Also used : PrincipalDesc(org.apache.hadoop.hive.ql.plan.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) PrivilegeObjectDesc(org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) GrantDesc(org.apache.hadoop.hive.ql.plan.GrantDesc) ShowGrantDesc(org.apache.hadoop.hive.ql.plan.ShowGrantDesc) PrivilegeDesc(org.apache.hadoop.hive.ql.plan.PrivilegeDesc)
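
The loop over children at positions 2 and beyond handles the optional parts of the GRANT statement: the WITH GRANT OPTION marker and the privilege object may or may not be present, so each trailing child is matched by its token type. A hedged sketch of that scan as a reusable helper (hypothetical, not part of the factory class) could read:

import org.apache.hadoop.hive.ql.parse.ASTNode;

/** Hypothetical helper: scans the trailing, optional children of an AST node
 *  and returns the first one with the requested token type, or null if absent. */
final class AstChildren {
    private AstChildren() {
    }

    static ASTNode findChildOfType(ASTNode ast, int startPos, int tokenType) {
        for (int i = startPos; i < ast.getChildCount(); i++) {
            ASTNode child = (ASTNode) ast.getChild(i);
            if (child.getType() == tokenType) {
                return child;
            }
        }
        return null;
    }
}

With such a helper, the flag above becomes a single lookup, e.g. grantOption = AstChildren.findChildOfType(ast, 2, HiveParser.TOK_GRANT_WITH_OPTION) != null.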

Example 63 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

the class HiveAuthorizationTaskFactoryImpl method createShowRoleGrantTask.

@Override
public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) {
    ASTNode child = (ASTNode) ast.getChild(0);
    PrincipalType principalType = PrincipalType.USER;
    switch(child.getType()) {
        case HiveParser.TOK_USER:
            principalType = PrincipalType.USER;
            break;
        case HiveParser.TOK_GROUP:
            principalType = PrincipalType.GROUP;
            break;
        case HiveParser.TOK_ROLE:
            principalType = PrincipalType.ROLE;
            break;
    }
    String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
    RoleDDLDesc roleDesc = new RoleDDLDesc(principalName, principalType, RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT, null);
    roleDesc.setResFile(resultFile.toString());
    return TaskFactory.get(new DDLWork(inputs, outputs, roleDesc));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) RoleDDLDesc(org.apache.hadoop.hive.ql.plan.RoleDDLDesc) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType)
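
The switch resolves the principal kind from the token type of the first child (TOK_USER, TOK_GROUP, or TOK_ROLE), falling back to USER. That mapping could be captured in a small helper like the hypothetical sketch below (illustration only, not an existing Hive API):

import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;

/** Hypothetical helper mapping a principal token to the metastore PrincipalType,
 *  defaulting to USER exactly like the switch above. */
final class PrincipalTypes {
    private PrincipalTypes() {
    }

    static PrincipalType fromToken(ASTNode principalNode) {
        switch (principalNode.getType()) {
            case HiveParser.TOK_GROUP:
                return PrincipalType.GROUP;
            case HiveParser.TOK_ROLE:
                return PrincipalType.ROLE;
            case HiveParser.TOK_USER:
            default:
                return PrincipalType.USER;
        }
    }
}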

Example 64 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

the class HiveAuthorizationTaskFactoryImpl method analyzeGrantRevokeRole.

private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) {
    List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(0));
    // check if admin option has been specified
    int rolesStartPos = 1;
    ASTNode wAdminOption = (ASTNode) ast.getChild(1);
    boolean isAdmin = false;
    if ((isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION) || (!isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_ADMIN_OPTION_FOR)) {
        // start reading role names from next position
        rolesStartPos = 2;
        isAdmin = true;
    }
    List<String> roles = new ArrayList<String>();
    for (int i = rolesStartPos; i < ast.getChildCount(); i++) {
        roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
    }
    String roleOwnerName = SessionState.getUserFromAuthenticator();
    // Default to false with V2 authorization, until a change is made to use the admin option.
    GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant, roles, principalDesc, roleOwnerName, PrincipalType.USER, isAdmin);
    return TaskFactory.get(new DDLWork(inputs, outputs, grantRevokeRoleDDL));
}
Also used : PrincipalDesc(org.apache.hadoop.hive.ql.plan.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) GrantRevokeRoleDDL(org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ArrayList(java.util.ArrayList)
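
After the optional admin-option marker is consumed, every remaining child of the AST is a role identifier. The tail extraction could be factored out as in the hypothetical helper below (the class and method names are placeholders):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;

/** Hypothetical helper mirroring the role-name extraction above. */
final class RoleNames {
    private RoleNames() {
    }

    /** Collects the role identifiers that follow the principal list (child 0)
     *  and, when present, the admin-option marker (child 1). */
    static List<String> fromGrantRevokeAst(ASTNode ast, int rolesStartPos) {
        List<String> roles = new ArrayList<>();
        for (int i = rolesStartPos; i < ast.getChildCount(); i++) {
            // Identifiers arrive escaped from the parser; unescape before use.
            roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
        }
        return roles;
    }
}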

Example 65 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project SQLWindowing by hbutani.

the class WindowFunctionTranslation method translate.

public static WindowFunctionDef translate(QueryDef qDef, TableFuncDef windowTableFnDef, WindowFunctionSpec wFnSpec) throws WindowingException {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    InputInfo iInfo = tInfo.getInputInfo(windowTableFnDef.getInput());
    WindowFunctionDef wFnDef = new WindowFunctionDef();
    wFnDef.setSpec(wFnSpec);
    /*
     * translate args
     */
    ArrayList<ASTNode> args = wFnSpec.getArgs();
    if (args != null) {
        for (ASTNode expr : args) {
            ArgDef argDef = translateWindowFunctionArg(qDef, windowTableFnDef, iInfo, expr);
            wFnDef.addArg(argDef);
        }
    }
    if (RANKING_FUNCS.contains(wFnSpec.getName())) {
        setupRankingArgs(qDef, windowTableFnDef, wFnDef, wFnSpec);
    }
    WindowDef wDef = translateWindowSpec(qDef, iInfo, wFnSpec);
    wFnDef.setWindow(wDef);
    validateWindowDefForWFn(windowTableFnDef, wFnDef);
    setupEvaluator(wFnDef);
    return wFnDef;
}
Also used : InputInfo(com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo) WindowDef(com.sap.hadoop.windowing.query2.definition.WindowDef) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) WindowFunctionDef(com.sap.hadoop.windowing.query2.definition.WindowFunctionDef) ArgDef(com.sap.hadoop.windowing.query2.definition.ArgDef)
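
The method follows a spec-to-definition pattern that recurs throughout SQLWindowing's translator: copy the spec onto a fresh definition, translate each constituent (arguments, then the window), validate the assembled definition, and only then bind the runtime evaluator. A framework-independent skeleton of that pattern is sketched below; all type names are placeholders, not SQLWindowing classes.

/** Generic spec-to-definition translation template (placeholder types only). */
interface SpecTranslator<S, D> {

    // Create an empty definition that carries a reference to its spec.
    D newDefinition(S spec);

    // Translate the constituent pieces (e.g. arguments, window) onto the definition.
    void translateParts(S spec, D def);

    // Cross-check the assembled definition (e.g. window vs. function constraints).
    void validate(D def);

    // Bind runtime state, such as the function evaluator.
    void bindRuntime(D def);

    default D translate(S spec) {
        D def = newDefinition(spec);
        translateParts(spec, def);
        validate(def);
        bindRuntime(def);
        return def;
    }
}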

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 116
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 37
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 24
ArrayList (java.util.ArrayList): 21
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 13
HashMap (java.util.HashMap): 11
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
Table (org.apache.hadoop.hive.ql.metadata.Table): 10
Node (org.apache.hadoop.hive.ql.lib.Node): 9
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 8
TableName (org.apache.hadoop.hive.common.TableName): 7
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 7
RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver): 7
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 6
RelNode (org.apache.calcite.rel.RelNode): 5
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 5
Context (org.apache.hadoop.hive.ql.Context): 5
ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver): 5
SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer): 5
WindowingException (com.sap.hadoop.windowing.WindowingException): 4