
Example 76 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From class HiveAuthorizationTaskFactoryImpl, method parsePrivObject.

protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
    boolean isTable;
    String object = null;
    Map<String, String> partSpec = null;
    List<String> columns = null;
    ASTNode child = (ASTNode) ast.getChild(0);
    ASTNode gchild = (ASTNode) child.getChild(0);
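    // The first child of the privilege-object node tells us what kind of object is
    // being granted on: a table, an unsupported URI/server, or otherwise a database.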
    if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
        isTable = true;
        object = BaseSemanticAnalyzer.getQualifiedTableName(gchild).getNotEmptyDbTable();
    } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) {
        throw new SemanticException("Hive authorization does not support the URI or SERVER objects");
    } else {
        isTable = false;
        object = BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText());
    }
    // if partition spec node is present, set partition spec
    for (int i = 1; i < child.getChildCount(); i++) {
        gchild = (ASTNode) child.getChild(i);
        if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
            partSpec = BaseSemanticAnalyzer.getPartSpec(gchild);
        } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
            columns = BaseSemanticAnalyzer.getColumnNames(gchild);
        }
    }
    return new PrivilegeObjectDesc(isTable, object, partSpec, columns);
}
Also used: PrivilegeObjectDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc), ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode), SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
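
For context, a minimal sketch of the AST shape this method consumes. It assumes a Hive 3.x classpath where ParseDriver.parse(String) returns the root ASTNode; dumpTree and the GRANT statement are illustrative only:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class PrivObjectTreeDemo {

    // Print the parse tree so the TOK_PRIV_OBJECT / TOK_TABLE_TYPE /
    // TOK_PARTSPEC / TOK_TABCOLNAME layout read by parsePrivObject is visible.
    static void dumpTree(ASTNode node, int depth) {
        System.out.println("  ".repeat(depth) + node.getText() + " (type=" + node.getType() + ")");
        for (int i = 0; i < node.getChildCount(); i++) {
            dumpTree((ASTNode) node.getChild(i), depth + 1);
        }
    }

    public static void main(String[] args) throws Exception {
        ParseDriver driver = new ParseDriver();
        // parse(String) returns the root ASTNode in Hive 3.x; newer releases may differ.
        ASTNode root = driver.parse("GRANT SELECT ON TABLE db1.t1 TO USER alice");
        dumpTree(root, 0);
    }
}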

Example 77 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From class HiveAuthorizationTaskFactoryImpl, method createGrantTask.

@Override
public Task<?> createGrantTask(ASTNode ast, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
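    // Child 0 is the privilege list and child 1 the principal list; any remaining
    // children carry WITH GRANT OPTION and the privilege object.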
    List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
    List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
    boolean grantOption = false;
    PrivilegeObjectDesc privilegeObj = null;
    if (ast.getChildCount() > 2) {
        for (int i = 2; i < ast.getChildCount(); i++) {
            ASTNode astChild = (ASTNode) ast.getChild(i);
            if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
                grantOption = true;
            } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
                privilegeObj = analyzePrivilegeObject(astChild, outputs);
            }
        }
    }
    String userName = SessionState.getUserFromAuthenticator();
    GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc, principalDesc, userName, PrincipalType.USER, grantOption);
    return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc));
}
Also used: PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc), DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork), PrivilegeObjectDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeObjectDesc), ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode), GrantDesc(org.apache.hadoop.hive.ql.ddl.privilege.grant.GrantDesc), ShowRoleGrantDesc(org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc), ShowGrantDesc(org.apache.hadoop.hive.ql.ddl.privilege.show.grant.ShowGrantDesc), PrivilegeDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc)
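
A minimal usage sketch, not the factory's documented entry point: it assumes the (HiveConf, Hive) constructor and that grantAst is the TOK_GRANT node already obtained from the parser:

import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl;

public class GrantTaskDemo {

    // Turn a TOK_GRANT subtree into an executable DDL task; the entity sets are
    // passed through to the resulting DDLWork.
    static Task<?> buildGrantTask(ASTNode grantAst) throws Exception {
        HiveConf conf = new HiveConf();
        // Assumed (HiveConf, Hive) constructor, matching how the DDL analyzer wires the factory.
        HiveAuthorizationTaskFactoryImpl factory =
            new HiveAuthorizationTaskFactoryImpl(conf, Hive.get(conf));
        Set<ReadEntity> inputs = new HashSet<>();
        Set<WriteEntity> outputs = new HashSet<>();
        return factory.createGrantTask(grantAst, inputs, outputs);
    }
}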

Example 78 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From class HiveAuthorizationTaskFactoryImpl, method analyzePrivilegeListDef.

private List<PrivilegeDesc> analyzePrivilegeListDef(ASTNode node) throws SemanticException {
    List<PrivilegeDesc> ret = new ArrayList<PrivilegeDesc>();
    for (int i = 0; i < node.getChildCount(); i++) {
        ASTNode privilegeDef = (ASTNode) node.getChild(i);
        ASTNode privilegeType = (ASTNode) privilegeDef.getChild(0);
        Privilege privObj = PrivilegeRegistry.getPrivilege(privilegeType.getType());
        if (privObj == null) {
            throw new SemanticException("Undefined privilege " + PrivilegeType.getPrivTypeByToken(privilegeType.getType()));
        }
        List<String> cols = null;
        if (privilegeDef.getChildCount() > 1) {
            cols = BaseSemanticAnalyzer.getColumnNames((ASTNode) privilegeDef.getChild(1));
        }
        PrivilegeDesc privilegeDesc = new PrivilegeDesc(privObj, cols);
        ret.add(privilegeDesc);
    }
    return ret;
}
Also used: ArrayList(java.util.ArrayList), ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode), Privilege(org.apache.hadoop.hive.ql.security.authorization.Privilege), PrivilegeDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc), SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
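
A minimal sketch of the registry lookup performed for each privilege child; TOK_PRIV_SELECT is assumed to be the grammar token HiveParser emits for SELECT:

import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;

public class PrivilegeLookupDemo {
    public static void main(String[] args) {
        // Resolve the SELECT token to its Privilege definition, as the loop above does
        // for every child of the privilege-list node; null means an undefined privilege.
        Privilege select = PrivilegeRegistry.getPrivilege(HiveParser.TOK_PRIV_SELECT);
        System.out.println(select != null ? select.toString() : "undefined privilege");
    }
}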

Example 79 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From class HiveAuthorizationTaskFactoryImpl, method createShowRoleGrantTask.

@Override
public Task<?> createShowRoleGrantTask(ASTNode ast, Path resultFile, Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
    ASTNode child = (ASTNode) ast.getChild(0);
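    // The child's token type (TOK_USER / TOK_GROUP / TOK_ROLE) selects the principal
    // type; the child's own first child holds the principal name.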
    PrincipalType principalType = PrincipalType.USER;
    switch(child.getType()) {
        case HiveParser.TOK_USER:
            principalType = PrincipalType.USER;
            break;
        case HiveParser.TOK_GROUP:
            principalType = PrincipalType.GROUP;
            break;
        case HiveParser.TOK_ROLE:
            principalType = PrincipalType.ROLE;
            break;
    }
    String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
    ShowRoleGrantDesc showRoleGrantDesc = new ShowRoleGrantDesc(principalName, principalType, resultFile.toString());
    return TaskFactory.get(new DDLWork(inputs, outputs, showRoleGrantDesc));
}
Also used: ShowRoleGrantDesc(org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc), DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork), ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode), PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType)
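
Continuing the earlier GrantTaskDemo sketch: showAst is assumed to be the TOK_SHOW_ROLE_GRANT node for a statement such as SHOW ROLE GRANT USER alice, with org.apache.hadoop.fs.Path and java.util.HashSet added to the imports:

// SHOW ROLE GRANT USER alice parses (roughly) as:
//   TOK_SHOW_ROLE_GRANT
//       TOK_USER
//           alice
// so TOK_USER selects PrincipalType.USER and "alice" becomes the principal name.
Task<?> showTask = factory.createShowRoleGrantTask(
    showAst, new Path("/tmp/show_role_grant.out"),
    new HashSet<ReadEntity>(), new HashSet<WriteEntity>());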

Example 80 with ASTNode

Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From class ExpressionFactory, method fromString.

public static Expression fromString(final String expression) {
    if (expression == null || expression.isEmpty()) {
        return null;
    }
    ParseDriver driver = new ParseDriver();
    ASTNode node = null;
    try {
        node = driver.parseTriggerExpression(expression);
    } catch (ParseException e) {
        throw new IllegalArgumentException("Invalid expression: " + expression, e);
    }
    if (node.getChildCount() == 2 && node.getChild(1).getType() == HiveParser.EOF) {
        node = (ASTNode) node.getChild(0);
    }
    if (node.getType() != HiveParser.TOK_TRIGGER_EXPRESSION) {
        throw new IllegalArgumentException("Expected trigger expression, got: " + node.toStringTree());
    }
    if (node.getChildCount() != 3) {
        throw new IllegalArgumentException("Only single > condition supported: " + expression);
    }
    // TODO: only a single condition is supported for now; this can be extended to an
    // expression tree when multiple conditions are required. HIVE-17622
    if (node.getChild(1).getType() != HiveParser.GREATERTHAN) {
        throw new IllegalArgumentException("Invalid predicate in expression");
    }
    final String counterName = node.getChild(0).getText();
    final String counterValueStr = PlanUtils.stripQuotes(node.getChild(2).getText().toLowerCase());
    if (counterName.isEmpty()) {
        throw new IllegalArgumentException("Counter name cannot be empty!");
    }
    // look for matches in file system counters
    long counterValue;
    for (FileSystemCounterLimit.FSCounter fsCounter : FileSystemCounterLimit.FSCounter.values()) {
        if (counterName.toUpperCase().endsWith(fsCounter.name())) {
            try {
                counterValue = getCounterValue(counterValueStr, new Validator.SizeValidator());
                if (counterValue < 0) {
                    throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
                }
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
            }
            // this is file system counter, valid and create counter
            FileSystemCounterLimit fsCounterLimit = FileSystemCounterLimit.fromName(counterName, counterValue);
            return createExpression(fsCounterLimit);
        }
    }
    // look for matches in time based counters
    for (TimeCounterLimit.TimeCounter timeCounter : TimeCounterLimit.TimeCounter.values()) {
        if (counterName.equalsIgnoreCase(timeCounter.name())) {
            try {
                counterValue = getCounterValue(counterValueStr, new Validator.TimeValidator(TimeUnit.MILLISECONDS));
                if (counterValue < 0) {
                    throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
                }
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
            }
            TimeCounterLimit timeCounterLimit = new TimeCounterLimit(TimeCounterLimit.TimeCounter.valueOf(counterName.toUpperCase()), counterValue);
            return createExpression(timeCounterLimit);
        }
    }
    // look for matches in vertex specific counters
    for (VertexCounterLimit.VertexCounter vertexCounter : VertexCounterLimit.VertexCounter.values()) {
        if (counterName.equalsIgnoreCase(vertexCounter.name())) {
            try {
                counterValue = getCounterValue(counterValueStr, null);
                if (counterValue < 0) {
                    throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
                }
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
            }
            VertexCounterLimit vertexCounterLimit = new VertexCounterLimit(VertexCounterLimit.VertexCounter.valueOf(counterName.toUpperCase()), counterValue);
            return createExpression(vertexCounterLimit);
        }
    }
    // if nothing matches, try creating a custom counter
    try {
        counterValue = getCounterValue(counterValueStr, null);
        if (counterValue < 0) {
            throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
        }
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
    }
    CustomCounterLimit customCounterLimit = new CustomCounterLimit(counterName, counterValue);
    return createExpression(customCounterLimit);
}
Also used: ParseDriver(org.apache.hadoop.hive.ql.parse.ParseDriver), ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode), ParseException(org.apache.hadoop.hive.ql.parse.ParseException)
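
A minimal usage sketch; the counter names are assumptions (BYTES_READ as an FSCounter member, ELAPSED_TIME as a TimeCounter member, and MY_CUSTOM_COUNTER as a made-up name that falls through to the custom branch):

import org.apache.hadoop.hive.ql.wm.Expression;
import org.apache.hadoop.hive.ql.wm.ExpressionFactory;

public class TriggerExpressionDemo {
    public static void main(String[] args) {
        // A size suffix goes through the SizeValidator branch for file system counters.
        Expression bytesRead = ExpressionFactory.fromString("BYTES_READ > '10gb'");
        // A duration suffix goes through the TimeValidator branch (milliseconds internally).
        Expression elapsed = ExpressionFactory.fromString("ELAPSED_TIME > '1hour'");
        // An unrecognized name ends up as a CustomCounterLimit with a plain long limit.
        Expression custom = ExpressionFactory.fromString("MY_CUSTOM_COUNTER > 100");
        System.out.println(bytesRead + "\n" + elapsed + "\n" + custom);
    }
}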

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 116 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 37 usages
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 24 usages
ArrayList (java.util.ArrayList): 21 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 13 usages
HashMap (java.util.HashMap): 11 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11 usages
Table (org.apache.hadoop.hive.ql.metadata.Table): 10 usages
Node (org.apache.hadoop.hive.ql.lib.Node): 9 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 8 usages
TableName (org.apache.hadoop.hive.common.TableName): 7 usages
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 7 usages
RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver): 7 usages
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 6 usages
RelNode (org.apache.calcite.rel.RelNode): 5 usages
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 5 usages
Context (org.apache.hadoop.hive.ql.Context): 5 usages
ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver): 5 usages
SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer): 5 usages
WindowingException (com.sap.hadoop.windowing.WindowingException): 4 usages