Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method parsePrivObject.
protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
  boolean isTable;
  String object = null;
  Map<String, String> partSpec = null;
  List<String> columns = null;
  ASTNode child = (ASTNode) ast.getChild(0);
  ASTNode gchild = (ASTNode) child.getChild(0);
  if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
    isTable = true;
    object = BaseSemanticAnalyzer.getQualifiedTableName(gchild).getNotEmptyDbTable();
  } else if (child.getType() == HiveParser.TOK_URI_TYPE || child.getType() == HiveParser.TOK_SERVER_TYPE) {
    throw new SemanticException("Hive authorization does not support the URI or SERVER objects");
  } else {
    isTable = false;
    object = BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText());
  }
  // if a partition spec node is present, set the partition spec
  for (int i = 1; i < child.getChildCount(); i++) {
    gchild = (ASTNode) child.getChild(i);
    if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
      partSpec = BaseSemanticAnalyzer.getPartSpec(gchild);
    } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
      columns = BaseSemanticAnalyzer.getColumnNames(gchild);
    }
  }
  return new PrivilegeObjectDesc(isTable, object, partSpec, columns);
}
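For orientation, the tree shape parsePrivObject expects can be sketched with a minimal stand-in (illustrative Java, not Hive code; the Node class and token constants are assumptions that mimic ASTNode and the HiveParser token types):

import java.util.ArrayList;
import java.util.List;

public class PrivObjectShape {
  // Stand-ins for HiveParser token types; the numeric values are arbitrary.
  static final int TOK_TABLE_TYPE = 1, TOK_PARTSPEC = 2, TOK_TABCOLNAME = 3, IDENT = 4;

  // Minimal stand-in for ASTNode: a token type, optional text, ordered children.
  static class Node {
    final int type; final String text; final List<Node> kids = new ArrayList<>();
    Node(int type, String text) { this.type = type; this.text = text; }
    Node add(Node k) { kids.add(k); return this; }
  }

  public static void main(String[] args) {
    // Roughly what "... ON TABLE db.tbl PARTITION (ds='2024') (c1, c2)" yields:
    // child 0 names the object; later children are optional and order-independent.
    Node child = new Node(TOK_TABLE_TYPE, null)
        .add(new Node(IDENT, "db.tbl"))
        .add(new Node(TOK_PARTSPEC, "ds='2024'"))
        .add(new Node(TOK_TABCOLNAME, "c1,c2"));
    System.out.println("object = " + child.kids.get(0).text);
    // Same scan as the loop in parsePrivObject above:
    for (int i = 1; i < child.kids.size(); i++) {
      Node g = child.kids.get(i);
      if (g.type == TOK_PARTSPEC) System.out.println("partSpec = " + g.text);
      else if (g.type == TOK_TABCOLNAME) System.out.println("columns = " + g.text);
    }
  }
}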
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createGrantTask.
@Override
public Task<?> createGrantTask(ASTNode ast, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
boolean grantOption = false;
PrivilegeObjectDesc privilegeObj = null;
if (ast.getChildCount() > 2) {
for (int i = 2; i < ast.getChildCount(); i++) {
ASTNode astChild = (ASTNode) ast.getChild(i);
if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
grantOption = true;
} else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
privilegeObj = analyzePrivilegeObject(astChild, outputs);
}
}
}
String userName = SessionState.getUserFromAuthenticator();
GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc, principalDesc, userName, PrincipalType.USER, grantOption);
return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc));
}
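The method relies on a fixed layout: children 0 and 1 are the required privilege and principal lists, while anything from index 2 onward is optional and identified by token type rather than position. A minimal sketch of that scan, with strings standing in for the HiveParser token types (the child layout shown is an assumption for illustration):

import java.util.List;

public class OptionalTailScan {
  public static void main(String[] args) {
    // Children of a hypothetical GRANT node, e.g.
    // "GRANT SELECT ON TABLE t TO USER alice WITH GRANT OPTION":
    List<String> children = List.of("PRIV_LIST", "PRINCIPAL_LIST",
        "TOK_PRIV_OBJECT", "TOK_GRANT_WITH_OPTION");
    boolean grantOption = false;
    String privilegeObj = null;
    // Optional children may appear in any order, so scan and match on type.
    for (int i = 2; i < children.size(); i++) {
      switch (children.get(i)) {
        case "TOK_GRANT_WITH_OPTION": grantOption = true; break;
        case "TOK_PRIV_OBJECT": privilegeObj = "TABLE t"; break; // would call analyzePrivilegeObject here
      }
    }
    System.out.println("grantOption=" + grantOption + ", privilegeObj=" + privilegeObj);
  }
}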
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method analyzePrivilegeListDef.
private List<PrivilegeDesc> analyzePrivilegeListDef(ASTNode node) throws SemanticException {
  List<PrivilegeDesc> ret = new ArrayList<PrivilegeDesc>();
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode privilegeDef = (ASTNode) node.getChild(i);
    ASTNode privilegeType = (ASTNode) privilegeDef.getChild(0);
    Privilege privObj = PrivilegeRegistry.getPrivilege(privilegeType.getType());
    if (privObj == null) {
      throw new SemanticException("Undefined privilege " + PrivilegeType.getPrivTypeByToken(privilegeType.getType()));
    }
    List<String> cols = null;
    if (privilegeDef.getChildCount() > 1) {
      cols = BaseSemanticAnalyzer.getColumnNames((ASTNode) privilegeDef.getChild(1));
    }
    PrivilegeDesc privilegeDesc = new PrivilegeDesc(privObj, cols);
    ret.add(privilegeDesc);
  }
  return ret;
}
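Each privilegeDef child carries the privilege token at position 0 and, when the grant is column-level, a column-name list at position 1. A simplified, self-contained model of that shape (the record and sample data are assumptions for illustration):

import java.util.List;

public class PrivListShape {
  // Simplified model of one privilegeDef node: a type plus an optional column list.
  record PrivilegeDef(String type, List<String> cols) {}

  public static void main(String[] args) {
    // Roughly what "GRANT SELECT (c1, c2), INSERT ON ..." would yield:
    List<PrivilegeDef> defs = List.of(
        new PrivilegeDef("SELECT", List.of("c1", "c2")), // childCount > 1: column-level grant
        new PrivilegeDef("INSERT", null));               // no column list: whole-object grant
    for (PrivilegeDef d : defs) {
      System.out.println(d.type() + (d.cols() == null ? "" : " on " + d.cols()));
    }
  }
}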
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
Class HiveAuthorizationTaskFactoryImpl, method createShowRoleGrantTask.
@Override
public Task<?> createShowRoleGrantTask(ASTNode ast, Path resultFile, Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
ASTNode child = (ASTNode) ast.getChild(0);
PrincipalType principalType = PrincipalType.USER;
switch(child.getType()) {
case HiveParser.TOK_USER:
principalType = PrincipalType.USER;
break;
case HiveParser.TOK_GROUP:
principalType = PrincipalType.GROUP;
break;
case HiveParser.TOK_ROLE:
principalType = PrincipalType.ROLE;
break;
}
String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
ShowRoleGrantDesc showRoleGrantDesc = new ShowRoleGrantDesc(principalName, principalType, resultFile.toString());
return TaskFactory.get(new DDLWork(inputs, outputs, showRoleGrantDesc));
}
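Note that the switch leaves principalType at its initial value, so any unrecognized token falls back to PrincipalType.USER. The same mapping, restated as a small self-contained helper (token strings stand in for the HiveParser constants):

public class PrincipalMapping {
  enum PrincipalType { USER, GROUP, ROLE }

  static PrincipalType fromToken(String token) {
    switch (token) {
      case "TOK_GROUP": return PrincipalType.GROUP;
      case "TOK_ROLE":  return PrincipalType.ROLE;
      default:          return PrincipalType.USER; // USER is the fallback, as above
    }
  }

  public static void main(String[] args) {
    System.out.println(fromToken("TOK_ROLE"));  // ROLE
    System.out.println(fromToken("TOK_OTHER")); // USER
  }
}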
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.
Class ExpressionFactory, method fromString.
public static Expression fromString(final String expression) {
  if (expression == null || expression.isEmpty()) {
    return null;
  }
  ParseDriver driver = new ParseDriver();
  ASTNode node = null;
  try {
    node = driver.parseTriggerExpression(expression);
  } catch (ParseException e) {
    throw new IllegalArgumentException("Invalid expression: " + expression, e);
  }
  if (node.getChildCount() == 2 && node.getChild(1).getType() == HiveParser.EOF) {
    node = (ASTNode) node.getChild(0);
  }
  if (node.getType() != HiveParser.TOK_TRIGGER_EXPRESSION) {
    throw new IllegalArgumentException("Expected trigger expression, got: " + node.toStringTree());
  }
  if (node.getChildCount() != 3) {
    throw new IllegalArgumentException("Only single > condition supported: " + expression);
  }
  // Only the ">" predicate is supported for now; walking a full expression
  // tree when multiple conditions are required is tracked in HIVE-17622.
  if (node.getChild(1).getType() != HiveParser.GREATERTHAN) {
    throw new IllegalArgumentException("Invalid predicate in expression");
  }
  final String counterName = node.getChild(0).getText();
  final String counterValueStr = PlanUtils.stripQuotes(node.getChild(2).getText().toLowerCase());
  if (counterName.isEmpty()) {
    throw new IllegalArgumentException("Counter name cannot be empty!");
  }
  // look for matches in file system counters
  long counterValue;
  for (FileSystemCounterLimit.FSCounter fsCounter : FileSystemCounterLimit.FSCounter.values()) {
    if (counterName.toUpperCase().endsWith(fsCounter.name())) {
      try {
        counterValue = getCounterValue(counterValueStr, new Validator.SizeValidator());
        if (counterValue < 0) {
          throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
        }
      } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
      }
      // this is a file system counter; the value is valid, so create the counter limit
      FileSystemCounterLimit fsCounterLimit = FileSystemCounterLimit.fromName(counterName, counterValue);
      return createExpression(fsCounterLimit);
    }
  }
  // look for matches in time based counters
  for (TimeCounterLimit.TimeCounter timeCounter : TimeCounterLimit.TimeCounter.values()) {
    if (counterName.equalsIgnoreCase(timeCounter.name())) {
      try {
        counterValue = getCounterValue(counterValueStr, new Validator.TimeValidator(TimeUnit.MILLISECONDS));
        if (counterValue < 0) {
          throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
        }
      } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
      }
      TimeCounterLimit timeCounterLimit = new TimeCounterLimit(TimeCounterLimit.TimeCounter.valueOf(counterName.toUpperCase()), counterValue);
      return createExpression(timeCounterLimit);
    }
  }
  // look for matches in vertex specific counters
  for (VertexCounterLimit.VertexCounter vertexCounter : VertexCounterLimit.VertexCounter.values()) {
    if (counterName.equalsIgnoreCase(vertexCounter.name())) {
      try {
        counterValue = getCounterValue(counterValueStr, null);
        if (counterValue < 0) {
          throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
        }
      } catch (NumberFormatException e) {
        throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
      }
      VertexCounterLimit vertexCounterLimit = new VertexCounterLimit(VertexCounterLimit.VertexCounter.valueOf(counterName.toUpperCase()), counterValue);
      return createExpression(vertexCounterLimit);
    }
  }
  // if nothing matches, try creating a custom counter
  try {
    counterValue = getCounterValue(counterValueStr, null);
    if (counterValue < 0) {
      throw new IllegalArgumentException("Illegal value for counter limit. Expected a positive long value.");
    }
  } catch (NumberFormatException e) {
    throw new IllegalArgumentException("Invalid counter value: " + counterValueStr);
  }
  CustomCounterLimit customCounterLimit = new CustomCounterLimit(counterName, counterValue);
  return createExpression(customCounterLimit);
}
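Usage is a single parse-and-match call: the counter name is matched against the file system, time, and vertex counter enums in turn, and anything unmatched becomes a custom counter. A hedged usage sketch; the import package and the specific enum constants (ELAPSED_TIME, BYTES_READ) are assumptions, and running it requires the Hive ql jar on the classpath:

import org.apache.hadoop.hive.ql.wm.Expression;
import org.apache.hadoop.hive.ql.wm.ExpressionFactory;

public class TriggerExpressions {
  public static void main(String[] args) {
    // Assumed to hit the time-counter branch (milliseconds, TimeValidator).
    Expression time = ExpressionFactory.fromString("ELAPSED_TIME > 30000");
    // Assumed to hit the file-system branch via the endsWith(BYTES_READ) match;
    // size suffixes like '10gb' go through the SizeValidator.
    Expression fs = ExpressionFactory.fromString("HDFS_BYTES_READ > '10gb'");
    // Matches no enum, so it falls through to a CustomCounterLimit.
    Expression custom = ExpressionFactory.fromString("MY_COUNTER > 100");
    System.out.println(time + "\n" + fs + "\n" + custom);
  }
}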