Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
From the class FunctionSemanticAnalyzer, method analyzeCreateFunction:
/**
 * Analyzes a CREATE FUNCTION statement and schedules the task that creates the function.
 *
 * Grammar: ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
 *
 * @param ast the parsed CREATE FUNCTION AST node
 * @throws SemanticException if a temporary function is given a qualified (db-prefixed) name
 */
private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
  String funcName = ast.getChild(0).getText().toLowerCase();
  boolean isTemporary = (ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null);
  String udfClassName = unescapeSQLString(ast.getChild(1).getText());

  // A temporary function is not stored in the metastore, so a db-qualified name is not allowed.
  if (isTemporary && FunctionUtils.isQualifiedFunctionName(funcName)) {
    throw new SemanticException("Temporary function cannot be created with a qualified name.");
  }

  // Collect any resources referenced via a USING clause (null when the clause is absent).
  List<ResourceUri> uris = getResourceList(ast);
  if (!isTemporary && uris == null) {
    SESISON_STATE_LOG.warn("permanent functions created without USING clause will not be replicated.");
  }

  CreateFunctionDesc desc = new CreateFunctionDesc(funcName, isTemporary, udfClassName, uris, null);
  rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
  addEntities(funcName, udfClassName, isTemporary, uris);
}
Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
From the class AbstractFunctionAnalyzer, method addEntities:
/**
* Add write entities to the semantic analyzer to restrict function creation to privileged users.
*/
/**
 * Registers write entities for a function create/drop so that the operation can be restricted
 * to privileged users.
 *
 * For a permanent (metastore) function, an entity for the owning database is added in addition
 * to one for the function itself; the authorization API implementation decides which of the
 * two it actually uses to authorize the call. Any resource URIs attached to the function are
 * registered as write entities as well.
 *
 * @param functionName possibly db-qualified function name
 * @param className    fully qualified implementation class name
 * @param isTemporary  whether this is a temporary (session-local) function
 * @param resources    resources from the USING clause; may be null
 * @throws SemanticException if the owning database cannot be resolved
 */
protected void addEntities(String functionName, String className, boolean isTemporary, List<ResourceUri> resources) throws SemanticException {
  Database database = null;
  // Temporary functions have no database 'namespace'; only resolve one for permanent functions.
  if (!isTemporary) {
    try {
      String[] parts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
      database = getDatabase(parts[0]);
      functionName = parts[1];
    } catch (HiveException e) {
      LOG.error("Failed to get database ", e);
      throw new SemanticException(e);
    }
  }
  if (database != null) {
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
  }
  // Register the function itself as a write entity.
  outputs.add(new WriteEntity(database, functionName, className, Type.FUNCTION, WriteEntity.WriteType.DDL_NO_LOCK));
  // Each referenced resource URI also becomes a write entity.
  if (resources != null) {
    for (ResourceUri uri : resources) {
      outputs.add(toWriteEntity(uri.getUri()));
    }
  }
}
Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
From the class CreateFunctionAnalyzer, method getResourceList:
/**
 * Extracts the USING-clause resource list (e.g. JAR/FILE/ARCHIVE URIs) from a
 * CREATE FUNCTION AST.
 *
 * @param ast the CREATE FUNCTION node
 * @return the referenced resources, or {@code null} when no TOK_RESOURCE_LIST child exists
 * @throws SemanticException if the resource list is malformed or names an unknown resource type
 */
private List<ResourceUri> getResourceList(ASTNode ast) throws SemanticException {
  ASTNode listNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST);
  if (listNode == null) {
    return null;
  }
  List<ResourceUri> uris = new ArrayList<ResourceUri>();
  for (int i = 0; i < listNode.getChildCount(); i++) {
    // Each child is expected to have the shape ^(TOK_RESOURCE_URI $resType $resPath).
    ASTNode child = (ASTNode) listNode.getChild(i);
    if (child.getToken().getType() != HiveParser.TOK_RESOURCE_URI) {
      throw new SemanticException("Expected token type TOK_RESOURCE_URI but found " + child.getToken().toString());
    }
    if (child.getChildCount() != 2) {
      throw new SemanticException("Expected 2 child nodes of TOK_RESOURCE_URI but found " + child.getChildCount());
    }
    ASTNode typeNode = (ASTNode) child.getChild(0);
    ASTNode uriNode = (ASTNode) child.getChild(1);
    ResourceType type = TOKEN_TYPE_TO_RESOURCE_TYPE.get(typeNode.getType());
    if (type == null) {
      throw new SemanticException("Unexpected token " + typeNode);
    }
    uris.add(new ResourceUri(type, PlanUtils.stripQuotes(uriNode.getText())));
  }
  return uris;
}
Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
From the class FunctionUtils, method toFunctionResource:
/**
 * Converts metastore {@link ResourceUri} entries into session-level {@link FunctionResource}s.
 *
 * @param resources the metastore resource list; may be {@code null}
 * @return an array of converted resources of the same size, or {@code null} when the input is null
 * @throws HiveException if a resource type cannot be mapped to a session resource type
 */
public static FunctionResource[] toFunctionResource(List<ResourceUri> resources) throws HiveException {
  if (resources == null) {
    return null;
  }
  FunctionResource[] result = new FunctionResource[resources.size()];
  int i = 0;
  for (ResourceUri uri : resources) {
    // Map the metastore resource type onto its SessionState equivalent.
    SessionState.ResourceType type = getResourceType(uri.getResourceType());
    result[i++] = new FunctionResource(type, uri.getUri());
  }
  return result;
}
Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
From the class TestDbNotificationListener, method dropFunction:
/**
 * Verifies that dropping a function emits a DROP_FUNCTION notification whose message carries
 * the database and function names, and that a forced failure (via DummyRawStoreFailEvent)
 * does not produce an extra notification when transactional event listeners are configured.
 */
@Test
public void dropFunction() throws Exception {
  String defaultDbName = "default";
  String funcName = "dropfunction";
  String funcName2 = "dropfunction2";
  String ownerName = "me";
  String funcClass = "o.a.h.h.dropfunction";
  String funcClass2 = "o.a.h.h.dropfunction2";
  String resourcePath = Paths.get(testTempDir, "somewhere").toString();
  String resourcePath2 = Paths.get(testTempDir, "somewhere2").toString();

  // Event 1: create the function. Event 2: drop it.
  Function function = new Function(funcName, defaultDbName, funcClass, ownerName, PrincipalType.USER, startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, resourcePath)));
  msClient.createFunction(function);
  msClient.dropFunction(defaultDbName, funcName);

  // Both events should now be visible in the metastore notification log.
  NotificationEventResponse response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(2, response.getEventsSize());

  // Inspect the second event, which should be the DROP_FUNCTION.
  NotificationEvent dropEvent = response.getEvents().get(1);
  assertEquals(firstEventId + 2, dropEvent.getEventId());
  assertTrue(dropEvent.getEventTime() >= startTime);
  assertEquals(EventType.DROP_FUNCTION.toString(), dropEvent.getEventType());
  assertEquals(defaultDbName, dropEvent.getDbName());

  // The serialized message must carry the database and function names.
  DropFunctionMessage message = md.getDropFunctionMessage(dropEvent.getMessage());
  assertEquals(defaultDbName, message.getDB());
  assertEquals(funcName, message.getFunctionName());

  // The non-transactional listener should have recorded both event ids.
  MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.DROP_FUNCTION, firstEventId + 2);
  MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.CREATE_FUNCTION, firstEventId + 1);

  // When hive.metastore.transactional.event.listeners is set,
  // a failed event should not create a new notification.
  function = new Function(funcName2, defaultDbName, funcClass2, ownerName, PrincipalType.USER, startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, resourcePath2)));
  msClient.createFunction(function);
  DummyRawStoreFailEvent.setEventSucceed(false);
  try {
    msClient.dropFunction(defaultDbName, funcName2);
    fail("Error: drop function should've failed");
  } catch (Exception ex) {
    // expected
  }
  // Only the third (create) event exists; the failed drop added nothing.
  response = msClient.getNextNotification(firstEventId, 0, null);
  assertEquals(3, response.getEventsSize());
  testEventCounts(defaultDbName, firstEventId, null, null, 3);
}
Aggregations