Search in sources :

Example 6 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The class FunctionSemanticAnalyzer, method analyzeCreateFunction.

/**
 * Analyzes a CREATE [TEMPORARY] FUNCTION statement and schedules the
 * corresponding {@link FunctionWork} task.
 *
 * @param ast the parse tree rooted at TOK_CREATEFUNCTION; child 0 is the
 *            function name, child 1 is the implementing class name
 * @throws SemanticException if a temporary function is given a qualified
 *                           (db-prefixed) name
 */
private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
    // ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
    String functionName = ast.getChild(0).getText().toLowerCase();
    boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null);
    String className = unescapeSQLString(ast.getChild(1).getText());
    // Temp functions are not allowed to have qualified names.
    if (isTemporaryFunction && FunctionUtils.isQualifiedFunctionName(functionName)) {
        throw new SemanticException("Temporary function cannot be created with a qualified name.");
    }
    // Collect any resources named in a USING clause; null means no clause was present.
    List<ResourceUri> resources = getResourceList(ast);
    if (!isTemporaryFunction && resources == null) {
        // Replication relies on the USING clause to locate function resources,
        // so warn when a permanent function omits it.
        // NOTE(review): the logger field "SESISON_STATE_LOG" (sic) is declared
        // elsewhere in this file with that spelling; renaming it is out of
        // scope for this block.
        SESISON_STATE_LOG.warn("permanent functions created without USING clause will not be replicated.");
    }
    CreateFunctionDesc desc = new CreateFunctionDesc(functionName, isTemporaryFunction, className, resources, null);
    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
    // Register write entities so authorization can restrict function creation.
    addEntities(functionName, className, isTemporaryFunction, resources);
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) CreateFunctionDesc(org.apache.hadoop.hive.ql.plan.CreateFunctionDesc) FunctionWork(org.apache.hadoop.hive.ql.plan.FunctionWork)

Example 7 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The class AbstractFunctionAnalyzer, method addEntities.

/**
 * Add write entities to the semantic analyzer to restrict function creation to privileged users.
 */
/**
 * Add write entities to the semantic analyzer to restrict function creation to privileged users.
 *
 * <p>For a permanent function the entity for its database 'namespace' is
 * registered first, then an entity for the function itself, and finally one
 * entity per referenced resource URI. Temporary functions have no database,
 * so only the function and resource entities are added. Which of these the
 * authorization implementation consults is up to that implementation.
 *
 * @param functionName possibly db-qualified function name
 * @param className    implementing class name
 * @param isTemporary  true for a temporary (session-scoped) function
 * @param resources    resource URIs from the USING clause, or null if none
 * @throws SemanticException if the database lookup fails
 */
protected void addEntities(String functionName, String className, boolean isTemporary, List<ResourceUri> resources) throws SemanticException {
    Database db = null;
    if (!isTemporary) {
        // Permanent functions live under a database; split the qualified name
        // and resolve the database object.
        try {
            String[] parts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
            db = getDatabase(parts[0]);
            functionName = parts[1];
        } catch (HiveException e) {
            LOG.error("Failed to get database ", e);
            throw new SemanticException(e);
        }
    }
    if (db != null) {
        outputs.add(new WriteEntity(db, WriteEntity.WriteType.DDL_NO_LOCK));
    }
    // The function itself is always registered (db is null for temporary functions).
    outputs.add(new WriteEntity(db, functionName, className, Type.FUNCTION, WriteEntity.WriteType.DDL_NO_LOCK));
    if (resources != null) {
        // Each resource URI becomes its own write entity.
        for (ResourceUri uri : resources) {
            outputs.add(toWriteEntity(uri.getUri()));
        }
    }
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Database(org.apache.hadoop.hive.metastore.api.Database) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 8 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The class CreateFunctionAnalyzer, method getResourceList.

/**
 * Extracts the resource URIs declared in a USING clause, if any.
 *
 * @param ast the CREATE FUNCTION parse tree
 * @return the list of resources, or null when no TOK_RESOURCE_LIST child exists
 * @throws SemanticException on a malformed resource node or unknown resource type
 */
private List<ResourceUri> getResourceList(ASTNode ast) throws SemanticException {
    ASTNode listNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST);
    if (listNode == null) {
        // No USING clause present; callers distinguish null from an empty list.
        return null;
    }
    List<ResourceUri> uris = new ArrayList<ResourceUri>();
    for (int i = 0; i < listNode.getChildCount(); i++) {
        // Each child must be ^(TOK_RESOURCE_URI $resType $resPath).
        ASTNode entry = (ASTNode) listNode.getChild(i);
        if (entry.getToken().getType() != HiveParser.TOK_RESOURCE_URI) {
            throw new SemanticException("Expected token type TOK_RESOURCE_URI but found " + entry.getToken().toString());
        }
        if (entry.getChildCount() != 2) {
            throw new SemanticException("Expected 2 child nodes of TOK_RESOURCE_URI but found " + entry.getChildCount());
        }
        ASTNode typeNode = (ASTNode) entry.getChild(0);
        ASTNode pathNode = (ASTNode) entry.getChild(1);
        // Map the parser token to a metastore ResourceType (JAR/FILE/ARCHIVE).
        ResourceType type = TOKEN_TYPE_TO_RESOURCE_TYPE.get(typeNode.getType());
        if (type == null) {
            throw new SemanticException("Unexpected token " + typeNode);
        }
        uris.add(new ResourceUri(type, PlanUtils.stripQuotes(pathNode.getText())));
    }
    return uris;
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ResourceType(org.apache.hadoop.hive.metastore.api.ResourceType) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 9 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The class FunctionUtils, method toFunctionResource.

/**
 * Converts metastore {@link ResourceUri} entries into session-level
 * {@link FunctionResource} objects, preserving order.
 *
 * @param resources metastore resource list; may be null
 * @return a same-length array of converted resources, or null if the input is null
 * @throws HiveException if a resource type cannot be mapped
 */
public static FunctionResource[] toFunctionResource(List<ResourceUri> resources) throws HiveException {
    if (resources == null) {
        return null;
    }
    FunctionResource[] result = new FunctionResource[resources.size()];
    int idx = 0;
    for (ResourceUri uri : resources) {
        // Translate the metastore resource type into the session-state equivalent.
        SessionState.ResourceType sessionType = getResourceType(uri.getResourceType());
        result[idx++] = new FunctionResource(sessionType, uri.getUri());
    }
    return result;
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) SessionState(org.apache.hadoop.hive.ql.session.SessionState) FunctionResource(org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource)

Example 10 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The class TestDbNotificationListener, method dropFunction.

@Test
public void dropFunction() throws Exception {
    // Verifies that dropping a function produces a DROP_FUNCTION notification
    // event with the expected db/function metadata, and that a failed drop
    // does not generate a new notification when transactional event
    // listeners are enabled.
    String defaultDbName = "default";
    String funcName = "dropfunction";
    String funcName2 = "dropfunction2";
    String ownerName = "me";
    String funcClass = "o.a.h.h.dropfunction";
    String funcClass2 = "o.a.h.h.dropfunction2";
    String funcResource = Paths.get(testTempDir, "somewhere").toString();
    String funcResource2 = Paths.get(testTempDir, "somewhere2").toString();
    Function func = new Function(funcName, defaultDbName, funcClass, ownerName, PrincipalType.USER, startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, funcResource)));
    // Event 1
    msClient.createFunction(func);
    // Event 2
    msClient.dropFunction(defaultDbName, funcName);
    // Get notifications from metastore
    NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
    assertEquals(2, rsp.getEventsSize());
    // Index 1 is the second event (the drop); event ids start at firstEventId + 1.
    NotificationEvent event = rsp.getEvents().get(1);
    assertEquals(firstEventId + 2, event.getEventId());
    assertTrue(event.getEventTime() >= startTime);
    assertEquals(EventType.DROP_FUNCTION.toString(), event.getEventType());
    assertEquals(defaultDbName, event.getDbName());
    // Parse the message field
    DropFunctionMessage dropFuncMsg = md.getDropFunctionMessage(event.getMessage());
    assertEquals(defaultDbName, dropFuncMsg.getDB());
    assertEquals(funcName, dropFuncMsg.getFunctionName());
    // Verify the eventID was passed to the non-transactional listener
    // (events are popped in reverse order: most recent first).
    MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.DROP_FUNCTION, firstEventId + 2);
    MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.CREATE_FUNCTION, firstEventId + 1);
    // When hive.metastore.transactional.event.listeners is set,
    // a failed event should not create a new notification
    func = new Function(funcName2, defaultDbName, funcClass2, ownerName, PrincipalType.USER, startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, funcResource2)));
    msClient.createFunction(func);
    // Force the next raw-store operation to fail so the drop aborts.
    DummyRawStoreFailEvent.setEventSucceed(false);
    try {
        msClient.dropFunction(defaultDbName, funcName2);
        fail("Error: drop function should've failed");
    } catch (Exception ex) {
    // expected: the induced failure surfaces here; nothing to assert on the exception itself
    }
    // Only 3 events total: the two successful ones above plus the second create;
    // the failed drop must not have added a notification.
    rsp = msClient.getNextNotification(firstEventId, 0, null);
    assertEquals(3, rsp.getEventsSize());
    testEventCounts(defaultDbName, firstEventId, null, null, 3);
}
Also used : NotificationEventResponse(org.apache.hadoop.hive.metastore.api.NotificationEventResponse) Function(org.apache.hadoop.hive.metastore.api.Function) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent) DropFunctionMessage(org.apache.hadoop.hive.metastore.messaging.DropFunctionMessage) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)

Aggregations

ResourceUri (org.apache.hadoop.hive.metastore.api.ResourceUri)34 Function (org.apache.hadoop.hive.metastore.api.Function)22 Test (org.junit.Test)13 ArrayList (java.util.ArrayList)6 Database (org.apache.hadoop.hive.metastore.api.Database)5 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)5 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)5 FunctionBuilder (org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder)5 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)5 IOException (java.io.IOException)4 URI (java.net.URI)4 HashSet (java.util.HashSet)4 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)4 HiveConf (org.apache.hadoop.hive.conf.HiveConf)3 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)3 Hive (org.apache.hadoop.hive.ql.metadata.Hive)3 SQLException (java.sql.SQLException)2 HashMap (java.util.HashMap)2 Path (org.apache.hadoop.fs.Path)2 ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException)2