Example 6 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in the Apache Hive project.

The method createFunction from the class TestHBaseStore.

@Test
public void createFunction() throws Exception {
    String funcName = "createfunc";
    int now = (int) (System.currentTimeMillis() / 1000);
    Function func = new Function(funcName, DB, "o.a.h.h.myfunc", "me", PrincipalType.USER, now,
        FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, "file:/tmp/somewhere")));
    store.createFunction(func);
    Function f = store.getFunction(DB, funcName);
    Assert.assertEquals(DB, f.getDbName());
    Assert.assertEquals(funcName, f.getFunctionName());
    Assert.assertEquals("o.a.h.h.myfunc", f.getClassName());
    Assert.assertEquals("me", f.getOwnerName());
    Assert.assertEquals(PrincipalType.USER, f.getOwnerType());
    Assert.assertTrue(now <= f.getCreateTime());
    Assert.assertEquals(FunctionType.JAVA, f.getFunctionType());
    Assert.assertEquals(1, f.getResourceUrisSize());
    Assert.assertEquals(ResourceType.JAR, f.getResourceUris().get(0).getResourceType());
    Assert.assertEquals("file:/tmp/somewhere", f.getResourceUris().get(0).getUri());
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) Test(org.junit.Test)
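
For comparison, the long Thrift constructor above can be replaced with the FunctionBuilder that appears in Example 10; the following sketch uses only the builder methods shown in these examples, so owner, create time, function type and resource URIs are left at the builder's defaults rather than assuming additional setters.

    // Sketch: building an equivalent function with FunctionBuilder (methods confirmed in Example 10).
    // Any setters beyond setDbName/setName/setClass are not shown on this page and are not assumed here.
    Function builtFunc = new FunctionBuilder()
        .setDbName(DB)
        .setName(funcName)
        .setClass("o.a.h.h.myfunc")
        .build();
    store.createFunction(builtFunc);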

Example 7 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in the Apache Hive project.

The method dropFunction from the class TestDbNotificationListener.

@Test
public void dropFunction() throws Exception {
    String defaultDbName = "default";
    String funcName = "dropfunction";
    String funcName2 = "dropfunction2";
    String ownerName = "me";
    String funcClass = "o.a.h.h.dropfunction";
    String funcClass2 = "o.a.h.h.dropfunction2";
    String funcResource = "file:/tmp/somewhere";
    String funcResource2 = "file:/tmp/somewhere2";
    Function func = new Function(funcName, defaultDbName, funcClass, ownerName, PrincipalType.USER,
        startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, funcResource)));
    // Event 1
    msClient.createFunction(func);
    // Event 2
    msClient.dropFunction(defaultDbName, funcName);
    // Get notifications from metastore
    NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
    assertEquals(2, rsp.getEventsSize());
    NotificationEvent event = rsp.getEvents().get(1);
    assertEquals(firstEventId + 2, event.getEventId());
    assertTrue(event.getEventTime() >= startTime);
    assertEquals(EventType.DROP_FUNCTION.toString(), event.getEventType());
    assertEquals(defaultDbName, event.getDbName());
    // Parse the message field
    DropFunctionMessage dropFuncMsg = md.getDropFunctionMessage(event.getMessage());
    assertEquals(defaultDbName, dropFuncMsg.getDB());
    assertEquals(funcName, dropFuncMsg.getFunctionName());
    // Verify the eventID was passed to the non-transactional listener
    MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.DROP_FUNCTION, firstEventId + 2);
    MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.CREATE_FUNCTION, firstEventId + 1);
    // When hive.metastore.transactional.event.listeners is set,
    // a failed event should not create a new notification
    func = new Function(funcName2, defaultDbName, funcClass2, ownerName, PrincipalType.USER,
        startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, funcResource2)));
    msClient.createFunction(func);
    DummyRawStoreFailEvent.setEventSucceed(false);
    try {
        msClient.dropFunction(defaultDbName, funcName2);
        fail("Error: drop function should've failed");
    } catch (Exception ex) {
        // expected: DummyRawStoreFailEvent forces this drop to fail
    }
    rsp = msClient.getNextNotification(firstEventId, 0, null);
    assertEquals(3, rsp.getEventsSize());
}
Also used : NotificationEventResponse(org.apache.hadoop.hive.metastore.api.NotificationEventResponse) Function(org.apache.hadoop.hive.metastore.api.Function) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent) DropFunctionMessage(org.apache.hadoop.hive.metastore.messaging.DropFunctionMessage) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)
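
The same notification stream can be scanned for drop-function events only; a small sketch built from the calls already used in this test (the filter argument to getNextNotification stays null as above, and java.util.List/ArrayList are assumed to be imported):

    // Sketch: collect the names of all functions dropped since firstEventId,
    // reusing only calls that appear in the test above.
    NotificationEventResponse all = msClient.getNextNotification(firstEventId, 0, null);
    List<String> droppedFunctions = new ArrayList<>();
    for (NotificationEvent e : all.getEvents()) {
        if (EventType.DROP_FUNCTION.toString().equals(e.getEventType())) {
            DropFunctionMessage msg = md.getDropFunctionMessage(e.getMessage());
            droppedFunctions.add(msg.getFunctionName());
        }
    }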

Example 8 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in the Apache Hive project.

The method onDropFunction from the class DbNotificationListener.

/**
 * @param fnEvent function event
 * @throws MetaException
 */
@Override
public void onDropFunction(DropFunctionEvent fnEvent) throws MetaException {
    Function fn = fnEvent.getFunction();
    NotificationEvent event = new NotificationEvent(0, now(), EventType.DROP_FUNCTION.toString(), msgFactory.buildDropFunctionMessage(fn).toString());
    event.setDbName(fn.getDbName());
    process(event, fnEvent);
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent)
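
For this listener to actually receive drop-function events, it has to be registered with the metastore. A minimal sketch of that registration follows; the property name is the one referenced in the comment in Example 7, while the exact configuration class the metastore expects differs between Hive releases, so this is illustrative only.

    // Sketch: register DbNotificationListener as a transactional event listener.
    // "hive.metastore.transactional.event.listeners" is quoted from Example 7; whether a HiveConf
    // or a plain Hadoop Configuration is used depends on the Hive release (assumption here).
    Configuration conf = new Configuration();
    conf.set("hive.metastore.transactional.event.listeners",
        DbNotificationListener.class.getName());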

Example 9 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in the Apache Hive project.

The method dropAllStoreObjects from the class TestObjectStore.

private static void dropAllStoreObjects(RawStore store) throws MetaException, InvalidObjectException, InvalidInputException {
    try {
        Deadline.registerIfNot(100000);
        List<Function> functions = store.getAllFunctions();
        for (Function func : functions) {
            store.dropFunction(func.getDbName(), func.getFunctionName());
        }
        List<String> dbs = store.getAllDatabases();
        for (String db : dbs) {
            List<String> tbls = store.getAllTables(db);
            for (String tbl : tbls) {
                Deadline.startTimer("getPartition");
                List<Partition> parts = store.getPartitions(db, tbl, 100);
                for (Partition part : parts) {
                    store.dropPartition(db, tbl, part.getValues());
                }
                // Find any constraints and drop them
                Set<String> constraints = new HashSet<>();
                List<SQLPrimaryKey> pk = store.getPrimaryKeys(db, tbl);
                if (pk != null) {
                    for (SQLPrimaryKey pkcol : pk) {
                        constraints.add(pkcol.getPk_name());
                    }
                }
                List<SQLForeignKey> fks = store.getForeignKeys(null, null, db, tbl);
                if (fks != null) {
                    for (SQLForeignKey fkcol : fks) {
                        constraints.add(fkcol.getFk_name());
                    }
                }
                for (String constraint : constraints) {
                    store.dropConstraint(db, tbl, constraint);
                }
                store.dropTable(db, tbl);
            }
            store.dropDatabase(db);
        }
        List<String> roles = store.listRoleNames();
        for (String role : roles) {
            store.removeRole(role);
        }
    } catch (NoSuchObjectException e) {
        // ignore objects that have already been removed
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Function(org.apache.hadoop.hive.metastore.api.Function) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) HashSet(java.util.HashSet)
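
A cleanup helper like this is typically wired into test setup or teardown; the sketch below shows how it might be invoked. The objectStore field and the JUnit @After hook follow common test conventions and are assumptions here, not part of the example above.

    // Sketch: clear the metastore between tests so each test starts from an empty store.
    // "objectStore" is assumed to be a RawStore field initialized elsewhere in the test class.
    @After
    public void tearDown() throws Exception {
        dropAllStoreObjects(objectStore);
    }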

Example 10 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in the Apache Hive project.

The method testDropDatabaseWithFunctionCascade from the class TestDatabases.

@Test
public void testDropDatabaseWithFunctionCascade() throws Exception {
    Database database = testDatabases[0];
    Function testFunction = new FunctionBuilder()
        .setDbName(database.getName())
        .setName("test_function")
        .setClass("org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper")
        .build();
    client.createFunction(testFunction);
    // deleteData = true, ignoreUnknownDb = true, cascade = true
    client.dropDatabase(database.getName(), true, true, true);
    Assert.assertFalse("The directory should be removed", metaStore.isPathExists(new Path(database.getLocationUri())));
}
Also used : Path(org.apache.hadoop.fs.Path) Function(org.apache.hadoop.hive.metastore.api.Function) FunctionBuilder(org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
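
The cascade flag is what allows the database to be dropped while the function still exists. A sketch of the non-cascade path follows; the concrete exception type the metastore throws in that case is not shown in these examples, so a broad catch is used and the expectation of failure is an assumption.

    // Sketch: without cascade the drop is expected to be rejected while the function still exists.
    client.createFunction(testFunction);
    try {
        client.dropDatabase(database.getName(), true, true, false);
        Assert.fail("Dropping a non-empty database should fail when cascade is false");
    } catch (Exception expected) {
        // expected: the database still contains test_function
    }
    client.dropFunction(database.getName(), "test_function");
    client.dropDatabase(database.getName(), true, true, true);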

Aggregations

Function (org.apache.hadoop.hive.metastore.api.Function) 69
Test (org.junit.Test) 47
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest) 38
ResourceUri (org.apache.hadoop.hive.metastore.api.ResourceUri) 17
TTransportException (org.apache.thrift.transport.TTransportException) 11
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 9
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 8
ArrayList (java.util.ArrayList) 7
IOException (java.io.IOException) 6
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException) 6
FunctionBuilder (org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder) 6
Database (org.apache.hadoop.hive.metastore.api.Database) 4
NotificationEvent (org.apache.hadoop.hive.metastore.api.NotificationEvent) 4
TApplicationException (org.apache.thrift.TApplicationException) 4
TException (org.apache.thrift.TException) 4
HashSet (java.util.HashSet) 3
Path (org.apache.hadoop.fs.Path) 3
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException) 3
Partition (org.apache.hadoop.hive.metastore.api.Partition) 3
SQLException (java.sql.SQLException) 2