Usage of org.apache.hadoop.hive.metastore.messaging.DropFunctionMessage in the Apache Hive project.
From class DbNotificationListener, method onDropFunction:
/**
 * Emits a DROP_FUNCTION notification for the function being removed.
 *
 * @param fnEvent function event carrying the dropped {@link Function}
 * @throws MetaException if serializing or processing the notification fails
 */
@Override
public void onDropFunction(DropFunctionEvent fnEvent) throws MetaException {
  Function dropped = fnEvent.getFunction();
  DropFunctionMessage message = MessageBuilder.getInstance().buildDropFunctionMessage(dropped);
  String payload = msgEncoder.getSerializer().serialize(message);
  NotificationEvent event =
      new NotificationEvent(0, now(), EventType.DROP_FUNCTION.toString(), payload);
  // Fall back to the default catalog when the function does not carry one.
  String catalog = dropped.isSetCatName() ? dropped.getCatName() : DEFAULT_CATALOG_NAME;
  event.setCatName(catalog);
  event.setDbName(dropped.getDbName());
  process(event, fnEvent);
}
Usage of org.apache.hadoop.hive.metastore.messaging.DropFunctionMessage in the Apache Hive project.
From class TestDbNotificationListener, method dropFunction:
// Verifies that dropping a function produces a DROP_FUNCTION notification with the
// correct database/function names, that listener event IDs are recorded in order, and
// that a failed drop (forced via DummyRawStoreFailEvent) produces NO new notification
// when transactional event listeners are configured.
// NOTE(review): statement order matters throughout — the metastore client calls, the
// popAndVerifyLastEventId calls (LIFO pop order), and the setEventSucceed toggle all
// share mutable listener state.
@Test
public void dropFunction() throws Exception {
String defaultDbName = "default";
String funcName = "dropfunction";
String funcName2 = "dropfunction2";
String ownerName = "me";
String funcClass = "o.a.h.h.dropfunction";
String funcClass2 = "o.a.h.h.dropfunction2";
String funcResource = Paths.get(testTempDir, "somewhere").toString();
String funcResource2 = Paths.get(testTempDir, "somewhere2").toString();
Function func = new Function(funcName, defaultDbName, funcClass, ownerName, PrincipalType.USER, startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, funcResource)));
// Event 1
msClient.createFunction(func);
// Event 2
msClient.dropFunction(defaultDbName, funcName);
// Get notifications from metastore
NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
assertEquals(2, rsp.getEventsSize());
// Index 1 is the second event after firstEventId: the DROP_FUNCTION.
NotificationEvent event = rsp.getEvents().get(1);
assertEquals(firstEventId + 2, event.getEventId());
assertTrue(event.getEventTime() >= startTime);
assertEquals(EventType.DROP_FUNCTION.toString(), event.getEventType());
assertEquals(defaultDbName, event.getDbName());
// Parse the message field
DropFunctionMessage dropFuncMsg = md.getDropFunctionMessage(event.getMessage());
assertEquals(defaultDbName, dropFuncMsg.getDB());
assertEquals(funcName, dropFuncMsg.getFunctionName());
// Verify the eventID was passed to the non-transactional listener.
// Pops are LIFO, so the most recent event (DROP_FUNCTION) is verified first.
MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.DROP_FUNCTION, firstEventId + 2);
MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.CREATE_FUNCTION, firstEventId + 1);
// When hive.metastore.transactional.event.listeners is set,
// a failed event should not create a new notification
func = new Function(funcName2, defaultDbName, funcClass2, ownerName, PrincipalType.USER, startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, funcResource2)));
msClient.createFunction(func);
// Force the next raw-store event to fail so the drop below rolls back.
DummyRawStoreFailEvent.setEventSucceed(false);
try {
msClient.dropFunction(defaultDbName, funcName2);
fail("Error: drop function should've failed");
} catch (Exception ex) {
// expected
}
// Only 3 events total: create, drop, create — the failed drop added none.
rsp = msClient.getNextNotification(firstEventId, 0, null);
assertEquals(3, rsp.getEventsSize());
testEventCounts(defaultDbName, firstEventId, null, null, 3);
}
Usage of org.apache.hadoop.hive.metastore.messaging.DropFunctionMessage in the Apache Hive project.
From class DropFunctionHandler, method handle:
/**
 * Builds the REPL load task that replays a DROP FUNCTION event.
 *
 * @param context replication message context holding the serialized event payload
 * @return a single-element list containing the drop-function DDL task
 * @throws SemanticException if the function cannot be resolved or the task cannot be built
 */
@Override
public List<Task<?>> handle(Context context) throws SemanticException {
  DropFunctionMessage msg = deserializer.getDropFunctionMessage(context.dmd.getPayload());
  // Prefer the database name supplied by the load context; fall back to the one in the message.
  String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
  String qualifiedFunctionName =
      FunctionUtils.qualifyFunctionName(msg.getFunctionName(), actualDbName);
  // When the load is invoked via Scheduler's executor route, the function resources will not be
  // there in classpath. Processing drop function event tries to unregister the function resulting
  // in ClassNotFoundException being thrown in such case.
  // Obtaining the FunctionInfo from FunctionRegistry adds the function's resource URLs to the
  // UDFClassLoader; the call is made purely for that side effect, so its result is discarded.
  FunctionRegistry.getFunctionInfo(qualifiedFunctionName);
  DropFunctionDesc desc =
      new DropFunctionDesc(qualifiedFunctionName, false, context.eventOnlyReplicationSpec());
  Task<DDLWork> dropFunctionTask = TaskFactory.get(
      new DDLWork(readEntitySet, writeEntitySet, desc, true, context.getDumpDirectory(),
          context.getMetricCollector()),
      context.hiveConf);
  context.log.debug("Added drop function task : {}:{}", dropFunctionTask.getId(), desc.getName());
  updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
  return Collections.singletonList(dropFunctionTask);
}
Aggregations