Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class TestHBaseSchemaTool, method oneMondoTest.
@Test
public void oneMondoTest() throws Exception {
// This is a pain to do in one big test, but we have to control the order so that we have tests
// without dbs, etc.
HBaseSchemaTool tool = new HBaseSchemaTool();
ByteArrayOutputStream outStr = new ByteArrayOutputStream();
PrintStream out = new PrintStream(outStr);
ByteArrayOutputStream errStr = new ByteArrayOutputStream();
PrintStream err = new PrintStream(errStr);
// This needs to be up front before we create any tables or partitions
tool.go(false, HBaseReadWrite.SD_TABLE, null, "whatever", conf, out, err);
Assert.assertEquals("No storage descriptors" + lsep, outStr.toString());
// This one needs to be up front too
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.SEQUENCES_TABLE, null, "whatever", conf, out, err);
Assert.assertEquals("No sequences" + lsep, outStr.toString());
// Create some databases
String[] dbNames = new String[3];
for (int i = 0; i < dbNames.length; i++) {
dbNames[i] = "db" + i;
Database db = new Database(dbNames[i], "no description", "file:///tmp", emptyParameters);
store.createDatabase(db);
}
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.DB_TABLE, "db0", null, conf, out, err);
Assert.assertEquals("{\"name\":\"db0\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.DB_TABLE, null, ".*", conf, out, err);
Assert.assertEquals("{\"name\":\"db0\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep + "{\"name\":\"db1\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep + "{\"name\":\"db2\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.DB_TABLE, null, "db[12]", conf, out, err);
Assert.assertEquals("{\"name\":\"db1\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep + "{\"name\":\"db2\",\"description\":\"no description\"," + "\"locationUri\":\"file:///tmp\",\"parameters\":{}}" + lsep, outStr.toString());
String[] roleNames = new String[2];
for (int i = 0; i < roleNames.length; i++) {
  roleNames[i] = "role" + i;
  store.addRole(roleNames[i], "me");
}
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.ROLE_TABLE, null, "role.", conf, out, err);
Assert.assertEquals("{\"roleName\":\"role0\",\"createTime\":now,\"ownerName\":\"me\"}" + lsep + "{\"roleName\":\"role1\",\"createTime\":now,\"ownerName\":\"me\"}" + lsep, outStr.toString().replaceAll("createTime\":[0-9]+", "createTime\":now"));
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.ROLE_TABLE, "role1", null, conf, out, err);
Assert.assertEquals("{\"roleName\":\"role1\",\"createTime\":now,\"ownerName\":\"me\"}" + lsep, outStr.toString().replaceAll("createTime\":[0-9]+", "createTime\":now"));
Role role1 = store.getRole("role1");
store.grantRole(role1, "fred", PrincipalType.USER, "me", PrincipalType.USER, false);
store.grantRole(role1, "joanne", PrincipalType.USER, "me", PrincipalType.USER, false);
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.USER_TO_ROLE_TABLE, null, ".*", conf, out, err);
Assert.assertEquals("fred: role1" + lsep + "joanne: role1" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.USER_TO_ROLE_TABLE, "joanne", null, conf, out, err);
Assert.assertEquals("role1" + lsep, outStr.toString());
String[] funcNames = new String[3];
for (int i = 0; i < funcNames.length; i++) {
  funcNames[i] = "func" + i;
  Function function = new Function(funcNames[i], "db1", "Function", "me", PrincipalType.USER, 0, FunctionType.JAVA, null);
  store.createFunction(function);
}
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.FUNC_TABLE, "db1.func0", null, conf, out, err);
Assert.assertEquals("{\"functionName\":\"func0\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.FUNC_TABLE, null, ".*", conf, out, err);
Assert.assertEquals("{\"functionName\":\"func0\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep + "{\"functionName\":\"func1\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep + "{\"functionName\":\"func2\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.FUNC_TABLE, null, "db1.func[12]", conf, out, err);
Assert.assertEquals("{\"functionName\":\"func1\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep + "{\"functionName\":\"func2\",\"dbName\":\"db1\"," + "\"className\":\"Function\",\"ownerName\":\"me\",\"ownerType\":1,\"createTime\":0," + "\"functionType\":1}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.GLOBAL_PRIVS_TABLE, null, null, conf, out, err);
Assert.assertEquals("No global privileges" + lsep, outStr.toString());
List<HiveObjectPrivilege> privileges = new ArrayList<>();
HiveObjectRef hiveObjRef = new HiveObjectRef(HiveObjectType.GLOBAL, "db0", "tab0", null, null);
PrivilegeGrantInfo grantInfo = new PrivilegeGrantInfo("read", 0, "me", PrincipalType.USER, false);
HiveObjectPrivilege hop = new HiveObjectPrivilege(hiveObjRef, "user", PrincipalType.USER, grantInfo);
privileges.add(hop);
grantInfo = new PrivilegeGrantInfo("create", 0, "me", PrincipalType.USER, true);
hop = new HiveObjectPrivilege(hiveObjRef, "user", PrincipalType.USER, grantInfo);
privileges.add(hop);
PrivilegeBag pBag = new PrivilegeBag(privileges);
store.grantPrivileges(pBag);
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.GLOBAL_PRIVS_TABLE, null, null, conf, out, err);
Assert.assertEquals("{\"userPrivileges\":{\"user\":[{\"privilege\":\"read\",\"createTime\":0," + "\"grantor\":\"me\",\"grantorType\":1,\"grantOption\":0},{\"privilege\":\"create\"," + "\"createTime\":0,\"grantor\":\"me\",\"grantorType\":1,\"grantOption\":1}]}}" + lsep, outStr.toString());
String[] tableNames = new String[3];
for (int i = 0; i < tableNames.length; i++) {
  tableNames[i] = "tab" + i;
  StorageDescriptor sd = new StorageDescriptor(Arrays.asList(new FieldSchema("col1", "int", ""), new FieldSchema("col2", "varchar(32)", "")), "/tmp", null, null, false, 0, null, null, null, Collections.<String, String>emptyMap());
  Table tab = new Table(tableNames[i], dbNames[0], "me", 0, 0, 0, sd, Arrays.asList(new FieldSchema("pcol1", "string", ""), new FieldSchema("pcol2", "string", "")), Collections.<String, String>emptyMap(), null, null, null);
  store.createTable(tab);
}
ColumnStatisticsDesc tableStatsDesc = new ColumnStatisticsDesc(false, "db0", "tab0");
ColumnStatisticsData tcsd = new ColumnStatisticsData();
LongColumnStatsData tlcsd = new LongColumnStatsData(1, 2);
tlcsd.setLowValue(-95);
tlcsd.setHighValue(95);
tcsd.setLongStats(tlcsd);
ColumnStatisticsData tcsd2 = new ColumnStatisticsData();
tcsd2.setStringStats(new StringColumnStatsData(97, 18.78, 29, 397));
List<ColumnStatisticsObj> tcsos = Arrays.asList(new ColumnStatisticsObj("col1", "int", tcsd), new ColumnStatisticsObj("col2", "varchar(32)", tcsd2));
ColumnStatistics tStatObj = new ColumnStatistics(tableStatsDesc, tcsos);
store.updateTableColumnStatistics(tStatObj);
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.TABLE_TABLE, "db0.tab1", null, conf, out, err);
Assert.assertEquals("{\"tableName\":\"tab1\",\"dbName\":\"db0\",\"owner\":\"me\"," + "\"createTime\":0,\"lastAccessTime\":0,\"retention\":0," + "\"partitionKeys\":[{\"name\":\"pcol1\",\"type\":\"string\",\"comment\":\"\"}," + "{\"name\":\"pcol2\",\"type\":\"string\",\"comment\":\"\"}],\"parameters\":{}," + "\"tableType\":\"\",\"rewriteEnabled\":0} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats:" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.TABLE_TABLE, null, "db0.*", conf, out, err);
Assert.assertEquals("{\"tableName\":\"tab0\",\"dbName\":\"db0\",\"owner\":\"me\"," + "\"createTime\":0,\"lastAccessTime\":0,\"retention\":0," + "\"partitionKeys\":[{\"name\":\"pcol1\",\"type\":\"string\",\"comment\":\"\"}," + "{\"name\":\"pcol2\",\"type\":\"string\",\"comment\":\"\"}],\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"}," + "\"tableType\":\"\",\"rewriteEnabled\":0} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats: column " + "col1: {\"colName\":\"col1\",\"colType\":\"int\"," + "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1," + "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\"," + "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78," + "\"numNulls\":29,\"numDVs\":397,\"bitVectors\":\"\"}}}" + lsep + "{\"tableName\":\"tab1\",\"dbName\":\"db0\",\"owner\":\"me\",\"createTime\":0," + "\"lastAccessTime\":0,\"retention\":0,\"partitionKeys\":[{\"name\":\"pcol1\"," + "\"type\":\"string\",\"comment\":\"\"},{\"name\":\"pcol2\",\"type\":\"string\"," + "\"comment\":\"\"}],\"parameters\":{},\"tableType\":\"\",\"rewriteEnabled\":0} sdHash: " + "qQTgZAi5VzgpozzFGmIVTQ stats:" + lsep + "{\"tableName\":\"tab2\",\"dbName\":\"db0\",\"owner\":\"me\",\"createTime\":0," + "\"lastAccessTime\":0,\"retention\":0,\"partitionKeys\":[{\"name\":\"pcol1\"," + "\"type\":\"string\",\"comment\":\"\"},{\"name\":\"pcol2\",\"type\":\"string\"," + "\"comment\":\"\"}],\"parameters\":{},\"tableType\":\"\",\"rewriteEnabled\":0} sdHash: " + "qQTgZAi5VzgpozzFGmIVTQ stats:" + lsep, outStr.toString());
List<List<String>> partVals = Arrays.asList(Arrays.asList("a", "b"), Arrays.asList("c", "d"));
for (List<String> pv : partVals) {
  StorageDescriptor sd = new StorageDescriptor(Arrays.asList(new FieldSchema("col1", "int", ""), new FieldSchema("col2", "varchar(32)", "")), "/tmp", null, null, false, 0, null, null, null, Collections.<String, String>emptyMap());
  Partition p = new Partition(pv, "db0", "tab1", 0, 0, sd, Collections.<String, String>emptyMap());
  store.addPartition(p);
}
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.PART_TABLE, "db0.tab1.a.b", null, conf, out, err);
Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: " + "qQTgZAi5VzgpozzFGmIVTQ stats:" + lsep, outStr.toString());
ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(false, "db0", "tab1");
statsDesc.setPartName("pcol1=c/pcol2=d");
ColumnStatisticsData csd1 = new ColumnStatisticsData();
LongColumnStatsData lcsd = new LongColumnStatsData(1, 2);
lcsd.setLowValue(-95);
lcsd.setHighValue(95);
csd1.setLongStats(lcsd);
ColumnStatisticsData csd2 = new ColumnStatisticsData();
csd2.setStringStats(new StringColumnStatsData(97, 18.78, 29, 397));
List<ColumnStatisticsObj> csos = Arrays.asList(new ColumnStatisticsObj("col1", "int", csd1), new ColumnStatisticsObj("col2", "varchar(32)", csd2));
ColumnStatistics statsObj = new ColumnStatistics(statsDesc, csos);
store.updatePartitionColumnStatistics(statsObj, partVals.get(1));
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.PART_TABLE, "db0.tab1.c.d", null, conf, out, err);
Assert.assertEquals("{\"values\":[\"c\",\"d\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats: column col1: {\"colName\":\"col1\",\"colType\":\"int\"," + "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1," + "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\"," + "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78,\"numNulls\":29," + "\"numDVs\":397,\"bitVectors\":\"\"}}}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.PART_TABLE, null, "db0.tab1", conf, out, err);
Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats:" + lsep + "{\"values\":[\"c\",\"d\"],\"dbName\":\"db0\",\"tableName\":\"tab1\",\"createTime\":0," + "\"lastAccessTime\":0,\"parameters\":{\"COLUMN_STATS_ACCURATE\":\"{\\\"COLUMN_STATS\\\":{\\\"col1\\\":\\\"true\\\",\\\"col2\\\":\\\"true\\\"}}\"}} sdHash: qQTgZAi5VzgpozzFGmIVTQ stats: column " + "col1: {\"colName\":\"col1\",\"colType\":\"int\"," + "\"statsData\":{\"longStats\":{\"lowValue\":-95,\"highValue\":95,\"numNulls\":1," + "\"numDVs\":2,\"bitVectors\":\"\"}}} column col2: {\"colName\":\"col2\",\"colType\":\"varchar(32)\"," + "\"statsData\":{\"stringStats\":{\"maxColLen\":97,\"avgColLen\":18.78,\"numNulls\":29," + "\"numDVs\":397,\"bitVectors\":\"\"}}}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.PART_TABLE, null, "db0.tab1.a", conf, out, err);
Assert.assertEquals("{\"values\":[\"a\",\"b\"],\"dbName\":\"db0\",\"tableName\":\"tab1\"," + "\"createTime\":0,\"lastAccessTime\":0,\"parameters\":{}} sdHash: qQTgZAi5VzgpozzFGmIVTQ " + "stats:" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.SD_TABLE, "qQTgZAi5VzgpozzFGmIVTQ", null, conf, out, err);
Assert.assertEquals("{\"cols\":[{\"name\":\"col1\",\"type\":\"int\",\"comment\":\"\"}," + "{\"name\":\"col2\",\"type\":\"varchar(32)\",\"comment\":\"\"}],\"compressed\":0," + "\"numBuckets\":0,\"bucketCols\":[],\"sortCols\":[],\"storedAsSubDirectories\":0}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.SD_TABLE, null, "whatever", conf, out, err);
Assert.assertEquals("qQTgZAi5VzgpozzFGmIVTQ: {\"cols\":[{\"name\":\"col1\",\"type\":\"int\"," + "\"comment\":\"\"}," + "{\"name\":\"col2\",\"type\":\"varchar(32)\",\"comment\":\"\"}],\"compressed\":0," + "\"numBuckets\":0,\"bucketCols\":[],\"sortCols\":[],\"storedAsSubDirectories\":0}" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.SECURITY_TABLE, null, "whatever", conf, out, err);
Assert.assertEquals("No security related entries" + lsep, outStr.toString());
store.addMasterKey("this be a key");
store.addToken("tokenid", "delegation token");
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.SECURITY_TABLE, null, "whatever", conf, out, err);
Assert.assertEquals("Master key 0: this be a key" + lsep + "Delegation token tokenid: delegation token" + lsep, outStr.toString());
outStr = new ByteArrayOutputStream();
out = new PrintStream(outStr);
tool.go(false, HBaseReadWrite.SEQUENCES_TABLE, null, "whatever", conf, out, err);
Assert.assertEquals("master_key: 1" + lsep, outStr.toString());
}
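Every check above repeats the same capture boilerplate (a fresh ByteArrayOutputStream wrapped in a PrintStream) before calling tool.go. A small helper along the following lines would cut that repetition; the helper itself is hypothetical and is not part of the original test, and it relies only on the tool.go(boolean, table, key, regex, conf, out, err) signature already exercised above.
// Hypothetical helper (not in the original test): run the tool once and
// return whatever it wrote to stdout, discarding stderr.
private String runTool(HBaseSchemaTool tool, String table, String key, String regex) throws Exception {
  ByteArrayOutputStream outStr = new ByteArrayOutputStream();
  ByteArrayOutputStream errStr = new ByteArrayOutputStream();
  tool.go(false, table, key, regex, conf, new PrintStream(outStr), new PrintStream(errStr));
  return outStr.toString();
}
With it, each assertion collapses to a single line, e.g. Assert.assertEquals("No sequences" + lsep, runTool(tool, HBaseReadWrite.SEQUENCES_TABLE, null, "whatever"));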
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class TestHBaseStore, method dropFunction.
@Test
public void dropFunction() throws Exception {
String funcName = "delfunc";
int now = (int) (System.currentTimeMillis() / 1000);
Function func = new Function(funcName, DB, "o.a.h.h.myfunc", "me", PrincipalType.USER, now, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, "file:/tmp/somewhere")));
store.createFunction(func);
Function f = store.getFunction(DB, funcName);
Assert.assertNotNull(f);
store.dropFunction(DB, funcName);
// The store's getFunction returns null for a missing function rather than throwing
// NoSuchObjectException, so assert null instead of using thrown.expect(NoSuchObjectException.class).
Assert.assertNull(store.getFunction(DB, funcName));
}
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class TestDbNotificationListener, method createFunction.
@Test
public void createFunction() throws Exception {
String defaultDbName = "default";
String funcName = "createfunction";
String funcName2 = "createfunction2";
String ownerName = "me";
String funcClass = "o.a.h.h.createfunc";
String funcClass2 = "o.a.h.h.createfunc2";
String funcResource = "file:/tmp/somewhere";
String funcResource2 = "file:/tmp/somewhere2";
Function func = new Function(funcName, defaultDbName, funcClass, ownerName, PrincipalType.USER, startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, funcResource)));
// Event 1
msClient.createFunction(func);
// Get notifications from metastore
NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
assertEquals(1, rsp.getEventsSize());
NotificationEvent event = rsp.getEvents().get(0);
assertEquals(firstEventId + 1, event.getEventId());
assertTrue(event.getEventTime() >= startTime);
assertEquals(EventType.CREATE_FUNCTION.toString(), event.getEventType());
assertEquals(defaultDbName, event.getDbName());
// Parse the message field
CreateFunctionMessage createFuncMsg = md.getCreateFunctionMessage(event.getMessage());
assertEquals(defaultDbName, createFuncMsg.getDB());
Function funcObj = createFuncMsg.getFunctionObj();
assertEquals(defaultDbName, funcObj.getDbName());
assertEquals(funcName, funcObj.getFunctionName());
assertEquals(funcClass, funcObj.getClassName());
assertEquals(ownerName, funcObj.getOwnerName());
assertEquals(FunctionType.JAVA, funcObj.getFunctionType());
assertEquals(1, funcObj.getResourceUrisSize());
assertEquals(ResourceType.JAR, funcObj.getResourceUris().get(0).getResourceType());
assertEquals(funcResource, funcObj.getResourceUris().get(0).getUri());
// Verify the eventID was passed to the non-transactional listener
MockMetaStoreEventListener.popAndVerifyLastEventId(EventType.CREATE_FUNCTION, firstEventId + 1);
// When hive.metastore.transactional.event.listeners is set,
// a failed event should not create a new notification
DummyRawStoreFailEvent.setEventSucceed(false);
func = new Function(funcName2, defaultDbName, funcClass2, ownerName, PrincipalType.USER, startTime, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, funcResource2)));
try {
  msClient.createFunction(func);
  fail("Error: create function should've failed");
} catch (Exception ex) {
  // expected
}
rsp = msClient.getNextNotification(firstEventId, 0, null);
assertEquals(1, rsp.getEventsSize());
}
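A consumer of these events does not have to pass null as the filter the way the test does. The sketch below is a hedged illustration rather than code from the project: it polls incrementally for CREATE_FUNCTION events through the same getNextNotification(long, int, NotificationFilter) call used above, and the lastSeenId bookmark variable is hypothetical.
// Hedged sketch: poll only CREATE_FUNCTION events newer than a hypothetical bookmark.
long lastSeenId = firstEventId;
NotificationEventResponse resp = msClient.getNextNotification(lastSeenId, 10,
    event -> EventType.CREATE_FUNCTION.toString().equals(event.getEventType()));
for (NotificationEvent e : resp.getEvents()) {
  // handle the function-creation event here, then advance the bookmark
  lastSeenId = Math.max(lastSeenId, e.getEventId());
}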
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class DbNotificationListener, method onCreateFunction.
/**
 * @param fnEvent function event
 * @throws MetaException
 */
@Override
public void onCreateFunction(CreateFunctionEvent fnEvent) throws MetaException {
Function fn = fnEvent.getFunction();
NotificationEvent event = new NotificationEvent(0, now(), EventType.CREATE_FUNCTION.toString(), msgFactory.buildCreateFunctionMessage(fn).toString());
event.setDbName(fn.getDbName());
process(event, fnEvent);
}
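The same shape handles the other function lifecycle events. The drop-side counterpart would look roughly like the sketch below; it assumes a DropFunctionEvent with a getFunction() accessor, an EventType.DROP_FUNCTION constant, and a msgFactory.buildDropFunctionMessage builder, all of which should be read as assumptions here rather than verified API.
// Sketch only, under the assumptions stated above.
@Override
public void onDropFunction(DropFunctionEvent fnEvent) throws MetaException {
  Function fn = fnEvent.getFunction();
  NotificationEvent event = new NotificationEvent(0, now(), EventType.DROP_FUNCTION.toString(),
      msgFactory.buildDropFunctionMessage(fn).toString());
  event.setDbName(fn.getDbName());
  process(event, fnEvent);
}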
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class ReplDumpTask, method dumpFunctionMetadata.
private void dumpFunctionMetadata(String dbName, Path dumpRoot) throws Exception {
Path functionsRoot = new Path(new Path(dumpRoot, dbName), FUNCTIONS_ROOT_DIR_NAME);
List<String> functionNames = getHive().getFunctions(dbName, "*");
for (String functionName : functionNames) {
  HiveWrapper.Tuple<Function> tuple = functionTuple(functionName, dbName);
  if (tuple == null) {
    continue;
  }
  Path functionRoot = new Path(functionsRoot, functionName);
  Path functionMetadataFile = new Path(functionRoot, FUNCTION_METADATA_FILE_NAME);
  try (JsonWriter jsonWriter = new JsonWriter(functionMetadataFile.getFileSystem(conf), functionMetadataFile)) {
    FunctionSerializer serializer = new FunctionSerializer(tuple.object, conf);
    serializer.writeTo(jsonWriter, tuple.replicationSpec);
  }
  replLogger.functionLog(functionName);
}
}