Use of org.apache.hadoop.hive.ql.parse.repl.dump.io.FunctionSerializer in the Apache Hive project.
Example: class CreateFunctionHandler, method handle.
/**
 * Handles a CREATE_FUNCTION replication event: serializes the function object
 * carried in the event message to the event's metadata file, then writes the
 * dump metadata descriptor for the event.
 *
 * @param withinContext replication dump context supplying the event root path,
 *                      the Hive configuration, and the replication spec
 * @throws Exception if deserialization of the event message or writing of the
 *                   metadata file fails
 */
@Override
public void handle(Context withinContext) throws Exception {
  CreateFunctionMessage createFunctionMessage =
      deserializer.getCreateFunctionMessage(event.getMessage());
  // Fixed log text: this handler processes CREATE_FUNCTION events, but the
  // message previously said "CREATE_MESSAGE", which is misleading in logs.
  LOG.info("Processing#{} CREATE_FUNCTION message : {}", fromEventId(), event.getMessage());
  Path metadataPath = new Path(withinContext.eventRoot, EximUtil.METADATA_NAME);
  FileSystem fileSystem = metadataPath.getFileSystem(withinContext.hiveConf);
  // try-with-resources guarantees the writer (and underlying stream) is closed
  // even if serialization throws.
  try (JsonWriter jsonWriter = new JsonWriter(fileSystem, metadataPath)) {
    new FunctionSerializer(createFunctionMessage.getFunctionObj(), withinContext.hiveConf)
        .writeTo(jsonWriter, withinContext.replicationSpec);
  }
  withinContext.createDmd(this).write();
}
Use of org.apache.hadoop.hive.ql.parse.repl.dump.io.FunctionSerializer in the Apache Hive project.
Example: class ReplDumpTask, method dumpFunctionMetadata.
/**
 * Writes the metadata of every function defined in {@code dbName} into the
 * functions directory under the dump root, one JSON metadata file per function.
 * Functions for which no tuple can be resolved are silently skipped; each
 * successfully dumped function is reported to the replication logger.
 *
 * @param dbName   database whose functions are dumped
 * @param dumpRoot root directory of the replication dump
 * @throws Exception if listing functions or writing a metadata file fails
 */
private void dumpFunctionMetadata(String dbName, Path dumpRoot) throws Exception {
  Path functionsRoot = new Path(new Path(dumpRoot, dbName), FUNCTIONS_ROOT_DIR_NAME);
  for (String name : getHive().getFunctions(dbName, "*")) {
    HiveWrapper.Tuple<Function> fnTuple = functionTuple(name, dbName);
    if (fnTuple == null) {
      // No resolvable definition for this function; skip it.
      continue;
    }
    Path metadataFile = new Path(new Path(functionsRoot, name), FUNCTION_METADATA_FILE_NAME);
    // try-with-resources ensures the writer is closed even on serialization failure.
    try (JsonWriter writer = new JsonWriter(metadataFile.getFileSystem(conf), metadataFile)) {
      new FunctionSerializer(fnTuple.object, conf).writeTo(writer, fnTuple.replicationSpec);
    }
    replLogger.functionLog(name);
  }
}
Aggregations