Use of org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter in project hive by apache: the handle method of the CreateFunctionHandler class.
/**
 * Dumps the metadata of a CREATE FUNCTION replication event.
 *
 * <p>Deserializes the function object carried by the event message, writes it as JSON
 * under {@code <eventRoot>/_metadata} via {@link FunctionSerializer}, and finally writes
 * the event's dump-metadata descriptor ({@code createDmd(this).write()}).
 *
 * @param withinContext supplies the event root path, Hive configuration, and replication spec
 * @throws Exception if message deserialization or writing the metadata file fails
 */
@Override
public void handle(Context withinContext) throws Exception {
CreateFunctionMessage createFunctionMessage = deserializer.getCreateFunctionMessage(event.getMessage());
// Fixed log text: this handler processes CREATE_FUNCTION events, not "CREATE_MESSAGE".
LOG.info("Processing#{} CREATE_FUNCTION message : {}", fromEventId(), event.getMessage());
Path metadataPath = new Path(withinContext.eventRoot, EximUtil.METADATA_NAME);
FileSystem fileSystem = metadataPath.getFileSystem(withinContext.hiveConf);
// try-with-resources guarantees the JSON writer is flushed/closed even on failure.
try (JsonWriter jsonWriter = new JsonWriter(fileSystem, metadataPath)) {
new FunctionSerializer(createFunctionMessage.getFunctionObj(), withinContext.hiveConf).writeTo(jsonWriter, withinContext.replicationSpec);
}
withinContext.createDmd(this).write();
}
Use of org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter in project hive by apache: the dumpConstraintMetadata method of the ReplDumpTask class.
/**
 * Dumps the constraint metadata (primary key, foreign key, unique, not-null) of one table
 * during a bootstrap replication dump.
 *
 * <p>Writes up to two JSON files under {@code <dbRoot>/<CONSTRAINTS_ROOT_DIR_NAME>}:
 * a "common" file holding PK/unique/not-null constraints and a separate file for
 * foreign keys, each only when the corresponding constraint list is non-empty.
 *
 * @param dbName  database of the table being dumped
 * @param tblName table whose constraints are dumped
 * @param dbRoot  root dump directory of the database
 * @throws Exception if fetching constraints or writing a constraint file fails
 */
private void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot) throws Exception {
try {
Path constraintsRoot = new Path(dbRoot, CONSTRAINTS_ROOT_DIR_NAME);
Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName);
Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName);
Hive db = getHive();
List<SQLPrimaryKey> pks = db.getPrimaryKeyList(dbName, tblName);
List<SQLForeignKey> fks = db.getForeignKeyList(dbName, tblName);
List<SQLUniqueConstraint> uks = db.getUniqueConstraintList(dbName, tblName);
List<SQLNotNullConstraint> nns = db.getNotNullConstraintList(dbName, tblName);
// PK/unique/not-null constraints share one "common" file; write it only when at least one exists.
if ((pks != null && !pks.isEmpty()) || (uks != null && !uks.isEmpty()) || (nns != null && !nns.isEmpty())) {
try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) {
ConstraintsSerializer serializer = new ConstraintsSerializer(pks, null, uks, nns, conf);
serializer.writeTo(jsonWriter, null);
}
}
// Foreign keys go into their own file so they can be applied after all referenced tables exist.
if (fks != null && !fks.isEmpty()) {
try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) {
ConstraintsSerializer serializer = new ConstraintsSerializer(null, fks, null, null, conf);
serializer.writeTo(jsonWriter, null);
}
}
} catch (NoSuchObjectException e) {
// Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it.
// Deliberately swallowed, but log which table was skipped and keep the exception for
// the stack trace instead of only e.getMessage().
LOG.debug("Skipping constraint dump for {}.{}; table no longer exists", dbName, tblName, e);
}
}
Use of org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter in project hive by apache: the dumpFunctionMetadata method of the ReplDumpTask class.
/**
 * Dumps the metadata of every function registered in the given database.
 *
 * <p>For each function that resolves to a dumpable tuple, serializes it as JSON to
 * {@code <dumpRoot>/<dbName>/<FUNCTIONS_ROOT_DIR_NAME>/<functionName>/<FUNCTION_METADATA_FILE_NAME>}
 * and records progress through the replication logger. Functions for which
 * {@code functionTuple} returns {@code null} are skipped.
 *
 * @param dbName   database whose functions are dumped
 * @param dumpRoot root directory of the replication dump
 * @throws Exception if listing functions or writing a metadata file fails
 */
private void dumpFunctionMetadata(String dbName, Path dumpRoot) throws Exception {
Path functionsRoot = new Path(new Path(dumpRoot, dbName), FUNCTIONS_ROOT_DIR_NAME);
for (String name : getHive().getFunctions(dbName, "*")) {
HiveWrapper.Tuple<Function> resolved = functionTuple(name, dbName);
if (resolved == null) {
// Not a dumpable function (e.g. could not be resolved); skip it.
continue;
}
Path metadataFile = new Path(new Path(functionsRoot, name), FUNCTION_METADATA_FILE_NAME);
try (JsonWriter writer = new JsonWriter(metadataFile.getFileSystem(conf), metadataFile)) {
new FunctionSerializer(resolved.object, conf).writeTo(writer, resolved.replicationSpec);
}
replLogger.functionLog(name);
}
}
Use of org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter in project hive by apache: the createDbExportDump method of the EximUtil class.
the class EximUtil method createDbExportDump.
/**
 * Writes the database object's metadata dump file at {@code metadataPath}.
 *
 * <p>Bootstrap-dump progress entries (keys prefixed with
 * {@code Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX}) are stripped from the parameters before
 * serialization and the original parameter map is restored afterwards, so the in-memory
 * {@link Database} object is left unchanged — even when serialization fails.
 *
 * @param fs              file system to write to
 * @param metadataPath    destination path of the metadata file
 * @param dbObj           database object to serialize (temporarily mutated, then restored)
 * @param replicationSpec replication scope descriptor passed to the serializer
 * @throws IOException       if writing the dump file fails
 * @throws SemanticException if serialization fails
 */
public static void createDbExportDump(FileSystem fs, Path metadataPath, Database dbObj, ReplicationSpec replicationSpec) throws IOException, SemanticException {
// WARNING NOTE : at this point, createDbExportDump lives only in a world where ReplicationSpec is in replication scope
// If we later make this work for non-repl cases, analysis of this logic might become necessary. Also, this is using
// Replv2 semantics, i.e. with listFiles laziness (no copy at export time)
// Remove all the entries from the parameters which are added for bootstrap dump progress
Map<String, String> parameters = dbObj.getParameters();
if (parameters != null) {
Map<String, String> tmpParameters = new HashMap<>(parameters);
tmpParameters.entrySet().removeIf(e -> e.getKey().startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX));
dbObj.setParameters(tmpParameters);
}
try (JsonWriter jsonWriter = new JsonWriter(fs, metadataPath)) {
new DBSerializer(dbObj).writeTo(jsonWriter, replicationSpec);
} finally {
// Restore the original map in a finally block: previously a failed write left dbObj
// holding the stripped parameter map, silently mutating the caller's object.
if (parameters != null) {
dbObj.setParameters(parameters);
}
}
}
Aggregations