Use of org.apache.hadoop.hive.ql.parse.repl.dump.HiveWrapper in project hive by apache.
Class ReplDumpTask, method dumpTable.
private void dumpTable(String dbName, String tblName, Path dbRoot) throws Exception {
  try {
    Hive db = getHive();
    HiveWrapper.Tuple<Table> tuple = new HiveWrapper(db, dbName).table(tblName);
    TableSpec tableSpec = new TableSpec(tuple.object);
    TableExport.Paths exportPaths =
        new TableExport.Paths(work.astRepresentationForErrorMsg, dbRoot, tblName, conf, true);
    String distCpDoAsUser = conf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
    // by default for all other objects this is false
    tuple.replicationSpec.setIsReplace(true);
    new TableExport(exportPaths, tableSpec, tuple.replicationSpec, db, distCpDoAsUser, conf).write();
    replLogger.tableLog(tblName, tableSpec.tableHandle.getTableType());
  } catch (InvalidTableException te) {
    // Bootstrap dump shouldn't fail if the table is dropped/renamed while dumping it.
    // Just log a debug message and skip it.
    LOG.debug(te.getMessage());
  }
}
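The tuple returned by HiveWrapper.table() pairs the fetched Table with the ReplicationSpec captured at read time, which is what the tuple.object and tuple.replicationSpec accesses above rely on. A minimal standalone sketch of that pattern, assuming an already-initialized HiveConf; the database and table names here are placeholders, not from the source:

import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.repl.dump.HiveWrapper;

// Hypothetical usage: 'conf' is an existing HiveConf; "sales_db"/"orders"
// are illustrative names.
Hive db = Hive.get(conf);
HiveWrapper.Tuple<Table> tuple = new HiveWrapper(db, "sales_db").table("orders");
Table table = tuple.object;
// The replication spec records the metastore state at fetch time, so the
// exported copy corresponds to a single consistent point.
System.out.println(table.getTableName() + " @ "
    + tuple.replicationSpec.getCurrentReplicationState());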
Use of org.apache.hadoop.hive.ql.parse.repl.dump.HiveWrapper in project hive by apache.
Class ReplDumpTask, method dumpDbMetadata.
private Path dumpDbMetadata(String dbName, Path dumpRoot) throws Exception {
  Path dbRoot = new Path(dumpRoot, dbName);
  // TODO : instantiating FS objects is generally costly. Refactor.
  FileSystem fs = dbRoot.getFileSystem(conf);
  Path dumpPath = new Path(dbRoot, EximUtil.METADATA_NAME);
  HiveWrapper.Tuple<Database> database = new HiveWrapper(getHive(), dbName).database();
  EximUtil.createDbExportDump(fs, dumpPath, database.object, database.replicationSpec);
  return dbRoot;
}
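During a bootstrap dump these two helpers run in sequence: the task first writes the database-level metadata file, then exports each table under the returned dbRoot. A simplified, hypothetical sketch of that driving loop, assuming Hive's getAllTables(dbName) listing; the real ReplDumpTask adds error handling and event-id bookkeeping around this:

// Illustrative driver showing how the helpers above compose;
// not the actual ReplDumpTask control flow.
Path dbRoot = dumpDbMetadata(dbName, dumpRoot);       // writes <dbRoot>/_metadata
for (String tblName : getHive().getAllTables(dbName)) {
  dumpTable(dbName, tblName, dbRoot);                 // per-table export under dbRoot
}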