Search in sources:

Example 1 with TableExport

use of org.apache.hadoop.hive.ql.parse.repl.dump.TableExport in project hive by apache.

Snippet from the class ExportTask, method execute:

@Override
protected int execute(DriverContext driverContext) {
    try {
        Hive hiveDb = getHive();
        // The Paths constructor also creates the export root directory.
        TableExport.Paths paths = new TableExport.Paths(work.getAstRepresentationForErrorMsg(), work.getExportRootDir(), conf, false);
        LOG.debug("Exporting data to: {}", paths.getExportRootDir());
        boolean written = new TableExport(paths, work.getTableSpec(), work.getReplicationSpec(), hiveDb, null, conf).write();
        if (!written) {
            // write() returning false means the table could not be exported.
            throw new SemanticException(ErrorMsg.EXIM_FOR_NON_NATIVE.getMsg());
        }
        return 0;
    } catch (Exception e) {
        LOG.error("failed", e);
        setException(e);
        return 1;
    }
}
Also used : TableExport(org.apache.hadoop.hive.ql.parse.repl.dump.TableExport) Hive(org.apache.hadoop.hive.ql.metadata.Hive) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 2 with TableExport

use of org.apache.hadoop.hive.ql.parse.repl.dump.TableExport in project hive by apache.

Snippet from the class ReplDumpTask, method dumpTable:

/**
 * Dumps a single table's export (metadata and data) under the given database dump root.
 *
 * @param dbName  name of the database the table belongs to
 * @param tblName name of the table to dump
 * @param dbRoot  root path of the database's dump directory; the table's export goes under it
 * @throws Exception on metastore or filesystem failures during the export
 */
private void dumpTable(String dbName, String tblName, Path dbRoot) throws Exception {
    try {
        Hive db = getHive();
        HiveWrapper.Tuple<Table> tuple = new HiveWrapper(db, dbName).table(tblName);
        TableSpec tableSpec = new TableSpec(tuple.object);
        TableExport.Paths exportPaths = new TableExport.Paths(work.astRepresentationForErrorMsg, dbRoot, tblName, conf, true);
        String distCpDoAsUser = conf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
        // by default for all other objects this is false
        tuple.replicationSpec.setIsReplace(true);
        // FIX: write()'s return value was previously ignored. Elsewhere (ExportTask.execute)
        // a false return is treated as a failed export; swallowing it here would leave the
        // bootstrap dump silently incomplete, so fail loudly instead.
        if (!new TableExport(exportPaths, tableSpec, tuple.replicationSpec, db, distCpDoAsUser, conf).write()) {
            throw new IllegalStateException("Export of table " + dbName + "." + tblName + " failed");
        }
        replLogger.tableLog(tblName, tableSpec.tableHandle.getTableType());
    } catch (InvalidTableException te) {
        // Bootstrap dump shouldn't fail if the table is dropped/renamed while dumping it.
        // Just log a debug message and skip it.
        LOG.debug(te.getMessage());
    }
}
Also used : TableExport(org.apache.hadoop.hive.ql.parse.repl.dump.TableExport) Hive(org.apache.hadoop.hive.ql.metadata.Hive) TableSpec(org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveWrapper(org.apache.hadoop.hive.ql.parse.repl.dump.HiveWrapper) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException)

Example 3 with TableExport

use of org.apache.hadoop.hive.ql.parse.repl.dump.TableExport in project hive by apache.

Snippet from the class ExportSemanticAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode ast) throws SemanticException {
    Tree tableTree = ast.getChild(0);
    Tree toTree = ast.getChild(1);
    // A third AST child means "EXPORT TABLE <tbl> TO <location> FOR replication(...)";
    // otherwise this is a plain export.
    ReplicationSpec replicationSpec = (ast.getChildCount() > 2)
        ? new ReplicationSpec((ASTNode) ast.getChild(2))
        : new ReplicationSpec();
    if (replicationSpec.getCurrentReplicationState() == null) {
        try {
            long currentEventId = db.getMSC().getCurrentNotificationEventId().getEventId();
            replicationSpec.setCurrentReplicationState(String.valueOf(currentEventId));
        } catch (Exception e) {
            throw new SemanticException("Error when getting current notification event ID", e);
        }
    }
    // Resolve the source table/partition specification.
    TableSpec sourceSpec;
    try {
        sourceSpec = new TableSpec(db, conf, (ASTNode) tableTree, false, true);
    } catch (SemanticException sme) {
        boolean tableGoneOrChanged = (sme.getCause() instanceof InvalidTableException)
            || (sme instanceof Table.ValidationFailureSemanticException);
        if (!(replicationSpec.isInReplicationScope() && tableGoneOrChanged)) {
            throw sme;
        }
        // In replication scope the export may run long after the table was dropped or
        // replaced with a different kind of table; that is not an error — this export
        // becomes a no-op and a future export captures the appropriate state.
        sourceSpec = null;
    }
    // Initialize the export path; all parsing is done at this point.
    String exportRootDirName = stripQuotes(toTree.getText());
    TableExport.Paths exportPaths = new TableExport.Paths(ErrorMsg.INVALID_PATH.getMsg(ast), exportRootDirName, conf, false);
    // Register the entities the export reads and writes for authorization checks.
    TableExport.AuthEntities authEntities =
        new TableExport(exportPaths, sourceSpec, replicationSpec, db, null, conf).getAuthEntities();
    inputs.addAll(authEntities.inputs);
    outputs.addAll(authEntities.outputs);
    // Configure the export work and schedule it as a root task.
    ExportWork exportWork = new ExportWork(exportRootDirName, sourceSpec, replicationSpec, ErrorMsg.INVALID_PATH.getMsg(ast));
    rootTasks.add(TaskFactory.get(exportWork));
}
Also used : Table(org.apache.hadoop.hive.ql.metadata.Table) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException) ExportWork(org.apache.hadoop.hive.ql.plan.ExportWork) TableExport(org.apache.hadoop.hive.ql.parse.repl.dump.TableExport) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException) Tree(org.antlr.runtime.tree.Tree)

Aggregations

TableExport (org.apache.hadoop.hive.ql.parse.repl.dump.TableExport)3 Hive (org.apache.hadoop.hive.ql.metadata.Hive)2 InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)2 Table (org.apache.hadoop.hive.ql.metadata.Table)2 Tree (org.antlr.runtime.tree.Tree)1 TableSpec (org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec)1 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)1 HiveWrapper (org.apache.hadoop.hive.ql.parse.repl.dump.HiveWrapper)1 ExportWork (org.apache.hadoop.hive.ql.plan.ExportWork)1