
Example 16 with SnapshotException

Use of org.apache.hadoop.hdfs.protocol.SnapshotException in project hive by apache.

From class ReplUtils, method handleException:

public static int handleException(boolean isReplication, Throwable e, String nonRecoverablePath, ReplicationMetricCollector metricCollector, String stageName, HiveConf conf) {
    int errorCode;
    // A SnapshotException during replication maps to the dedicated SNAPSHOT_ERROR
    // code; any other failure derives its code from the exception message.
    if (isReplication && e instanceof SnapshotException) {
        errorCode = ErrorMsg.getErrorMsg("SNAPSHOT_ERROR").getErrorCode();
    } else {
        errorCode = ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode();
    }
    if (isReplication) {
        try {
            if (nonRecoverablePath != null) {
                final int recoverableLimit = ErrorMsg.GENERIC_ERROR.getErrorCode();
                String metricStage = getMetricStageName(stageName, metricCollector);
                // Codes above GENERIC_ERROR are non-recoverable: persist the stack trace
                // as a marker file and report the stage as needing admin intervention.
                if (errorCode > recoverableLimit) {
                    Path nonRecoverableMarker = new Path(new Path(nonRecoverablePath), ReplAck.NON_RECOVERABLE_MARKER.toString());
                    Utils.writeStackTrace(e, nonRecoverableMarker, conf);
                    metricCollector.reportStageEnd(metricStage, Status.FAILED_ADMIN, nonRecoverableMarker.toString());
                } else {
                    metricCollector.reportStageEnd(metricStage, Status.FAILED);
                }
            }
        } catch (Exception ex) {
            LOG.error("Failed to collect Metrics ", ex);
        }
    }
    return errorCode;
}
Also used: Path(org.apache.hadoop.fs.Path) SnapshotException(org.apache.hadoop.hdfs.protocol.SnapshotException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) TException(org.apache.thrift.TException) IOException(java.io.IOException)
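For context, a hedged sketch of how a replication task might route a failure through this helper; executeReplDump(), work.getCurrentDumpPath(), and work.getMetricCollector() are illustrative names, not taken from the Hive source:

try {
    executeReplDump();
} catch (Exception e) {
    setException(e);
    // Derive the error code; for non-recoverable errors, handleException also
    // writes a marker file with the stack trace under the given path.
    return ReplUtils.handleException(true, e, work.getCurrentDumpPath().toString(),
            work.getMetricCollector(), "REPL_DUMP", conf);
}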

Example 17 with SnapshotException

Use of org.apache.hadoop.hdfs.protocol.SnapshotException in project hive by apache.

From class DirCopyTask, method copyUsingDistCpSnapshots:

boolean copyUsingDistCpSnapshots(Path sourcePath, Path targetPath, UserGroupInformation proxyUser, HiveConf clonedConf) throws IOException {
    DistributedFileSystem targetFs = SnapshotUtils.getDFS(targetPath, clonedConf);
    boolean result = false;
    if (getWork().getCopyMode().equals(SnapshotUtils.SnapshotCopyMode.DIFF_COPY)) {
        LOG.info("Using snapshot diff copy for source: {} and target: {}", sourcePath, targetPath);
        boolean overwriteTarget = clonedConf.getBoolVar(REPL_SNAPSHOT_OVERWRITE_TARGET_FOR_EXTERNAL_TABLE_COPY);
        LOG.debug("Overwrite target in case the target location is modified is turned {}", overwriteTarget ? "on" : "off");
        result = FileUtils.distCpWithSnapshot(firstSnapshot(work.getSnapshotPrefix()), secondSnapshot(work.getSnapshotPrefix()), Collections.singletonList(sourcePath), targetPath, overwriteTarget, clonedConf, ShimLoader.getHadoopShims(), proxyUser);
        if (result) {
            // Delete the older snapshot from the last iteration.
            targetFs.deleteSnapshot(targetPath, firstSnapshot(work.getSnapshotPrefix()));
        } else {
            throw new SnapshotException("Cannot successfully copy external table data using snapshot diff. Source: " + sourcePath + ", target: " + targetPath);
        }
    } else if (getWork().getCopyMode().equals(SnapshotUtils.SnapshotCopyMode.INITIAL_COPY)) {
        LOG.info("Using snapshot initial copy for source: {} and target: {}", sourcePath, targetPath);
        // Get the path relative to the initial snapshot for copy.
        Path snapRelPath = new Path(sourcePath, HdfsConstants.DOT_SNAPSHOT_DIR + "/" + secondSnapshot(work.getSnapshotPrefix()));
        // This is the first copy: if the target is not yet snapshottable, attempt to
        // allow snapshots on it.
        SnapshotUtils.allowSnapshot(targetFs, work.getFullyQualifiedTargetPath(), clonedConf);
        // Delete any pre-existing snapshot, in case this is a bootstrap after a failed
        // incremental; a bootstrap starts from scratch, so stale snapshots must go.
        SnapshotUtils.deleteSnapshotIfExists(targetFs, targetPath, firstSnapshot(work.getSnapshotPrefix()), clonedConf);
        // Copy from the initial snapshot path.
        result = runFallbackDistCp(snapRelPath, targetPath, proxyUser, clonedConf);
    }
    // Create a new snapshot on the target filesystem for the next iteration.
    if (result) {
        SnapshotUtils.createSnapshot(targetFs, targetPath, firstSnapshot(work.getSnapshotPrefix()), clonedConf);
    }
    return result;
}
Also used: Path(org.apache.hadoop.fs.Path) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) SnapshotException(org.apache.hadoop.hdfs.protocol.SnapshotException)
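The DIFF_COPY branch builds on HDFS's snapshot-diff support. Below is a minimal sketch of that underlying lifecycle using only public DistributedFileSystem APIs; the directory path and snapshot names are illustrative, not taken from the Hive source:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;

public class SnapshotDiffSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path dir = new Path("hdfs://nn/warehouse/ext_table");
        DistributedFileSystem dfs = (DistributedFileSystem) dir.getFileSystem(conf);

        dfs.allowSnapshot(dir);            // make the directory snapshottable
        dfs.createSnapshot(dir, "snap1");  // baseline from the previous iteration
        // ... external table data changes between replication cycles ...
        dfs.createSnapshot(dir, "snap2");  // snapshot for the current iteration

        // DistCp with -update -diff copies only this delta; the report shows its contents.
        SnapshotDiffReport diff = dfs.getSnapshotDiffReport(dir, "snap1", "snap2");
        System.out.println(diff);

        // After a successful copy, retire the older snapshot, mirroring the
        // deleteSnapshot call in the DIFF_COPY branch above.
        dfs.deleteSnapshot(dir, "snap1");
    }
}

A design note: keeping exactly two snapshots per iteration (the previous and the current) is what lets each replication cycle copy only the delta instead of rescanning the whole table directory.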

Aggregations

SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException): 17 uses
Path (org.apache.hadoop.fs.Path): 9 uses
INodeDirectory (org.apache.hadoop.hdfs.server.namenode.INodeDirectory): 5 uses
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 4 uses
Test (org.junit.Test): 4 uses
IOException (java.io.IOException): 3 uses
DirectorySnapshottableFeature (org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature): 3 uses
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 3 uses
FileNotFoundException (java.io.FileNotFoundException): 2 uses
INodesInPath (org.apache.hadoop.hdfs.server.namenode.INodesInPath): 2 uses
ReplExternalTables.externalTableDataPath (org.apache.hadoop.hive.ql.exec.repl.ReplExternalTables.externalTableDataPath): 2 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 2 uses
RemoteException (org.apache.hadoop.ipc.RemoteException): 2 uses
TException (org.apache.thrift.TException): 2 uses
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 1 use
ArrayList (java.util.ArrayList): 1 use
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 1 use
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 1 use
DirectoryListing (org.apache.hadoop.hdfs.protocol.DirectoryListing): 1 use
HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 1 use