
Example 1 with SnapshotException

Use of org.apache.hadoop.hdfs.protocol.SnapshotException in project hadoop by apache.

From the class SnapshotManager, method getSnapshottableRoot.

/**
 * Find the source root directory where the snapshot will be taken
 * for a given path.
 *
 * @return the snapshottable directory.
 * @throws IOException if the given path does not lead to an
 *           existing snapshottable directory.
 */
public INodeDirectory getSnapshottableRoot(final INodesInPath iip) throws IOException {
    final String path = iip.getPath();
    final INodeDirectory dir = INodeDirectory.valueOf(iip.getLastINode(), path);
    if (!dir.isSnapshottable()) {
        throw new SnapshotException("Directory is not a snapshottable directory: " + path);
    }
    return dir;
}
Also used: INodeDirectory (org.apache.hadoop.hdfs.server.namenode.INodeDirectory), SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException)
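
For context, a minimal client-side sketch (not part of the quoted Hadoop source) of the condition this method guards against: taking a snapshot of a directory that was never marked snapshottable. It assumes fs.defaultFS points at an HDFS cluster; the path and snapshot name are placeholders, and the NameNode-side SnapshotException is assumed to be unwrapped on the client, as in the DFSAdmin examples below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.SnapshotException;

public class NonSnapshottableDirSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
        // Placeholder path on which allowSnapshot has never been called.
        Path dir = new Path("/data/reports");
        try {
            dfs.createSnapshot(dir, "s0");
        } catch (SnapshotException e) {
            // getSnapshottableRoot() rejected the path on the NameNode.
            System.err.println("Not snapshottable: " + e.getMessage());
        }
    }
}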

Example 2 with SnapshotException

Use of org.apache.hadoop.hdfs.protocol.SnapshotException in project hadoop by apache.

From the class SnapshotManager, method createSnapshot.

/**
 * Create a snapshot of the given path.
 * It is assumed that the caller will perform synchronization.
 *
 * @param iip the INodes resolved from the snapshottable directory's path
 * @param snapshotRoot the path of the snapshottable directory
 * @param snapshotName the name of the snapshot
 * @return the path of the newly created snapshot
 * @throws IOException if 1) the given path does not lead to an
 *           existing snapshottable directory, 2) a snapshot with the given
 *           name already exists for the directory, or 3) the number of
 *           snapshots exceeds the snapshot quota
 */
public String createSnapshot(final INodesInPath iip, String snapshotRoot, String snapshotName) throws IOException {
    INodeDirectory srcRoot = getSnapshottableRoot(iip);
    if (snapshotCounter == getMaxSnapshotID()) {
        // The maximum allowable snapshot ID has been reached; ID rollover is
        // not supported, so fail all further snapshot creation requests.
        throw new SnapshotException("Failed to create the snapshot. The FileSystem has run out of " + "snapshot IDs and ID rollover is not supported.");
    }
    srcRoot.addSnapshot(snapshotCounter, snapshotName);
    // Creation succeeded; advance the snapshot ID counter.
    snapshotCounter++;
    numSnapshots.getAndIncrement();
    return Snapshot.getSnapshotPath(snapshotRoot, snapshotName);
}
Also used: INodeDirectory (org.apache.hadoop.hdfs.server.namenode.INodeDirectory), SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException)
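
A hedged companion sketch (not from the Hadoop sources) for failure cause 2 in the javadoc above: creating a second snapshot with a name that already exists. The directory path and snapshot name are placeholders; the second createSnapshot call is expected to fail inside addSnapshot() with a SnapshotException about the duplicate name.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;

public class DuplicateSnapshotNameSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
        Path dir = new Path("/data/reports");   // placeholder path
        dfs.allowSnapshot(dir);                 // mark the directory snapshottable
        dfs.createSnapshot(dir, "daily");       // first snapshot succeeds
        // Reusing the name is rejected by the NameNode with a SnapshotException
        // complaining about a duplicate snapshot name.
        dfs.createSnapshot(dir, "daily");
    }
}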

Example 3 with SnapshotException

Use of org.apache.hadoop.hdfs.protocol.SnapshotException in project hadoop by apache.

From the class SnapshotManager, method resetSnapshottable.

/**
 * Set the given snapshottable directory to non-snapshottable.
 *
 * @param path the path of the snapshottable directory
 * @throws SnapshotException if there are snapshots in the directory.
 */
public void resetSnapshottable(final String path) throws IOException {
    final INodesInPath iip = fsdir.getINodesInPath(path, DirOp.WRITE);
    final INodeDirectory d = INodeDirectory.valueOf(iip.getLastINode(), path);
    DirectorySnapshottableFeature sf = d.getDirectorySnapshottableFeature();
    if (sf == null) {
        // the directory is already non-snapshottable
        return;
    }
    if (sf.getNumSnapshots() > 0) {
        throw new SnapshotException("The directory " + path + " has snapshot(s). " + "Please redo the operation after removing all the snapshots.");
    }
    if (d == fsdir.getRoot()) {
        d.setSnapshotQuota(0);
    } else {
        d.removeSnapshottableFeature();
    }
    removeSnapshottable(d);
}
Also used: INodeDirectory (org.apache.hadoop.hdfs.server.namenode.INodeDirectory), INodesInPath (org.apache.hadoop.hdfs.server.namenode.INodesInPath), SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException)
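
To illustrate the guard on getNumSnapshots(), here is a small client-side sketch (an illustration only, with placeholder paths and snapshot names): disallowSnapshot fails while any snapshot exists and succeeds once all snapshots have been deleted.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.SnapshotException;

public class DisallowSnapshotSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
        Path dir = new Path("/data/reports");   // placeholder path
        dfs.allowSnapshot(dir);
        dfs.createSnapshot(dir, "s0");
        try {
            dfs.disallowSnapshot(dir);          // rejected: the directory still has a snapshot
        } catch (SnapshotException e) {
            System.err.println("Expected failure: " + e.getMessage());
        }
        dfs.deleteSnapshot(dir, "s0");          // remove the only snapshot
        dfs.disallowSnapshot(dir);              // now succeeds
    }
}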

Example 4 with SnapshotException

Use of org.apache.hadoop.hdfs.protocol.SnapshotException in project hadoop by apache.

From the class DFSAdmin, method allowSnapshot.

/**
 * Allow snapshot on a directory.
 * Usage: hdfs dfsadmin -allowSnapshot snapshotDir
 * @param argv List of command line parameters.
 * @exception IOException
 */
public void allowSnapshot(String[] argv) throws IOException {
    Path p = new Path(argv[1]);
    final DistributedFileSystem dfs = AdminHelper.getDFS(p.toUri(), getConf());
    try {
        dfs.allowSnapshot(p);
    } catch (SnapshotException e) {
        throw new RemoteException(e.getClass().getName(), e.getMessage());
    }
    System.out.println("Allowing snapshot on " + argv[1] + " succeeded");
}
Also used: Path (org.apache.hadoop.fs.Path), DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem), RemoteException (org.apache.hadoop.ipc.RemoteException), SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException)
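
The command is normally invoked as hdfs dfsadmin -allowSnapshot <snapshotDir>. The sketch below (an assumption for illustration, not part of the quoted source) drives the same code path programmatically through ToolRunner; the directory path is a placeholder.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.tools.DFSAdmin;
import org.apache.hadoop.util.ToolRunner;

public class AllowSnapshotToolSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Equivalent to: hdfs dfsadmin -allowSnapshot /data/reports
        int exitCode = ToolRunner.run(conf, new DFSAdmin(),
                new String[] { "-allowSnapshot", "/data/reports" });
        System.exit(exitCode);
    }
}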

Example 5 with SnapshotException

Use of org.apache.hadoop.hdfs.protocol.SnapshotException in project hadoop by apache.

From the class DFSAdmin, method disallowSnapshot.

/**
 * Disallow snapshot on a directory.
 * Usage: hdfs dfsadmin -disallowSnapshot snapshotDir
 * @param argv List of command line parameters.
 * @exception IOException
 */
public void disallowSnapshot(String[] argv) throws IOException {
    Path p = new Path(argv[1]);
    final DistributedFileSystem dfs = AdminHelper.getDFS(p.toUri(), getConf());
    try {
        dfs.disallowSnapshot(p);
    } catch (SnapshotException e) {
        throw new RemoteException(e.getClass().getName(), e.getMessage());
    }
    System.out.println("Disallowing snapshot on " + argv[1] + " succeeded");
}
Also used: Path (org.apache.hadoop.fs.Path), DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem), RemoteException (org.apache.hadoop.ipc.RemoteException), SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException)
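
Both DFSAdmin methods above re-wrap the client-side SnapshotException in a RemoteException. A caller that needs the original type back can unwrap it; a minimal sketch, assuming re is the RemoteException rethrown by one of those calls:

import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.SnapshotException;
import org.apache.hadoop.ipc.RemoteException;

public class SnapshotExceptionUnwrapSketch {
    // `re` is assumed to come from allowSnapshot()/disallowSnapshot() above.
    static void reportSnapshotFailure(RemoteException re) throws IOException {
        IOException unwrapped = re.unwrapRemoteException(SnapshotException.class);
        if (unwrapped instanceof SnapshotException) {
            System.err.println("Snapshot operation rejected: " + unwrapped.getMessage());
        } else {
            throw unwrapped;   // some other remote failure; rethrow unchanged
        }
    }
}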

Aggregations

SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException): 17 usages
Path (org.apache.hadoop.fs.Path): 9 usages
INodeDirectory (org.apache.hadoop.hdfs.server.namenode.INodeDirectory): 5 usages
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 4 usages
Test (org.junit.Test): 4 usages
IOException (java.io.IOException): 3 usages
DirectorySnapshottableFeature (org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature): 3 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 3 usages
FileNotFoundException (java.io.FileNotFoundException): 2 usages
INodesInPath (org.apache.hadoop.hdfs.server.namenode.INodesInPath): 2 usages
ReplExternalTables.externalTableDataPath (org.apache.hadoop.hive.ql.exec.repl.ReplExternalTables.externalTableDataPath): 2 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 2 usages
RemoteException (org.apache.hadoop.ipc.RemoteException): 2 usages
TException (org.apache.thrift.TException): 2 usages
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 1 usage
ArrayList (java.util.ArrayList): 1 usage
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 1 usage
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 1 usage
DirectoryListing (org.apache.hadoop.hdfs.protocol.DirectoryListing): 1 usage
HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus): 1 usage