Search in sources:

Example 1 with DirectorySnapshottableFeature

Use of org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature in project hadoop by apache.

The class INodeDirectory, method removeSnapshottableFeature:

/** remove DirectorySnapshottableFeature */
public void removeSnapshottableFeature() {
    DirectorySnapshottableFeature s = getDirectorySnapshottableFeature();
    Preconditions.checkState(s != null, "The dir does not have snapshottable feature: this=%s", this);
    this.removeFeature(s);
    if (s.getDiffs().asList().size() > 0) {
        // add a DirectoryWithSnapshotFeature back
        DirectoryWithSnapshotFeature sf = new DirectoryWithSnapshotFeature(s.getDiffs());
        addFeature(sf);
    }
}
Also used: DirectoryWithSnapshotFeature(org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature), DirectorySnapshottableFeature(org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature)
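
A minimal sketch, not taken from the Hadoop sources: it assumes a hypothetical INodeDirectory dir that is currently snapshottable and is being modified under the namesystem write lock. It only exercises the calls shown in the example above, to illustrate that existing snapshot diffs survive the removal on a plain DirectoryWithSnapshotFeature.

// Sketch only; dir and demoteToPlainSnapshotDir are illustrative names.
void demoteToPlainSnapshotDir(INodeDirectory dir) {
    DirectorySnapshottableFeature sf = dir.getDirectorySnapshottableFeature();
    boolean hadDiffs = sf.getDiffs().asList().size() > 0;
    dir.removeSnapshottableFeature();
    // If diffs existed, removeSnapshottableFeature re-attaches them to a
    // non-snapshottable DirectoryWithSnapshotFeature, so they remain reachable.
    assert !hadDiffs || dir.getDirectoryWithSnapshotFeature() != null;
}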

Example 2 with DirectorySnapshottableFeature

Use of org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature in project hadoop by apache.

The class INodeDirectory, method addSnapshottableFeature:

/** add DirectorySnapshottableFeature */
public void addSnapshottableFeature() {
    Preconditions.checkState(!isSnapshottable(), "this is already snapshottable, this=%s", this);
    DirectoryWithSnapshotFeature s = this.getDirectoryWithSnapshotFeature();
    // replace any existing DirectoryWithSnapshotFeature with an instance that
    // also supports snapshots, keeping its existing diff list
    final DirectorySnapshottableFeature snapshottable = new DirectorySnapshottableFeature(s);
    if (s != null) {
        this.removeFeature(s);
    }
    this.addFeature(snapshottable);
}
Also used: DirectoryWithSnapshotFeature(org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature), DirectorySnapshottableFeature(org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature)
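
Taking the two methods together, a hedged sketch of the toggle pattern a caller might follow when allowing or disallowing snapshots on a directory; the helper name and the surrounding locking are assumptions, and only the INodeDirectory calls come from the examples above.

// Illustrative helper, not the actual SnapshotManager code.
static void setSnapshottable(INodeDirectory dir, boolean allow) {
    if (allow && !dir.isSnapshottable()) {
        // upgrades any existing DirectoryWithSnapshotFeature in place
        dir.addSnapshottableFeature();
    } else if (!allow && dir.isSnapshottable()) {
        // keeps existing diffs on a plain DirectoryWithSnapshotFeature
        dir.removeSnapshottableFeature();
    }
}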

Example 3 with DirectorySnapshottableFeature

Use of org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature in project hadoop by apache.

The class FSDirSnapshotOp, method checkSnapshot:

/**
   * Check if the given INode (or one of its descendants) is snapshottable and
   * already has snapshots.
   *
   * @param target The given INode
   * @param snapshottableDirs The list of directories that are snapshottable
   *                          but do not have snapshots yet
   */
private static void checkSnapshot(INode target, List<INodeDirectory> snapshottableDirs) throws SnapshotException {
    if (target.isDirectory()) {
        INodeDirectory targetDir = target.asDirectory();
        DirectorySnapshottableFeature sf = targetDir.getDirectorySnapshottableFeature();
        if (sf != null) {
            if (sf.getNumSnapshots() > 0) {
                String fullPath = targetDir.getFullPathName();
                throw new SnapshotException("The directory " + fullPath
                    + " cannot be deleted since " + fullPath
                    + " is snapshottable and already has snapshots");
            } else if (snapshottableDirs != null) {
                // snapshottable but has no snapshots yet: report it to the caller
                snapshottableDirs.add(targetDir);
            }
        }
        for (INode child : targetDir.getChildrenList(Snapshot.CURRENT_STATE_ID)) {
            checkSnapshot(child, snapshottableDirs);
        }
    }
}
Also used: DirectorySnapshottableFeature(org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature), SnapshotException(org.apache.hadoop.hdfs.protocol.SnapshotException)
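
A hedged sketch of the calling pattern (the variable names are illustrative and the real delete path lives elsewhere in the namenode): the caller passes a list that collects directories which are snapshottable but hold no snapshots yet, and relies on checkSnapshot to throw before any deletion happens if a descendant already has snapshots.

// Sketch only: targetInode stands for the hypothetical subtree being deleted.
List<INodeDirectory> snapshottableDirs = new ChunkedArrayList<>();
checkSnapshot(targetInode, snapshottableDirs);
// If no SnapshotException was thrown, the caller can clear the snapshottable
// flag on each collected directory before carrying out the delete.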

Example 4 with DirectorySnapshottableFeature

Use of org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature in project hadoop by apache.

The class FSDirSnapshotOp, method getSnapshotFiles:

/** Get a collection of full snapshot paths given a file and its snapshottable dirs.
   * @param fsd the FSDirectory used to verify the candidate snapshot paths
   * @param lsf a list of snapshottable features
   * @param file full path of the file
   * @return collection of full paths of the snapshot copies of the file
   */
static Collection<String> getSnapshotFiles(FSDirectory fsd, List<DirectorySnapshottableFeature> lsf, String file) throws IOException {
    ArrayList<String> snaps = new ArrayList<>();
    for (DirectorySnapshottableFeature sf : lsf) {
        // for each snapshottable dir e.g. /dir1, /dir2
        final ReadOnlyList<Snapshot> lsnap = sf.getSnapshotList();
        for (Snapshot s : lsnap) {
            // for each snapshot name under snapshottable dir
            // e.g. /dir1/.snapshot/s1, /dir1/.snapshot/s2
            final String dirName = s.getRoot().getRootFullPathName();
            if (!file.startsWith(dirName)) {
                // file not in current snapshot root dir, no need to check other snaps
                break;
            }
            String snapname = s.getRoot().getFullPathName();
            if (dirName.equals(Path.SEPARATOR)) {
                // handle rootDir
                snapname += Path.SEPARATOR;
            }
            snapname += file.substring(file.indexOf(dirName) + dirName.length());
            if (fsd.getFSNamesystem().getFileInfo(snapname, true) != null) {
                snaps.add(snapname);
            }
        }
    }
    return snaps;
}
Also used: Snapshot(org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot), ChunkedArrayList(org.apache.hadoop.util.ChunkedArrayList), ArrayList(java.util.ArrayList), DirectorySnapshottableFeature(org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature)
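
The path rewrite in the inner loop can be shown in isolation with plain strings; the values below are made up for the illustration and involve no NameNode objects.

// Standalone illustration of the snapshot-path rewrite above.
String dirName = "/dir1";                // snapshottable directory root
String snapRoot = "/dir1/.snapshot/s1";  // full path of one snapshot root
String file = "/dir1/sub/data.txt";      // live file under the directory
String snapPath = snapRoot
    + file.substring(file.indexOf(dirName) + dirName.length());
// snapPath is "/dir1/.snapshot/s1/sub/data.txt"; getSnapshotFiles keeps such a
// path only if getFileInfo confirms the file really exists in that snapshot.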

Example 5 with DirectorySnapshottableFeature

Use of org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature in project hadoop by apache.

The class FSDirStatAndListingOp, method getSnapshotsListing:

/**
   * Get a listing of all the snapshots of a snapshottable directory.
   */
private static DirectoryListing getSnapshotsListing(FSDirectory fsd, INodesInPath iip, byte[] startAfter) throws IOException {
    Preconditions.checkState(fsd.hasReadLock());
    Preconditions.checkArgument(iip.isDotSnapshotDir(), "%s does not end with %s", iip.getPath(), HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR);
    // drop off the null .snapshot component
    iip = iip.getParentINodesInPath();
    final String dirPath = iip.getPath();
    final INode node = iip.getLastINode();
    final INodeDirectory dirNode = INodeDirectory.valueOf(node, dirPath);
    final DirectorySnapshottableFeature sf = dirNode.getDirectorySnapshottableFeature();
    if (sf == null) {
        throw new SnapshotException("Directory is not a snapshottable directory: " + dirPath);
    }
    final ReadOnlyList<Snapshot> snapshots = sf.getSnapshotList();
    int skipSize = ReadOnlyList.Util.binarySearch(snapshots, startAfter);
    skipSize = skipSize < 0 ? -skipSize - 1 : skipSize + 1;
    int numOfListing = Math.min(snapshots.size() - skipSize, fsd.getLsLimit());
    final HdfsFileStatus[] listing = new HdfsFileStatus[numOfListing];
    for (int i = 0; i < numOfListing; i++) {
        Snapshot.Root sRoot = snapshots.get(i + skipSize).getRoot();
        listing[i] = createFileStatus(fsd, iip, sRoot, HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED, false);
    }
    return new DirectoryListing(listing, snapshots.size() - skipSize - numOfListing);
}
Also used: DirectoryListing(org.apache.hadoop.hdfs.protocol.DirectoryListing), Snapshot(org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot), HdfsFileStatus(org.apache.hadoop.hdfs.protocol.HdfsFileStatus), DirectorySnapshottableFeature(org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature), SnapshotException(org.apache.hadoop.hdfs.protocol.SnapshotException)
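
The skip computation relies on the usual binary-search contract, where a miss is encoded as -(insertionPoint) - 1. A small standalone illustration with java.util.Arrays.binarySearch, which follows the same convention, shows how startAfter maps to the number of snapshots to skip.

import java.util.Arrays;

public class SkipSizeDemo {
    public static void main(String[] args) {
        String[] snapshots = { "s1", "s2", "s3", "s4" };
        String startAfter = "s2";
        int skipSize = Arrays.binarySearch(snapshots, startAfter);
        // hit: skip everything up to and including startAfter;
        // miss: skip everything before the insertion point
        skipSize = skipSize < 0 ? -skipSize - 1 : skipSize + 1;
        System.out.println(skipSize);  // prints 2, so the listing resumes at "s3"
    }
}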

Aggregations

DirectorySnapshottableFeature (org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature) 7
SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException) 3
ArrayList (java.util.ArrayList) 2
DirectoryWithSnapshotFeature (org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature) 2
Snapshot (org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot) 2
VisibleForTesting (com.google.common.annotations.VisibleForTesting) 1
Iterator (java.util.Iterator) 1
DirectoryListing (org.apache.hadoop.hdfs.protocol.DirectoryListing) 1
HdfsFileStatus (org.apache.hadoop.hdfs.protocol.HdfsFileStatus) 1
ChunkedArrayList (org.apache.hadoop.util.ChunkedArrayList) 1