Example 6 with CacheDirectiveInfo

Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

From the class TestAuditLoggerWithCommands, method testRemoveCacheDirective:

@Test
public void testRemoveCacheDirective() throws Exception {
    removeExistingCachePools(null);
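    // Create a cache pool whose mode grants no permissions, so a non-superuser
    // cannot remove directives from it.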
    proto.addCachePool(new CachePoolInfo("pool1").setMode(new FsPermission((short) 0)));
    CacheDirectiveInfo alpha = new CacheDirectiveInfo.Builder().setPath(new Path("/alpha")).setPool("pool1").build();
    String aceRemoveCachePattern = ".*allowed=false.*ugi=theDoctor.*cmd=removeCache.*";
    int length = -1;
    Long id = ((DistributedFileSystem) fs).addCacheDirective(alpha);
    fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
    try {
        ((DistributedFileSystem) fileSys).removeCacheDirective(id);
        fail("It should have failed with an AccessControlException");
    } catch (AccessControlException ace) {
        length = verifyAuditLogs(aceRemoveCachePattern);
    }
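    // Close the FileSystem first: the next removeCacheDirective should fail
    // client-side with an IOException and never reach the NameNode.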
    try {
        fileSys.close();
        ((DistributedFileSystem) fileSys).removeCacheDirective(id);
        fail("The operation should have failed with IOException");
    } catch (IOException e) {
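        // Expected: the FileSystem has already been closed.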
    }
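    // Verify that nothing was logged after the AccessControlException was recorded.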
    assertTrue("Unexpected log!", length == auditlog.getOutput().split("\n").length);
}
Also used: Path (org.apache.hadoop.fs.Path), CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo), AccessControlException (org.apache.hadoop.security.AccessControlException), FsPermission (org.apache.hadoop.fs.permission.FsPermission), IOException (java.io.IOException), DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem), CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo), Test (org.junit.Test)
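Note the shape of the check: verifyAuditLogs appears to return the current number of audit log lines once the expected pattern is found, and the final assertion confirms that neither failed removal attempt appended further entries.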

Example 7 with CacheDirectiveInfo

Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

From the class ClientNamenodeProtocolServerSideTranslatorPB, method listCacheDirectives:

@Override
public ListCacheDirectivesResponseProto listCacheDirectives(RpcController controller, ListCacheDirectivesRequestProto request) throws ServiceException {
    try {
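        // Translate the protobuf filter to the native type, fetch one batch of
        // directives from the NameNode, and convert each entry back to protobuf.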
        CacheDirectiveInfo filter = PBHelperClient.convert(request.getFilter());
        BatchedEntries<CacheDirectiveEntry> entries = server.listCacheDirectives(request.getPrevId(), filter);
        ListCacheDirectivesResponseProto.Builder builder = ListCacheDirectivesResponseProto.newBuilder();
        builder.setHasMore(entries.hasMore());
        for (int i = 0, n = entries.size(); i < n; i++) {
            builder.addElements(PBHelperClient.convert(entries.get(i)));
        }
        return builder.build();
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used: CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo), ServiceException (com.google.protobuf.ServiceException), ListCacheDirectivesResponseProto (org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ListCacheDirectivesResponseProto), CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry), IOException (java.io.IOException)
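This is the standard HDFS RPC translator pattern: the server-side translator unwraps the protobuf request, invokes the native protocol method, and wraps the result back into a response proto, with PBHelperClient handling the per-type conversions in both directions.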

Example 8 with CacheDirectiveInfo

Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

From the class DistributedFileSystem, method addCacheDirective:

/**
   * Add a new CacheDirective.
   *
   * @param info Information about a directive to add.
   * @param flags {@link CacheFlag}s to use for this operation.
   * @return the ID of the directive that was created.
   * @throws IOException if the directive could not be added
   */
public long addCacheDirective(CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
    Preconditions.checkNotNull(info.getPath());
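    // Resolve a relative path and fully qualify it with this filesystem's URI
    // and authority before sending it to the NameNode.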
    Path path = new Path(getPathName(fixRelativePart(info.getPath()))).makeQualified(getUri(), getWorkingDirectory());
    return dfs.addCacheDirective(new CacheDirectiveInfo.Builder(info).setPath(path).build(), flags);
}
Also used: Path (org.apache.hadoop.fs.Path), CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo)
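A minimal usage sketch, assuming fs is an already-initialized DistributedFileSystem and that a cache pool named "pool1" exists; the pool name and path are hypothetical, not from the source (uses java.util.EnumSet and org.apache.hadoop.fs.CacheFlag):

public long cacheHotPath(DistributedFileSystem fs) throws IOException {
    // Both the path and the pool name below are hypothetical placeholders.
    CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
        .setPath(new Path("/datasets/hot"))
        .setPool("pool1")
        .build();
    // An empty flag set enforces the pool's resource limits;
    // EnumSet.of(CacheFlag.FORCE) would bypass that check.
    return fs.addCacheDirective(directive, EnumSet.noneOf(CacheFlag.class));
}

The returned ID can later be passed to removeCacheDirective, as the test in Example 6 does.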

Example 9 with CacheDirectiveInfo

Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

From the class DistributedFileSystem, method listCacheDirectives:

/**
   * List cache directives.  Incrementally fetches results from the server.
   *
   * @param filter Filter parameters to use when listing the directives, null to
   *               list all directives visible to us.
   * @return A RemoteIterator which returns CacheDirectiveInfo objects.
   */
public RemoteIterator<CacheDirectiveEntry> listCacheDirectives(CacheDirectiveInfo filter) throws IOException {
    if (filter == null) {
        filter = new CacheDirectiveInfo.Builder().build();
    }
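    // Resolve a relative filter path against the working directory, mirroring addCacheDirective.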
    if (filter.getPath() != null) {
        filter = new CacheDirectiveInfo.Builder(filter).setPath(new Path(getPathName(fixRelativePart(filter.getPath())))).build();
    }
    final RemoteIterator<CacheDirectiveEntry> iter = dfs.listCacheDirectives(filter);
    return new RemoteIterator<CacheDirectiveEntry>() {

        @Override
        public boolean hasNext() throws IOException {
            return iter.hasNext();
        }

        @Override
        public CacheDirectiveEntry next() throws IOException {
            // Although the paths we get back from the NameNode should always be
            // absolute, we call makeQualified to add the scheme and authority of
            // this DistributedFileSystem.
            CacheDirectiveEntry desc = iter.next();
            CacheDirectiveInfo info = desc.getInfo();
            Path p = info.getPath().makeQualified(getUri(), getWorkingDirectory());
            return new CacheDirectiveEntry(new CacheDirectiveInfo.Builder(info).setPath(p).build(), desc.getStats());
        }
    };
}
Also used: Path (org.apache.hadoop.fs.Path), RemoteIterator (org.apache.hadoop.fs.RemoteIterator), CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo), CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry)
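A minimal iteration sketch, again assuming fs is an initialized DistributedFileSystem; passing null lists every directive visible to the caller, as the Javadoc above describes, and getId/getPool are the standard CacheDirectiveInfo accessors:

public void printDirectives(DistributedFileSystem fs) throws IOException {
    // A null filter matches all directives visible to the current user.
    RemoteIterator<CacheDirectiveEntry> it = fs.listCacheDirectives(null);
    while (it.hasNext()) {
        CacheDirectiveEntry entry = it.next();
        CacheDirectiveInfo info = entry.getInfo();
        // info.getPath() is already qualified with this filesystem's scheme and authority.
        System.out.println(info.getId() + "\t" + info.getPool() + "\t" + info.getPath());
    }
}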

Example 10 with CacheDirectiveInfo

Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.

From the class PBHelperClient, method convert:

public static CacheDirectiveEntry convert(CacheDirectiveEntryProto proto) {
    CacheDirectiveInfo info = convert(proto.getInfo());
    CacheDirectiveStats stats = convert(proto.getStats());
    return new CacheDirectiveEntry(info, stats);
}
Also used: CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo), CacheDirectiveStats (org.apache.hadoop.hdfs.protocol.CacheDirectiveStats), CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry)
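PBHelperClient collects these pairwise convert overloads for every protocol type, so code like the translator in Example 7 can move between protobuf messages and native classes such as CacheDirectiveEntry without handling protobuf details itself.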

Aggregations

CacheDirectiveInfo (org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo): 24 uses
Path (org.apache.hadoop.fs.Path): 15 uses
CachePoolInfo (org.apache.hadoop.hdfs.protocol.CachePoolInfo): 10 uses
IOException (java.io.IOException): 9 uses
CacheDirectiveEntry (org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry): 9 uses
Test (org.junit.Test): 9 uses
InvalidRequestException (org.apache.hadoop.fs.InvalidRequestException): 6 uses
AccessControlException (org.apache.hadoop.security.AccessControlException): 6 uses
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 5 uses
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 4 uses
CacheDirective (org.apache.hadoop.hdfs.protocol.CacheDirective): 4 uses
CacheFlag (org.apache.hadoop.fs.CacheFlag): 3 uses
CachePoolEntry (org.apache.hadoop.hdfs.protocol.CachePoolEntry): 3 uses
ArrayList (java.util.ArrayList): 2 uses
Date (java.util.Date): 2 uses
ServiceException (com.google.protobuf.ServiceException): 1 use
HashSet (java.util.HashSet): 1 use
LinkedList (java.util.LinkedList): 1 use
List (java.util.List): 1 use
BatchedListEntries (org.apache.hadoop.fs.BatchedRemoteIterator.BatchedListEntries): 1 use