Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class TestAuditLoggerWithCommands, method testRemoveCacheDirective.
@Test
public void testRemoveCacheDirective() throws Exception {
  removeExistingCachePools(null);
  proto.addCachePool(new CachePoolInfo("pool1")
      .setMode(new FsPermission((short) 0)));
  CacheDirectiveInfo alpha = new CacheDirectiveInfo.Builder()
      .setPath(new Path("/alpha"))
      .setPool("pool1")
      .build();
  String aceRemoveCachePattern =
      ".*allowed=false.*ugi=theDoctor.*cmd=removeCache.*";
  int length = -1;
  Long id = ((DistributedFileSystem) fs).addCacheDirective(alpha);
  fileSys = DFSTestUtil.getFileSystemAs(user1, conf);
  try {
    ((DistributedFileSystem) fileSys).removeCacheDirective(id);
    fail("It should have failed with an AccessControlException");
  } catch (AccessControlException ace) {
    length = verifyAuditLogs(aceRemoveCachePattern);
  }
  try {
    fileSys.close();
    ((DistributedFileSystem) fileSys).removeCacheDirective(id);
    fail("The operation should have failed with IOException");
  } catch (IOException e) {
    // Expected: the filesystem was closed before the call, and a closed
    // filesystem must not produce any further audit-log entries.
  }
  assertTrue("Unexpected log!",
      length == auditlog.getOutput().split("\n").length);
}
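For contrast with the two failure paths the test exercises, here is a minimal happy-path sketch, assuming a superuser DistributedFileSystem handle named dfs; the pool mode and names are illustrative, not taken from the test:

// Hypothetical sketch: create a writable pool, add a directive, remove it.
dfs.addCachePool(new CachePoolInfo("pool1")
    .setMode(new FsPermission((short) 0777)));   // world-writable, unlike the test's mode 0
long id = dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
    .setPath(new Path("/alpha"))
    .setPool("pool1")
    .build());
dfs.removeCacheDirective(id);   // succeeds: the caller may write to the pool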
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class ClientNamenodeProtocolServerSideTranslatorPB, method listCacheDirectives.
@Override
public ListCacheDirectivesResponseProto listCacheDirectives(
    RpcController controller, ListCacheDirectivesRequestProto request)
    throws ServiceException {
  try {
    CacheDirectiveInfo filter = PBHelperClient.convert(request.getFilter());
    BatchedEntries<CacheDirectiveEntry> entries =
        server.listCacheDirectives(request.getPrevId(), filter);
    ListCacheDirectivesResponseProto.Builder builder =
        ListCacheDirectivesResponseProto.newBuilder();
    builder.setHasMore(entries.hasMore());
    for (int i = 0, n = entries.size(); i < n; i++) {
      builder.addElements(PBHelperClient.convert(entries.get(i)));
    }
    return builder.build();
  } catch (IOException e) {
    throw new ServiceException(e);
  }
}
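The prevId field in the request is what makes listing incremental: each response carries one batch of entries plus a hasMore flag, and the client re-issues the call with the last directive ID it has seen. A rough sketch of that paging loop, assuming direct access to a ClientProtocol handle named namenode (in practice the loop is hidden inside the client-side directive iterator):

long prevId = 0;   // start from the beginning
CacheDirectiveInfo filter = new CacheDirectiveInfo.Builder().build();
BatchedEntries<CacheDirectiveEntry> batch;
do {
  batch = namenode.listCacheDirectives(prevId, filter);
  for (int i = 0; i < batch.size(); i++) {
    CacheDirectiveEntry entry = batch.get(i);
    prevId = entry.getInfo().getId();   // remember the cursor for the next page
    System.out.println(entry.getInfo());
  }
} while (batch.hasMore());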
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class DistributedFileSystem, method addCacheDirective.
/**
 * Add a new CacheDirective.
 *
 * @param info Information about a directive to add.
 * @param flags {@link CacheFlag}s to use for this operation.
 * @return the ID of the directive that was created.
 * @throws IOException if the directive could not be added
 */
public long addCacheDirective(CacheDirectiveInfo info,
    EnumSet<CacheFlag> flags) throws IOException {
  Preconditions.checkNotNull(info.getPath());
  Path path = new Path(getPathName(fixRelativePart(info.getPath())))
      .makeQualified(getUri(), getWorkingDirectory());
  return dfs.addCacheDirective(
      new CacheDirectiveInfo.Builder(info).setPath(path).build(), flags);
}
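A short usage sketch of this method, assuming dfs is an open DistributedFileSystem; CacheFlag.FORCE skips the pool's resource-limit checks:

long directiveId = dfs.addCacheDirective(
    new CacheDirectiveInfo.Builder()
        .setPath(new Path("/warm/data"))   // relative paths are qualified by the method above
        .setPool("pool1")
        .setReplication((short) 2)         // keep two cached replicas of each block
        .build(),
    EnumSet.of(CacheFlag.FORCE));          // bypass pool quota checks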
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class DistributedFileSystem, method listCacheDirectives.
/**
 * List cache directives. Incrementally fetches results from the server.
 *
 * @param filter Filter parameters to use when listing the directives, null to
 *               list all directives visible to us.
 * @return A RemoteIterator which returns CacheDirectiveInfo objects.
 */
public RemoteIterator<CacheDirectiveEntry> listCacheDirectives(
    CacheDirectiveInfo filter) throws IOException {
  if (filter == null) {
    filter = new CacheDirectiveInfo.Builder().build();
  }
  if (filter.getPath() != null) {
    filter = new CacheDirectiveInfo.Builder(filter)
        .setPath(new Path(getPathName(fixRelativePart(filter.getPath()))))
        .build();
  }
  final RemoteIterator<CacheDirectiveEntry> iter =
      dfs.listCacheDirectives(filter);
  return new RemoteIterator<CacheDirectiveEntry>() {
    @Override
    public boolean hasNext() throws IOException {
      return iter.hasNext();
    }

    @Override
    public CacheDirectiveEntry next() throws IOException {
      // Although the paths we get back from the NameNode should always be
      // absolute, we call makeQualified to add the scheme and authority of
      // this DistributedFilesystem.
      CacheDirectiveEntry desc = iter.next();
      CacheDirectiveInfo info = desc.getInfo();
      Path p = info.getPath().makeQualified(getUri(), getWorkingDirectory());
      return new CacheDirectiveEntry(
          new CacheDirectiveInfo.Builder(info).setPath(p).build(),
          desc.getStats());
    }
  };
}
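A short sketch of consuming the returned iterator; passing null lists every directive visible to the caller, and per the comment in next() the paths come back fully qualified:

RemoteIterator<CacheDirectiveEntry> it = dfs.listCacheDirectives(null);
while (it.hasNext()) {
  CacheDirectiveEntry entry = it.next();
  System.out.println(entry.getInfo().getPath() + " -> "
      + entry.getStats().getBytesCached() + " bytes cached");
}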
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo in project hadoop by apache.
The class PBHelperClient, method convert.
public static CacheDirectiveEntry convert(CacheDirectiveEntryProto proto) {
  CacheDirectiveInfo info = convert(proto.getInfo());
  CacheDirectiveStats stats = convert(proto.getStats());
  return new CacheDirectiveEntry(info, stats);
}
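The inverse conversion (POJO to protobuf) also lives in PBHelperClient; it is what the server-side translator above calls in builder.addElements(...). That makes a round trip a cheap sanity check; a minimal sketch with illustrative values:

CacheDirectiveInfo info = new CacheDirectiveInfo.Builder()
    .setId(1L)
    .setPath(new Path("/alpha"))
    .setPool("pool1")
    .build();
CacheDirectiveEntry entry = new CacheDirectiveEntry(
    info, new CacheDirectiveStats.Builder().build());   // empty stats for the sketch
CacheDirectiveEntryProto proto = PBHelperClient.convert(entry);   // POJO -> protobuf
CacheDirectiveEntry back = PBHelperClient.convert(proto);         // protobuf -> POJO
assert back.getInfo().getId().equals(1L);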