Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry in the project hadoop by apache.
From the class TestRetryCacheWithHA, method listCacheDirectives.
/**
 * Iterates over all cache directives and checks that each one belongs to an
 * expected pool, triggering a NameNode failover on every other entry so the
 * listing is exercised across an HA transition (retry-cache coverage).
 *
 * @param poolNames the set of pool names expected to appear in the listing
 * @param active index of the currently active NameNode (0 or 1)
 * @throws Exception if iteration or a cluster transition fails
 */
@SuppressWarnings("unchecked")
private void listCacheDirectives(HashSet<String> poolNames, int active) throws Exception {
// Copy so the caller's set is left untouched; entries are removed as they are matched.
HashSet<String> remaining = (HashSet<String>) poolNames.clone();
RemoteIterator<CacheDirectiveEntry> iter = dfs.listCacheDirectives(null);
int expectedCount = poolNames.size();
for (int idx = 0; idx < expectedCount; idx++) {
String poolName = iter.next().getInfo().getPool();
assertTrue("The pool name should be expected", remaining.remove(poolName));
// On every other directive, swap the active/standby roles so the
// remainder of the iteration runs against the other NameNode.
if (idx % 2 == 0) {
int standby = active;
active = standby == 0 ? 1 : 0;
cluster.transitionToStandby(standby);
cluster.transitionToActive(active);
cluster.waitActive(active);
}
}
// Every expected pool must have been seen exactly once.
assertTrue("All pools must be found", remaining.isEmpty());
}
Use of org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry in the project SSM by Intel-bigdata.
From the class CacheFileAction, method isCached.
/**
 * Reports whether the given file currently has at least one HDFS cache
 * directive whose path matches it.
 *
 * @param fileName absolute path of the file to check
 * @return true if any cache directive matches the file's path
 * @throws Exception if the directive listing fails
 */
public boolean isCached(String fileName) throws Exception {
// Filter the directive listing down to entries for exactly this path.
CacheDirectiveInfo filter = new CacheDirectiveInfo.Builder()
    .setPath(new Path(fileName))
    .build();
// The file counts as cached iff the filtered listing is non-empty.
return dfsClient.listCacheDirectives(filter).hasNext();
}
Aggregations