Use of org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto in project hadoop by apache.
From class PBImageXmlWriter, method dumpCacheManagerSection:
private void dumpCacheManagerSection(InputStream is) throws IOException {
  out.print("<" + CACHE_MANAGER_SECTION_NAME + ">");
  CacheManagerSection s = CacheManagerSection.parseDelimitedFrom(is);
  o(CACHE_MANAGER_SECTION_NEXT_DIRECTIVE_ID, s.getNextDirectiveId());
  o(CACHE_MANAGER_SECTION_NUM_DIRECTIVES, s.getNumDirectives());
  o(CACHE_MANAGER_SECTION_NUM_POOLS, s.getNumPools());
  // Each cache pool is stored as a delimited CachePoolInfoProto following the section header.
  for (int i = 0; i < s.getNumPools(); ++i) {
    CachePoolInfoProto p = CachePoolInfoProto.parseDelimitedFrom(is);
    out.print("<" + CACHE_MANAGER_SECTION_POOL + ">");
    o(CACHE_MANAGER_SECTION_POOL_NAME, p.getPoolName())
        .o(CACHE_MANAGER_SECTION_OWNER_NAME, p.getOwnerName())
        .o(CACHE_MANAGER_SECTION_GROUP_NAME, p.getGroupName())
        .o(CACHE_MANAGER_SECTION_MODE, p.getMode())
        .o(CACHE_MANAGER_SECTION_LIMIT, p.getLimit())
        .o(CACHE_MANAGER_SECTION_MAX_RELATIVE_EXPIRY, p.getMaxRelativeExpiry());
    out.print("</" + CACHE_MANAGER_SECTION_POOL + ">\n");
  }
  // Directives follow the pools; each carries a nested expiration message.
  for (int i = 0; i < s.getNumDirectives(); ++i) {
    CacheDirectiveInfoProto p = CacheDirectiveInfoProto.parseDelimitedFrom(is);
    out.print("<" + CACHE_MANAGER_SECTION_DIRECTIVE + ">");
    o(SECTION_ID, p.getId())
        .o(SECTION_PATH, p.getPath())
        .o(SECTION_REPLICATION, p.getReplication())
        .o(CACHE_MANAGER_SECTION_POOL, p.getPool());
    out.print("<" + CACHE_MANAGER_SECTION_EXPIRATION + ">");
    CacheDirectiveInfoExpirationProto e = p.getExpiration();
    o(CACHE_MANAGER_SECTION_MILLIS, e.getMillis())
        .o(CACHE_MANAGER_SECTION_RELATIVE, e.getIsRelative());
    out.print("</" + CACHE_MANAGER_SECTION_EXPIRATION + ">\n");
    out.print("</" + CACHE_MANAGER_SECTION_DIRECTIVE + ">\n");
  }
  out.print("</" + CACHE_MANAGER_SECTION_NAME + ">\n");
}
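The method above only reads these messages; as a minimal sketch of the other side, the snippet below builds a CacheDirectiveInfoExpirationProto and embeds it in a CacheDirectiveInfoProto using the standard protobuf-generated builders for the fields the dump reads (millis, isRelative, id, path, replication, pool, expiration). The class name ExpirationProtoSketch and the sample values are illustrative assumptions, not part of PBImageXmlWriter.

import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto;

public class ExpirationProtoSketch {
  public static void main(String[] args) {
    // Nested expiration message: an interval in milliseconds plus a flag saying
    // whether it is relative to "now" or an absolute timestamp.
    CacheDirectiveInfoExpirationProto expiry = CacheDirectiveInfoExpirationProto.newBuilder()
        .setMillis(30 * 60 * 1000L)  // sample value: 30 minutes
        .setIsRelative(true)
        .build();

    // Embed it in a directive, mirroring the fields dumpCacheManagerSection prints.
    CacheDirectiveInfoProto directive = CacheDirectiveInfoProto.newBuilder()
        .setId(1L)                   // sample values below are hypothetical
        .setPath("/data/hot")
        .setReplication(3)
        .setPool("hot-pool")
        .setExpiration(expiry)
        .build();

    // Read the nested message back the same way the XML writer does.
    System.out.println(directive.getExpiration().getMillis());      // 1800000
    System.out.println(directive.getExpiration().getIsRelative());  // true
  }
}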