Use of org.apache.jackrabbit.oak.segment.RecordUsageAnalyser in the jackrabbit-oak project by Apache.
The debugFileStore method of the DebugStore class builds a per-segment reference map, feeds every data segment to a RecordUsageAnalyser, prints the total segment sizes and the analyser's record-usage report, and then reports how much segment data is unreachable from the head revision and therefore available for garbage collection.
private static void debugFileStore(ReadOnlyFileStore store) {
    Map<SegmentId, List<SegmentId>> idmap = Maps.newHashMap();
    int dataCount = 0;
    long dataSize = 0;
    int bulkCount = 0;
    long bulkSize = 0;
    RecordUsageAnalyser analyser = new RecordUsageAnalyser(store.getReader());
    // First pass: tally segment counts and sizes, record which segments each data
    // segment references, and feed every data segment to the record usage analyser.
    for (SegmentId id : store.getSegmentIds()) {
        if (id.isDataSegmentId()) {
            Segment segment = id.getSegment();
            dataCount++;
            dataSize += segment.size();
            idmap.put(id, getReferencedSegmentIds(store, segment));
            analyseSegment(segment, analyser);
        } else if (id.isBulkSegmentId()) {
            bulkCount++;
            bulkSize += id.getSegment().size();
            // Bulk segments hold raw binary data and reference no other segments.
            idmap.put(id, Collections.<SegmentId>emptyList());
        }
    }
    System.out.println("Total size:");
    System.out.format("%s in %6d data segments%n", byteCountToDisplaySize(dataSize), dataCount);
    System.out.format("%s in %6d bulk segments%n", byteCountToDisplaySize(bulkSize), bulkCount);
    System.out.println(analyser.toString());
    // Second pass: breadth-first traversal of the reference graph, starting from the
    // segment containing the head revision. Every segment that is reached is removed
    // from the garbage set; whatever remains is unreachable and thus collectable.
    Set<SegmentId> garbage = newHashSet(idmap.keySet());
    Queue<SegmentId> queue = Queues.newArrayDeque();
    queue.add(store.getRevisions().getHead().getSegmentId());
    while (!queue.isEmpty()) {
        SegmentId id = queue.remove();
        if (garbage.remove(id)) {
            queue.addAll(idmap.get(id));
        }
    }
    // Tally the size of the unreachable segments, again split into data and bulk.
    dataCount = 0;
    dataSize = 0;
    bulkCount = 0;
    bulkSize = 0;
    for (SegmentId id : garbage) {
        if (id.isDataSegmentId()) {
            dataCount++;
            dataSize += id.getSegment().size();
        } else if (id.isBulkSegmentId()) {
            bulkCount++;
            bulkSize += id.getSegment().size();
        }
    }
    System.out.format("%nAvailable for garbage collection:%n");
    System.out.format("%s in %6d data segments%n", byteCountToDisplaySize(dataSize), dataCount);
    System.out.format("%s in %6d bulk segments%n", byteCountToDisplaySize(bulkSize), bulkCount);
}
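For context, here is a minimal sketch of how a segment store might be opened and inspected from a standalone program. The class name SegmentStoreStats and the command-line handling are assumptions; FileStoreBuilder.buildReadOnly() is the oak-segment-tar entry point for opening a store without modifying it, and the private debugFileStore above is normally reached through the oak-run debug tooling rather than called directly.

import java.io.File;
import org.apache.jackrabbit.oak.segment.SegmentId;
import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder;
import org.apache.jackrabbit.oak.segment.file.ReadOnlyFileStore;

public class SegmentStoreStats {
    public static void main(String[] args) throws Exception {
        // Open the segment store read-only so the inspection cannot alter it.
        ReadOnlyFileStore store =
                FileStoreBuilder.fileStoreBuilder(new File(args[0])).buildReadOnly();
        try {
            int dataCount = 0;
            int bulkCount = 0;
            // Count data and bulk segments, mirroring the first pass of debugFileStore.
            for (SegmentId id : store.getSegmentIds()) {
                if (id.isDataSegmentId()) {
                    dataCount++;
                } else if (id.isBulkSegmentId()) {
                    bulkCount++;
                }
            }
            System.out.format("head segment %s, %d data segments, %d bulk segments%n",
                    store.getRevisions().getHead().getSegmentId(), dataCount, bulkCount);
        } finally {
            store.close();
        }
    }
}

The counters here mirror only the first pass of debugFileStore; the full analysis above additionally records the segment reference graph, which is what makes the reachability check from the head revision possible.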