Usage of org.apache.jackrabbit.oak.segment.file.JournalEntry in the jackrabbit-oak project (Apache).
Class ConsistencyChecker, method checkConsistency:
/**
 * Run a full traversal consistency check over the revisions recorded in the journal,
 * searching backwards for the most recent revision in which all requested paths
 * (head and/or checkpoints) are consistent.
 *
 * @param directory directory containing the tar files
 * @param journalFileName name of the journal file containing the revision history
 * @param debugInterval number of seconds between printing progress information to
 *                      the console during the full traversal phase
 * @param checkBinaries if {@code true} full content of binary properties will be scanned
 * @param checkHead if {@code true} will check the head
 * @param checkpoints collection of checkpoints to be checked; the special value
 *                    {@code "all"} expands to every checkpoint in the store
 * @param filterPaths collection of repository paths to be checked
 * @param ioStatistics if {@code true} prints I/O statistics gathered while consistency
 *                     check was performed
 * @param outWriter text output stream writer
 * @param errWriter text error stream writer
 * @throws IOException if the journal or the segment store cannot be read
 * @throws InvalidFileStoreVersionException if the store version is not supported
 */
public static void checkConsistency(File directory, String journalFileName, long debugInterval,
        boolean checkBinaries, boolean checkHead, Set<String> checkpoints, Set<String> filterPaths,
        boolean ioStatistics, PrintWriter outWriter, PrintWriter errWriter)
        throws IOException, InvalidFileStoreVersionException {
    try (JournalReader journal = new JournalReader(new LocalJournalFile(directory, journalFileName));
            ConsistencyChecker checker = new ConsistencyChecker(directory, debugInterval, ioStatistics, outWriter, errWriter)) {
        Set<String> checkpointsSet = Sets.newLinkedHashSet();
        List<PathToCheck> headPaths = new ArrayList<>();
        Map<String, List<PathToCheck>> checkpointPaths = new HashMap<>();
        int revisionCount = 0;

        if (!checkpoints.isEmpty()) {
            checkpointsSet.addAll(checkpoints);
            // "all" is a wildcard: replace it with every checkpoint present in the store.
            if (checkpointsSet.remove("all")) {
                checkpointsSet = Sets.newLinkedHashSet(SegmentNodeStoreBuilders.builder(checker.store).build().checkpoints());
            }
        }

        // Build the work list: one PathToCheck per (path, head) and per (path, checkpoint)
        // combination. checkCount tracks how many items still have to be verified.
        for (String path : filterPaths) {
            if (checkHead) {
                headPaths.add(new PathToCheck(path, null));
                checker.checkCount++;
            }
            for (String checkpoint : checkpointsSet) {
                checkpointPaths.computeIfAbsent(checkpoint, k -> new ArrayList<>())
                        .add(new PathToCheck(path, checkpoint));
                checker.checkCount++;
            }
        }

        int initialCount = checker.checkCount;
        JournalEntry lastValidJournalEntry = null;

        // Walk the journal from newest to oldest until every path has been verified
        // (checkCount reaches 0) or the journal is exhausted.
        while (journal.hasNext() && checker.checkCount > 0) {
            JournalEntry journalEntry = journal.next();
            String revision = journalEntry.getRevision();
            try {
                revisionCount++;
                checker.store.setRevision(revision);
                boolean overallValid = true;
                SegmentNodeStore sns = SegmentNodeStoreBuilders.builder(checker.store).build();
                checker.print("\nChecking revision {0}", revision);

                if (checkHead) {
                    // Only re-check head paths that have not yet been verified in a newer revision.
                    boolean mustCheck = headPaths.stream().anyMatch(p -> p.journalEntry == null);
                    if (mustCheck) {
                        checker.print("\nChecking head\n");
                        NodeState root = sns.getRoot();
                        // NOTE: && short-circuits — once overallValid is false, remaining
                        // checks in this revision are skipped and retried on older revisions.
                        overallValid = overallValid && checker.checkPathsAtRoot(headPaths, root, journalEntry, checkBinaries);
                    }
                }

                if (!checkpointsSet.isEmpty()) {
                    // For each checkpoint, determine whether any of its paths still needs checking.
                    Map<String, Boolean> checkpointsToCheck = checkpointPaths.entrySet().stream()
                            .collect(Collectors.toMap(Map.Entry::getKey,
                                    e -> e.getValue().stream().anyMatch(p -> p.journalEntry == null)));
                    boolean mustCheck = checkpointsToCheck.values().stream().anyMatch(v -> v);
                    if (mustCheck) {
                        checker.print("\nChecking checkpoints");
                        for (String checkpoint : checkpointsSet) {
                            // TRUE.equals guards against a missing key (e.g. empty filterPaths);
                            // a raw get() would auto-unbox null into an NPE.
                            if (Boolean.TRUE.equals(checkpointsToCheck.get(checkpoint))) {
                                checker.print("\nChecking checkpoint {0}", checkpoint);
                                List<PathToCheck> pathList = checkpointPaths.get(checkpoint);
                                NodeState root = sns.retrieve(checkpoint);
                                if (root == null) {
                                    checker.printError("Checkpoint {0} not found in this revision!", checkpoint);
                                    overallValid = false;
                                } else {
                                    overallValid = overallValid && checker.checkPathsAtRoot(pathList, root, journalEntry, checkBinaries);
                                }
                            }
                        }
                    }
                }

                if (overallValid) {
                    lastValidJournalEntry = journalEntry;
                }
            } catch (IllegalArgumentException e) {
                // Malformed record ids in the journal are skipped, not fatal.
                checker.printError("Skipping invalid record id {0}", revision);
            }
        }

        checker.print("\nSearched through {0} revisions and {1} checkpoints", revisionCount, checkpointsSet.size());
        if (initialCount == checker.checkCount) {
            // No path was ever verified: the journal holds no usable revision.
            checker.print("No good revision found");
        } else {
            if (checkHead) {
                checker.print("\nHead");
                checker.printResults(headPaths, NO_INDENT);
            }
            if (!checkpointsSet.isEmpty()) {
                checker.print("\nCheckpoints");
                for (String checkpoint : checkpointsSet) {
                    List<PathToCheck> pathList = checkpointPaths.get(checkpoint);
                    checker.print("- {0}", checkpoint);
                    checker.printResults(pathList, CHECKPOINT_INDENT);
                }
            }
            checker.print("\nOverall");
            checker.printOverallResults(lastValidJournalEntry);
        }

        if (ioStatistics) {
            checker.print("[I/O] Segment read: Number of operations: {0}", checker.statisticsIOMonitor.ioOperations);
            checker.print("[I/O] Segment read: Total size: {0} ({1} bytes)", humanReadableByteCount(checker.statisticsIOMonitor.readBytes.get()), checker.statisticsIOMonitor.readBytes);
            checker.print("[I/O] Segment read: Total time: {0} ns", checker.statisticsIOMonitor.readTime);
        }
    }
}
Usage of org.apache.jackrabbit.oak.segment.file.JournalEntry in the jackrabbit-oak project (Apache).
Class SegmentTarExplorerBackend, method readRevisions:
/**
 * Reads all revision identifiers from the {@code journal.log} file in this
 * backend's path, in journal order.
 *
 * @return the list of revision strings; empty if the journal file does not
 *         exist or cannot be opened
 */
@Override
public List<String> readRevisions() {
    JournalFile journal = new LocalJournalFile(path, "journal.log");
    if (!journal.exists()) {
        return newArrayList();
    }
    List<String> revs = newArrayList();
    // try-with-resources replaces the original nested try/finally blocks,
    // which closed the reader twice (inner finally and outer finally).
    try (JournalReader journalReader = new JournalReader(journal)) {
        while (journalReader.hasNext()) {
            revs.add(journalReader.next().getRevision());
        }
    } catch (IOException e) {
        // Best-effort: an unreadable journal yields whatever was collected so far
        // (an empty list when the reader cannot even be opened).
        e.printStackTrace();
    }
    return revs;
}
Aggregations