Example use of org.apache.jackrabbit.oak.segment.file.tar.LocalJournalFile in the Apache jackrabbit-oak project, taken from the checkConsistency method of the ConsistencyChecker class.
/**
 * Run a full traversal consistency check.
 * <p>
 * Walks the journal one revision at a time and, for every requested path
 * (under the head and/or under each requested checkpoint), verifies the
 * content of that path until a good revision has been found for it or the
 * journal is exhausted. Progress and results are reported through the
 * supplied writers.
 *
 * @param directory directory containing the tar files
 * @param journalFileName name of the journal file containing the revision history
 * @param debugInterval number of seconds between printing progress information to
 * the console during the full traversal phase.
 * @param checkBinaries if {@code true} full content of binary properties will be scanned
 * @param checkHead if {@code true} will check the head
 * @param checkpoints collection of checkpoints to be checked; the special entry
 * {@code "all"} expands to every checkpoint currently in the store
 * @param filterPaths collection of repository paths to be checked
 * @param ioStatistics if {@code true} prints I/O statistics gathered while consistency
 * check was performed
 * @param outWriter text output stream writer
 * @param errWriter text error stream writer
 * @throws IOException if the journal or the file store cannot be opened or read
 * @throws InvalidFileStoreVersionException if the file store version is not supported
 */
public static void checkConsistency(File directory, String journalFileName, long debugInterval, boolean checkBinaries, boolean checkHead, Set<String> checkpoints, Set<String> filterPaths, boolean ioStatistics, PrintWriter outWriter, PrintWriter errWriter) throws IOException, InvalidFileStoreVersionException {
try (JournalReader journal = new JournalReader(new LocalJournalFile(directory, journalFileName));
ConsistencyChecker checker = new ConsistencyChecker(directory, debugInterval, ioStatistics, outWriter, errWriter)) {
// Linked set keeps the user-supplied checkpoint order stable in all reports.
Set<String> checkpointsSet = Sets.newLinkedHashSet();
List<PathToCheck> headPaths = new ArrayList<>();
// checkpoint name -> paths to verify under that checkpoint
Map<String, List<PathToCheck>> checkpointPaths = new HashMap<>();
int revisionCount = 0;
if (!checkpoints.isEmpty()) {
checkpointsSet.addAll(checkpoints);
// The sentinel "all" is replaced by every checkpoint present in the store.
if (checkpointsSet.remove("all")) {
checkpointsSet = Sets.newLinkedHashSet(SegmentNodeStoreBuilders.builder(checker.store).build().checkpoints());
}
}
// Build the work list; checkCount tracks how many (path, root) pairs
// still await a good revision.
for (String path : filterPaths) {
if (checkHead) {
headPaths.add(new PathToCheck(path, null));
checker.checkCount++;
}
for (String checkpoint : checkpointsSet) {
List<PathToCheck> pathList = checkpointPaths.get(checkpoint);
if (pathList == null) {
pathList = new ArrayList<>();
checkpointPaths.put(checkpoint, pathList);
}
pathList.add(new PathToCheck(path, checkpoint));
checker.checkCount++;
}
}
int initialCount = checker.checkCount;
JournalEntry lastValidJournalEntry = null;
// Walk revisions until every path has been validated (checkCount reaches 0)
// or the journal runs out of entries.
while (journal.hasNext() && checker.checkCount > 0) {
JournalEntry journalEntry = journal.next();
String revision = journalEntry.getRevision();
try {
revisionCount++;
checker.store.setRevision(revision);
boolean overallValid = true;
SegmentNodeStore sns = SegmentNodeStoreBuilders.builder(checker.store).build();
checker.print("\nChecking revision {0}", revision);
if (checkHead) {
// A null journalEntry marks a path not yet validated by any revision.
boolean mustCheck = headPaths.stream().anyMatch(p -> p.journalEntry == null);
if (mustCheck) {
checker.print("\nChecking head\n");
NodeState root = sns.getRoot();
overallValid = overallValid && checker.checkPathsAtRoot(headPaths, root, journalEntry, checkBinaries);
}
}
if (!checkpointsSet.isEmpty()) {
// For each checkpoint: does any of its paths still lack a good revision?
Map<String, Boolean> checkpointsToCheck = checkpointPaths.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().stream().anyMatch(p -> p.journalEntry == null)));
boolean mustCheck = checkpointsToCheck.values().stream().anyMatch(v -> v == true);
if (mustCheck) {
checker.print("\nChecking checkpoints");
for (String checkpoint : checkpointsSet) {
if (checkpointsToCheck.get(checkpoint)) {
checker.print("\nChecking checkpoint {0}", checkpoint);
List<PathToCheck> pathList = checkpointPaths.get(checkpoint);
NodeState root = sns.retrieve(checkpoint);
if (root == null) {
checker.printError("Checkpoint {0} not found in this revision!", checkpoint);
overallValid = false;
} else {
// NOTE(review): the && short-circuits, so once overallValid is false the
// remaining checkpoints of this revision are not traversed and stay
// pending for later (older) revisions — confirm this deferral is intended.
overallValid = overallValid && checker.checkPathsAtRoot(pathList, root, journalEntry, checkBinaries);
}
}
}
}
}
if (overallValid) {
lastValidJournalEntry = journalEntry;
}
} catch (IllegalArgumentException e) {
// Unparsable/invalid record id in the journal entry: skip this revision.
checker.printError("Skipping invalid record id {0}", revision);
}
}
checker.print("\nSearched through {0} revisions and {1} checkpoints", revisionCount, checkpointsSet.size());
if (initialCount == checker.checkCount) {
// Not a single path could be validated against any revision.
checker.print("No good revision found");
} else {
if (checkHead) {
checker.print("\nHead");
checker.printResults(headPaths, NO_INDENT);
}
if (!checkpointsSet.isEmpty()) {
checker.print("\nCheckpoints");
for (String checkpoint : checkpointsSet) {
List<PathToCheck> pathList = checkpointPaths.get(checkpoint);
checker.print("- {0}", checkpoint);
checker.printResults(pathList, CHECKPOINT_INDENT);
}
}
checker.print("\nOverall");
checker.printOverallResults(lastValidJournalEntry);
}
if (ioStatistics) {
checker.print("[I/O] Segment read: Number of operations: {0}", checker.statisticsIOMonitor.ioOperations);
checker.print("[I/O] Segment read: Total size: {0} ({1} bytes)", humanReadableByteCount(checker.statisticsIOMonitor.readBytes.get()), checker.statisticsIOMonitor.readBytes);
checker.print("[I/O] Segment read: Total time: {0} ns", checker.statisticsIOMonitor.readTime);
}
}
}
Example use of org.apache.jackrabbit.oak.segment.file.tar.LocalJournalFile in the Apache jackrabbit-oak project, taken from the createJournalReader method of the JournalReaderTest class.
/**
 * Opens a {@link JournalReader} over a fresh temporary journal file
 * pre-populated with the given content.
 *
 * @param s journal content written to the file before the reader is opened
 * @return a reader over the newly written journal
 * @throws IOException if the temporary file cannot be created or written
 */
protected JournalReader createJournalReader(String s) throws IOException {
File tmpJournal = folder.newFile("jrt");
write(tmpJournal, s);
return new JournalReader(new LocalJournalFile(tmpJournal));
}
Example use of org.apache.jackrabbit.oak.segment.file.tar.LocalJournalFile in the Apache jackrabbit-oak project, taken from the timestampInJournalEntry test of the JournalEntryTest class.
/**
 * Verifies that each journal entry records the commit timestamp as its third
 * field and that {@code JournalEntry.getTimestamp()} exposes the same value.
 *
 * Fixes over the previous version: the file store is closed in a finally
 * block and the JournalReader is opened with try-with-resources, so neither
 * leaks when a merge or an assertion fails; Long.parseLong avoids the
 * needless boxing of Long.valueOf.
 */
@Test
public void timestampInJournalEntry() throws Exception {
FileStore fileStore = fileStoreBuilder(tempFolder.getRoot()).withMaxFileSize(5).withSegmentCacheSize(0).withStringCacheSize(0).withTemplateCacheSize(0).withMemoryMapping(true).build();
long startTime = System.currentTimeMillis();
try {
SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(fileStore).build();
// Five commits, flushing after each so every revision lands in journal.log.
for (int i = 0; i < 5; i++) {
NodeBuilder root = nodeStore.getRoot().builder();
root.child("c" + i);
nodeStore.merge(root, EmptyHook.INSTANCE, CommitInfo.EMPTY);
fileStore.flush();
}
} finally {
// Close unconditionally so the store is released even if a merge fails.
fileStore.close();
}
File journal = new File(tempFolder.getRoot(), "journal.log");
List<String> lines = Files.readLines(journal, Charset.defaultCharset());
assertFalse(lines.isEmpty());
// Oldest raw line: "<revision> root <timestamp>" — three parts.
String line = lines.get(0);
List<String> parts = journalParts(line);
assertEquals(3, parts.size());
long entryTime = Long.parseLong(parts.get(2));
assertTrue(entryTime >= startTime);
// The reader's first entry must match the journal's last raw line.
try (JournalReader jr = new JournalReader(new LocalJournalFile(journal))) {
JournalEntry journalEntry = jr.next();
assertEquals(journalParts(lines.get(lines.size() - 1)).get(0), journalEntry.getRevision());
assertEquals(journalParts(lines.get(lines.size() - 1)).get(2), String.valueOf(journalEntry.getTimestamp()));
}
}
Example use of org.apache.jackrabbit.oak.segment.file.tar.LocalJournalFile in the Apache jackrabbit-oak project, taken from the run method of the Compact class.
/**
 * Runs offline compaction against the store at {@code path}: compacts the
 * segments, cleans up, and rewrites journal.log so that only the single
 * most recent head revision remains. Before/after file listings, sizes and
 * timings are printed to stdout.
 *
 * @return 0 on success, 1 if compaction was cancelled or an error occurred
 */
public int run() {
System.out.printf("Compacting %s with %s\n", path, fileAccessMode.description);
System.out.printf(" before\n");
Set<File> filesBefore = listFiles(path);
printFiles(System.out, filesBefore);
System.out.printf(" size %s\n", printableSize(sizeOfDirectory(path)));
System.out.printf(" -> compacting\n");
Stopwatch timer = Stopwatch.createStarted();
try (FileStore fileStore = newFileStore()) {
if (!fileStore.compactFull()) {
System.out.printf("Compaction cancelled after %s.\n", printableStopwatch(timer));
return 1;
}
System.out.printf(" -> cleaning up\n");
fileStore.cleanup();
// Rebuild the journal: read the current head revision, then truncate
// and write it back as the only line.
JournalFile journalFile = new LocalJournalFile(path, "journal.log");
String headLine;
try (JournalReader reader = new JournalReader(journalFile)) {
headLine = String.format("%s root %s\n", reader.next().getRevision(), System.currentTimeMillis());
}
try (JournalFileWriter writer = journalFile.openJournalWriter()) {
System.out.printf(" -> writing new %s: %s\n", journalFile.getName(), headLine);
writer.truncate();
writer.writeLine(headLine);
}
} catch (Exception e) {
timer.stop();
e.printStackTrace(System.err);
System.out.printf("Compaction failed after %s.\n", printableStopwatch(timer));
return 1;
}
timer.stop();
System.out.printf(" after\n");
Set<File> filesAfter = listFiles(path);
printFiles(System.out, filesAfter);
System.out.printf(" size %s\n", printableSize(sizeOfDirectory(path)));
System.out.printf(" removed files %s\n", fileNames(difference(filesBefore, filesAfter)));
System.out.printf(" added files %s\n", fileNames(difference(filesAfter, filesBefore)));
System.out.printf("Compaction succeeded in %s.\n", printableStopwatch(timer));
return 0;
}
Example use of org.apache.jackrabbit.oak.segment.file.tar.LocalJournalFile in the Apache jackrabbit-oak project, taken from the readRevisions method of the SegmentTarExplorerBackend class.
/**
 * Reads all revision ids recorded in journal.log, newest first as yielded
 * by {@link JournalReader}.
 *
 * Fix: the previous version closed the JournalReader twice (once in an
 * inner finally, again in the outer finally); try-with-resources closes it
 * exactly once, including on the error path.
 *
 * @return the revisions found in the journal, or an empty list if the
 *         journal does not exist or cannot be read
 */
@Override
public List<String> readRevisions() {
JournalFile journal = new LocalJournalFile(path, "journal.log");
if (!journal.exists()) {
return newArrayList();
}
List<String> revs = newArrayList();
try (JournalReader journalReader = new JournalReader(journal)) {
Iterator<String> revisionIterator = Iterators.transform(journalReader, new Function<JournalEntry, String>() {
@Nullable
@Override
public String apply(JournalEntry entry) {
return entry.getRevision();
}
});
revs = newArrayList(revisionIterator);
} catch (IOException e) {
// Best-effort: an unreadable journal yields the (possibly empty) list read so far.
// NOTE(review): consider logging via SLF4J instead of printStackTrace.
e.printStackTrace();
}
return revs;
}
Aggregations