Example of using org.apache.lucene.store.MockDirectoryWrapper in the Apache lucene-solr project: class TestCrash, method crash.
/**
 * Simulates a machine crash against the given writer's directory: lets any
 * in-flight merges finish, "crashes" the mock directory (dropping un-synced
 * files), then clears the crash flag so the directory is usable afterwards.
 *
 * @param writer writer whose directory (must be a MockDirectoryWrapper) is crashed
 * @throws IOException if syncing the merge scheduler fails
 */
private void crash(final IndexWriter writer) throws IOException {
final MockDirectoryWrapper mockDir = (MockDirectoryWrapper) writer.getDirectory();
final ConcurrentMergeScheduler mergeScheduler = (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
// Sync before and after the crash so no merge thread races with the
// directory's state change.
mergeScheduler.sync();
mockDir.crash();
mergeScheduler.sync();
mockDir.clearCrash();
}
Example of using org.apache.lucene.store.MockDirectoryWrapper in the Apache lucene-solr project: class TestCrash, method testCrashWhileIndexing.
/**
 * Verifies that a reader opened on a directory that "crashed" mid-indexing
 * sees only a subset of the documents, and that an IndexWriter can recover
 * when opened on a copy of the crashed directory.
 */
public void testCrashWhileIndexing() throws IOException {
// This test relies on being able to open a reader before any commit
// happened, so we must create an initial commit just to allow that, but
// before any documents were added.
final IndexWriter writer = initIndex(random(), true);
final MockDirectoryWrapper crashedDir = (MockDirectoryWrapper) writer.getDirectory();
// Leftover files are expected because a merge may be running when we crash.
crashedDir.setAssertNoUnrefencedFilesOnClose(false);
crash(writer);
final IndexReader reader = DirectoryReader.open(crashedDir);
// The crash discards un-synced files, so not all docs can be visible.
assertTrue(reader.numDocs() < 157);
reader.close();
// Copy the crashed dir into a fresh one and open an IndexWriter on it, to
// confirm IW "recovers" after a crash:
final Directory recoveredDir = newDirectory(crashedDir);
crashedDir.close();
new RandomIndexWriter(random(), recoveredDir).close();
recoveredDir.close();
}
Example of using org.apache.lucene.store.MockDirectoryWrapper in the Apache lucene-solr project: class TestCrash, method testWriterAfterCrash.
/**
 * Verifies that after a simulated crash a new IndexWriter can be opened on
 * the same directory and continue indexing, that the resulting index is
 * readable, and that a copy of the directory is also recoverable.
 */
public void testWriterAfterCrash() throws IOException {
// before any documents were added.
if (VERBOSE) {
System.out.println("TEST: initIndex");
}
IndexWriter writer = initIndex(random(), true);
if (VERBOSE) {
System.out.println("TEST: done initIndex");
}
final MockDirectoryWrapper crashedDir = (MockDirectoryWrapper) writer.getDirectory();
// Leftover files are expected: a merge may be running / store files may be
// open when we crash.
crashedDir.setAssertNoUnrefencedFilesOnClose(false);
if (VERBOSE) {
System.out.println("TEST: now crash");
}
crash(writer);
// Re-open a writer on the crashed directory and index more docs.
writer = initIndex(random(), crashedDir, false, true);
writer.close();
final IndexReader reader = DirectoryReader.open(crashedDir);
assertTrue(reader.numDocs() < 314);
reader.close();
// Copy the crashed dir into a fresh one and open an IndexWriter on it, to
// confirm IW "recovers" after a crash:
final Directory recoveredDir = newDirectory(crashedDir);
crashedDir.close();
new RandomIndexWriter(random(), recoveredDir).close();
recoveredDir.close();
}
Example of using org.apache.lucene.store.MockDirectoryWrapper in the Apache lucene-solr project: class TestDoc, method testIndexAndMerge.
/** This test executes a number of merges and compares the contents of
 * the segments created when using compound file or not using one.
 *
 * TODO: the original test used to print the segment contents to System.out
 * for visual validation. To have the same effect, a new method
 * checkSegment(String name, ...) should be created that would
 * assert various things about the segment.
 */
public void testIndexAndMerge() throws Exception {
// The two runs are identical except for the compound-file flag; the
// printed segment contents must match either way.
String multiFileOutput = indexAndMergeOutput(false);
//System.out.println(multiFileOutput);
String singleFileOutput = indexAndMergeOutput(true);
assertEquals(multiFileOutput, singleFileOutput);
}

/**
 * Indexes two documents, runs three merges (using or not using the compound
 * file format), printing each resulting segment into a buffer, and returns
 * the captured output.
 *
 * @param useCompoundFile whether the merges should produce compound files
 * @return the printed description of every segment produced
 * @throws Exception if indexing, merging, or printing fails
 */
private String indexAndMergeOutput(boolean useCompoundFile) throws Exception {
StringWriter sw = new StringWriter();
PrintWriter out = new PrintWriter(sw, true);
Directory directory = newFSDirectory(indexDir);
if (directory instanceof MockDirectoryWrapper) {
// We create unreferenced files (we don't even write
// a segments file):
((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
}
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(-1).setMergePolicy(newLogMergePolicy(10)));
SegmentCommitInfo si1 = indexDoc(writer, "test.txt");
printSegment(out, si1);
SegmentCommitInfo si2 = indexDoc(writer, "test2.txt");
printSegment(out, si2);
writer.close();
SegmentCommitInfo siMerge = merge(directory, si1, si2, "_merge", useCompoundFile);
printSegment(out, siMerge);
SegmentCommitInfo siMerge2 = merge(directory, si1, si2, "_merge2", useCompoundFile);
printSegment(out, siMerge2);
SegmentCommitInfo siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", useCompoundFile);
printSegment(out, siMerge3);
directory.close();
out.close();
sw.close();
return sw.toString();
}
Example of using org.apache.lucene.store.MockDirectoryWrapper in the Apache lucene-solr project: class TestIndexWriterWithThreads, method _testSingleThreadFailure.
// Runs test, with one thread, using the specific failure
// to trigger an IOException
// Runs test, with one thread, using the specific failure
// to trigger an IOException
public void _testSingleThreadFailure(MockDirectoryWrapper.Failure failure) throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
// commitOnClose=false so the final close() below does not implicitly commit.
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()).setCommitOnClose(false);
// NOTE(review): this check is always true since a ConcurrentMergeScheduler
// was just set above; it is kept for safety. The suppressing scheduler
// swallows the exceptions we expect merge threads to hit after the writer
// is tragically closed.
if (iwc.getMergeScheduler() instanceof ConcurrentMergeScheduler) {
iwc.setMergeScheduler(new SuppressingConcurrentMergeScheduler() {
@Override
protected boolean isOK(Throwable th) {
return th instanceof AlreadyClosedException || (th instanceof IllegalStateException && th.getMessage().contains("this writer hit an unrecoverable error"));
}
});
}
IndexWriter writer = new IndexWriter(dir, iwc);
final Document doc = new Document();
// Store full term vectors so segment writing touches many files, widening
// the window for the injected failure to fire.
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setStoreTermVectors(true);
customType.setStoreTermVectorPositions(true);
customType.setStoreTermVectorOffsets(true);
doc.add(newField("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", customType));
// Index a few docs before arming the failure (maxBufferedDocs=2 forces
// several flushes/merges to be pending).
for (int i = 0; i < 6; i++) writer.addDocument(doc);
// Arm the injected failure; subsequent writer operations should hit it.
dir.failOn(failure);
failure.setDoFail();
expectThrows(IOException.class, () -> {
writer.addDocument(doc);
writer.addDocument(doc);
writer.commit();
});
// Disarm the failure; the writer has already been tragically closed, so
// any further use must throw AlreadyClosedException.
failure.clearDoFail();
expectThrows(AlreadyClosedException.class, () -> {
writer.addDocument(doc);
writer.commit();
writer.close();
});
// The tragic close must have shut down the index file deleter.
assertTrue(writer.deleter.isClosed());
dir.close();
}
Aggregations