
Example 16 with MockDirectoryWrapper

Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.

The class TestIndexWriterWithThreads, method _testMultipleThreadsFailure.

// Runs test, with multiple threads, using the specific
// failure to trigger an IOException
public void _testMultipleThreadsFailure(MockDirectoryWrapper.Failure failure) throws Exception {
    int NUM_THREADS = 3;
    for (int iter = 0; iter < 2; iter++) {
        if (VERBOSE) {
            System.out.println("TEST: iter=" + iter);
        }
        MockDirectoryWrapper dir = newMockDirectory();
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy(4)).setCommitOnClose(false));
        ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
        IndexerThread[] threads = new IndexerThread[NUM_THREADS];
        for (int i = 0; i < NUM_THREADS; i++) threads[i] = new IndexerThread(writer, true);
        for (int i = 0; i < NUM_THREADS; i++) threads[i].start();
        Thread.sleep(10);
        dir.failOn(failure);
        failure.setDoFail();
        for (int i = 0; i < NUM_THREADS; i++) {
            threads[i].join();
            assertTrue("hit unexpected Throwable", threads[i].error == null);
        }
        boolean success = false;
        try {
            writer.commit();
            writer.close();
            success = true;
        } catch (AlreadyClosedException ace) {
            // OK: abort closes the writer
            assertTrue(writer.deleter.isClosed());
        } catch (IOException ioe) {
            writer.rollback();
            failure.clearDoFail();
        }
        if (VERBOSE) {
            System.out.println("TEST: success=" + success);
        }
        if (success) {
            IndexReader reader = DirectoryReader.open(dir);
            final Bits delDocs = MultiFields.getLiveDocs(reader);
            for (int j = 0; j < reader.maxDoc(); j++) {
                if (delDocs == null || !delDocs.get(j)) {
                    reader.document(j);
                    reader.getTermVectors(j);
                }
            }
            reader.close();
        }
        dir.close();
    }
}
Also used : MockDirectoryWrapper(org.apache.lucene.store.MockDirectoryWrapper) MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer) Bits(org.apache.lucene.util.Bits) AlreadyClosedException(org.apache.lucene.store.AlreadyClosedException) IOException(java.io.IOException)
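The failure passed into _testMultipleThreadsFailure is a MockDirectoryWrapper.Failure, which the wrapper evaluates on directory operations once failure.setDoFail() has been called. As a minimal sketch only (not one of the concrete failures this test is actually run with), an implementation could look like this:

import java.io.IOException;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;

// Illustrative only: once doFail is set via setDoFail(), every operation the
// wrapper evaluates throws a FakeIOException until clearDoFail() is called.
class FailAlways extends MockDirectoryWrapper.Failure {
    @Override
    public void eval(MockDirectoryWrapper dir) throws IOException {
        if (doFail) {
            throw new FakeIOException();
        }
    }
}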

Example 17 with MockDirectoryWrapper

Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.

The class TestIndexWriterReader, method testDuringAddIndexes.

// Stress test reopen during addIndexes
@Nightly
public void testDuringAddIndexes() throws Exception {
    Directory dir1 = getAssertNoDeletesDirectory(newDirectory());
    final IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(2)));
    // create the index
    createIndexNoClose(false, "test", writer);
    writer.commit();
    final Directory[] dirs = new Directory[10];
    for (int i = 0; i < 10; i++) {
        dirs[i] = new MockDirectoryWrapper(random(), TestUtil.ramCopyOf(dir1));
    }
    DirectoryReader r = writer.getReader();
    final int numIterations = 10;
    final List<Throwable> excs = Collections.synchronizedList(new ArrayList<Throwable>());
    // Only one thread can addIndexes at a time, because
    // IndexWriter acquires a write lock in each directory:
    final Thread[] threads = new Thread[1];
    final AtomicBoolean threadDone = new AtomicBoolean(false);
    for (int i = 0; i < threads.length; i++) {
        threads[i] = new Thread() {

            @Override
            public void run() {
                int count = 0;
                do {
                    count++;
                    try {
                        writer.addIndexes(dirs);
                        writer.maybeMerge();
                    } catch (Throwable t) {
                        excs.add(t);
                        throw new RuntimeException(t);
                    }
                } while (count < numIterations);
                threadDone.set(true);
            }
        };
        threads[i].setDaemon(true);
        threads[i].start();
    }
    int lastCount = 0;
    while (threadDone.get() == false) {
        DirectoryReader r2 = DirectoryReader.openIfChanged(r);
        if (r2 != null) {
            r.close();
            r = r2;
            Query q = new TermQuery(new Term("indexname", "test"));
            IndexSearcher searcher = newSearcher(r);
            final int count = searcher.search(q, 10).totalHits;
            assertTrue(count >= lastCount);
            lastCount = count;
        }
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].join();
    }
    // final check
    DirectoryReader r2 = DirectoryReader.openIfChanged(r);
    if (r2 != null) {
        r.close();
        r = r2;
    }
    Query q = new TermQuery(new Term("indexname", "test"));
    IndexSearcher searcher = newSearcher(r);
    final int count = searcher.search(q, 10).totalHits;
    assertTrue(count >= lastCount);
    assertEquals(0, excs.size());
    r.close();
    if (dir1 instanceof MockDirectoryWrapper) {
        final Collection<String> openDeletedFiles = ((MockDirectoryWrapper) dir1).getOpenDeletedFiles();
        assertEquals("openDeleted=" + openDeletedFiles, 0, openDeletedFiles.size());
    }
    writer.close();
    dir1.close();
}
Also used : MockDirectoryWrapper(org.apache.lucene.store.MockDirectoryWrapper) IndexSearcher(org.apache.lucene.search.IndexSearcher) TermQuery(org.apache.lucene.search.TermQuery) Query(org.apache.lucene.search.Query) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer) RAMDirectory(org.apache.lucene.store.RAMDirectory) Directory(org.apache.lucene.store.Directory)
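createIndexNoClose is a helper elsewhere in TestIndexWriterReader and is not shown on this page. A hedged sketch of what such a helper might do (document count and fields are illustrative, not the real implementation) is to seed the writer with documents whose indexname field matches the TermQuery used in the reopen loop:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;

// Illustrative sketch: add docs carrying indexname=<indexName> without closing
// the writer, optionally committing along the way to produce multiple segments.
class CreateIndexSketch {
    static void createIndexNoCloseSketch(boolean multiSegment, String indexName, IndexWriter w) throws Exception {
        for (int i = 0; i < 100; i++) {
            Document doc = new Document();
            doc.add(new StringField("indexname", indexName, Field.Store.YES));
            doc.add(new TextField("body", "some text " + i, Field.Store.NO));
            w.addDocument(doc);
            if (multiSegment && i % 10 == 0) {
                w.commit();
            }
        }
    }
}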

Example 18 with MockDirectoryWrapper

Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.

The class Test2BPostingsBytes, method test.

public void test() throws Exception {
    IndexWriterConfig defaultConfig = new IndexWriterConfig(null);
    Codec defaultCodec = defaultConfig.getCodec();
    if ((new IndexWriterConfig(null)).getCodec() instanceof CompressingCodec) {
        Pattern regex = Pattern.compile("maxDocsPerChunk=(\\d+), blockSize=(\\d+)");
        Matcher matcher = regex.matcher(defaultCodec.toString());
        assertTrue("Unexpected CompressingCodec toString() output: " + defaultCodec.toString(), matcher.find());
        int maxDocsPerChunk = Integer.parseInt(matcher.group(1));
        int blockSize = Integer.parseInt(matcher.group(2));
        int product = maxDocsPerChunk * blockSize;
        assumeTrue(defaultCodec.getName() + " maxDocsPerChunk (" + maxDocsPerChunk + ") * blockSize (" + blockSize + ") < 16 - this can trigger OOM with -Dtests.heapsize=30g", product >= 16);
    }
    BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BPostingsBytes1"));
    if (dir instanceof MockDirectoryWrapper) {
        ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).setRAMBufferSizeMB(256.0).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy(false, 10)).setOpenMode(IndexWriterConfig.OpenMode.CREATE).setCodec(TestUtil.getDefaultCodec()));
    MergePolicy mp = w.getConfig().getMergePolicy();
    if (mp instanceof LogByteSizeMergePolicy) {
        // 1 petabyte:
        ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024 * 1024 * 1024);
    }
    Document doc = new Document();
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
    ft.setOmitNorms(true);
    MyTokenStream tokenStream = new MyTokenStream();
    Field field = new Field("field", tokenStream, ft);
    doc.add(field);
    final int numDocs = 1000;
    for (int i = 0; i < numDocs; i++) {
        if (i % 2 == 1) {
            // trick blockPF's little optimization
            tokenStream.n = 65536;
        } else {
            tokenStream.n = 65537;
        }
        w.addDocument(doc);
    }
    w.forceMerge(1);
    w.close();
    DirectoryReader oneThousand = DirectoryReader.open(dir);
    DirectoryReader[] subReaders = new DirectoryReader[1000];
    Arrays.fill(subReaders, oneThousand);
    BaseDirectoryWrapper dir2 = newFSDirectory(createTempDir("2BPostingsBytes2"));
    if (dir2 instanceof MockDirectoryWrapper) {
        ((MockDirectoryWrapper) dir2).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
    IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(null));
    TestUtil.addIndexesSlowly(w2, subReaders);
    w2.forceMerge(1);
    w2.close();
    oneThousand.close();
    DirectoryReader oneMillion = DirectoryReader.open(dir2);
    subReaders = new DirectoryReader[2000];
    Arrays.fill(subReaders, oneMillion);
    BaseDirectoryWrapper dir3 = newFSDirectory(createTempDir("2BPostingsBytes3"));
    if (dir3 instanceof MockDirectoryWrapper) {
        ((MockDirectoryWrapper) dir3).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
    IndexWriter w3 = new IndexWriter(dir3, new IndexWriterConfig(null));
    TestUtil.addIndexesSlowly(w3, subReaders);
    w3.forceMerge(1);
    w3.close();
    oneMillion.close();
    dir.close();
    dir2.close();
    dir3.close();
}
Also used : MockDirectoryWrapper(org.apache.lucene.store.MockDirectoryWrapper) Pattern(java.util.regex.Pattern) CompressingCodec(org.apache.lucene.codecs.compressing.CompressingCodec) Matcher(java.util.regex.Matcher) Document(org.apache.lucene.document.Document) FieldType(org.apache.lucene.document.FieldType) Field(org.apache.lucene.document.Field) TextField(org.apache.lucene.document.TextField) Codec(org.apache.lucene.codecs.Codec) MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer) BaseDirectoryWrapper(org.apache.lucene.store.BaseDirectoryWrapper)
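MyTokenStream is a private helper class of Test2BPostingsBytes and is not reproduced here. Purely as an illustration of the shape such a stream might have (not the actual class), a TokenStream that emits n short tokens per document could be written as:

import java.io.IOException;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

// Illustrative sketch: emits `n` single-character tokens per document; the test
// above flips n between 65536 and 65537 from one document to the next.
final class CountingTokenStream extends TokenStream {
    private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
    int n;              // number of tokens to emit for the current document
    private int emitted;

    @Override
    public boolean incrementToken() {
        if (emitted >= n) {
            return false;
        }
        clearAttributes();
        termAtt.append((char) ('a' + (emitted % 26)));
        emitted++;
        return true;
    }

    @Override
    public void reset() throws IOException {
        super.reset();
        emitted = 0;
    }
}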

Example 19 with MockDirectoryWrapper

Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.

The class Test2BTerms, method test2BTerms.

public void test2BTerms() throws IOException {
    System.out.println("Starting Test2B");
    final long TERM_COUNT = ((long) Integer.MAX_VALUE) + 100000000;
    final int TERMS_PER_DOC = TestUtil.nextInt(random(), 100000, 1000000);
    List<BytesRef> savedTerms = null;
    BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BTerms"));
    //MockDirectoryWrapper dir = newFSDirectory(new File("/p/lucene/indices/2bindex"));
    if (dir instanceof MockDirectoryWrapper) {
        ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
    // don't double-checkindex
    dir.setCheckIndexOnClose(false);
    if (true) {
        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).setRAMBufferSizeMB(256.0).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy(false, 10)).setOpenMode(IndexWriterConfig.OpenMode.CREATE).setCodec(TestUtil.getDefaultCodec()));
        MergePolicy mp = w.getConfig().getMergePolicy();
        if (mp instanceof LogByteSizeMergePolicy) {
            // 1 petabyte:
            ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024 * 1024 * 1024);
        }
        Document doc = new Document();
        final MyTokenStream ts = new MyTokenStream(random(), TERMS_PER_DOC);
        FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
        customType.setIndexOptions(IndexOptions.DOCS);
        customType.setOmitNorms(true);
        Field field = new Field("field", ts, customType);
        doc.add(field);
        //w.setInfoStream(System.out);
        final int numDocs = (int) (TERM_COUNT / TERMS_PER_DOC);
        System.out.println("TERMS_PER_DOC=" + TERMS_PER_DOC);
        System.out.println("numDocs=" + numDocs);
        for (int i = 0; i < numDocs; i++) {
            final long t0 = System.currentTimeMillis();
            w.addDocument(doc);
            System.out.println(i + " of " + numDocs + " " + (System.currentTimeMillis() - t0) + " msec");
        }
        savedTerms = ts.savedTerms;
        System.out.println("TEST: full merge");
        w.forceMerge(1);
        System.out.println("TEST: close writer");
        w.close();
    }
    System.out.println("TEST: open reader");
    final IndexReader r = DirectoryReader.open(dir);
    if (savedTerms == null) {
        savedTerms = findTerms(r);
    }
    final int numSavedTerms = savedTerms.size();
    final List<BytesRef> bigOrdTerms = new ArrayList<>(savedTerms.subList(numSavedTerms - 10, numSavedTerms));
    System.out.println("TEST: test big ord terms...");
    testSavedTerms(r, bigOrdTerms);
    System.out.println("TEST: test all saved terms...");
    testSavedTerms(r, savedTerms);
    r.close();
    System.out.println("TEST: now CheckIndex...");
    CheckIndex.Status status = TestUtil.checkIndex(dir);
    final long tc = status.segmentInfos.get(0).termIndexStatus.termCount;
    assertTrue("count " + tc + " is not > " + Integer.MAX_VALUE, tc > Integer.MAX_VALUE);
    dir.close();
    System.out.println("TEST: done!");
}
Also used : MockDirectoryWrapper(org.apache.lucene.store.MockDirectoryWrapper) ArrayList(java.util.ArrayList) Document(org.apache.lucene.document.Document) FieldType(org.apache.lucene.document.FieldType) Field(org.apache.lucene.document.Field) TextField(org.apache.lucene.document.TextField) MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer) BaseDirectoryWrapper(org.apache.lucene.store.BaseDirectoryWrapper) BytesRef(org.apache.lucene.util.BytesRef)
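findTerms and testSavedTerms are helpers in Test2BTerms that this page does not show. Under that caveat, a rough sketch of sampling terms from the "field" terms dictionary (the sampling interval is illustrative) might look like:

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;

// Illustrative sketch: walk the terms dictionary and keep a deep copy of every
// millionth term so a handful of terms can be looked up again later.
class TermSampler {
    static List<BytesRef> sampleTerms(IndexReader r) throws Exception {
        List<BytesRef> saved = new ArrayList<>();
        TermsEnum termsEnum = MultiFields.getTerms(r, "field").iterator();
        long ord = 0;
        BytesRef term;
        while ((term = termsEnum.next()) != null) {
            if (ord++ % 1000000 == 0) {
                saved.add(BytesRef.deepCopyOf(term)); // TermsEnum reuses its BytesRef
            }
        }
        return saved;
    }
}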

Example 20 with MockDirectoryWrapper

Use of org.apache.lucene.store.MockDirectoryWrapper in project lucene-solr by apache.

The class Test4GBStoredFields, method test.

@Nightly
public void test() throws Exception {
    assumeWorkingMMapOnWindows();
    MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new MMapDirectory(createTempDir("4GBStoredFields")));
    dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
    iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
    iwc.setRAMBufferSizeMB(256.0);
    iwc.setMergeScheduler(new ConcurrentMergeScheduler());
    iwc.setMergePolicy(newLogMergePolicy(false, 10));
    iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    // maybe we should factor out crazy cases to ExtremeCompressing? then annotations can handle this stuff...
    if (random().nextBoolean()) {
        iwc.setCodec(CompressingCodec.reasonableInstance(random()));
    }
    IndexWriter w = new IndexWriter(dir, iwc);
    MergePolicy mp = w.getConfig().getMergePolicy();
    if (mp instanceof LogByteSizeMergePolicy) {
        // 1 petabyte:
        ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024 * 1024 * 1024);
    }
    final Document doc = new Document();
    final FieldType ft = new FieldType();
    ft.setStored(true);
    ft.freeze();
    final int valueLength = RandomNumbers.randomIntBetween(random(), 1 << 13, 1 << 20);
    final byte[] value = new byte[valueLength];
    for (int i = 0; i < valueLength; ++i) {
        // random so that even compressing codecs can't compress it
        value[i] = (byte) random().nextInt(256);
    }
    final Field f = new Field("fld", value, ft);
    doc.add(f);
    final int numDocs = (int) ((1L << 32) / valueLength + 100);
    for (int i = 0; i < numDocs; ++i) {
        w.addDocument(doc);
        if (VERBOSE && i % (numDocs / 10) == 0) {
            System.out.println(i + " of " + numDocs + "...");
        }
    }
    w.forceMerge(1);
    w.close();
    if (VERBOSE) {
        boolean found = false;
        for (String file : dir.listAll()) {
            if (file.endsWith(".fdt")) {
                final long fileLength = dir.fileLength(file);
                if (fileLength >= 1L << 32) {
                    found = true;
                }
                System.out.println("File length of " + file + " : " + fileLength);
            }
        }
        if (!found) {
            System.out.println("No .fdt file larger than 4GB, test bug?");
        }
    }
    DirectoryReader rd = DirectoryReader.open(dir);
    Document sd = rd.document(numDocs - 1);
    assertNotNull(sd);
    assertEquals(1, sd.getFields().size());
    BytesRef valueRef = sd.getBinaryValue("fld");
    assertNotNull(valueRef);
    assertEquals(new BytesRef(value), valueRef);
    rd.close();
    dir.close();
}
Also used : MockDirectoryWrapper(org.apache.lucene.store.MockDirectoryWrapper) Document(org.apache.lucene.document.Document) MMapDirectory(org.apache.lucene.store.MMapDirectory) FieldType(org.apache.lucene.document.FieldType) Field(org.apache.lucene.document.Field) MockAnalyzer(org.apache.lucene.analysis.MockAnalyzer) BytesRef(org.apache.lucene.util.BytesRef)
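Across Examples 16 through 20 the MockDirectoryWrapper setup follows essentially the same idiom; condensed here as a sketch (the temp-dir name is illustrative, and random()/createTempDir() come from LuceneTestCase):

import java.io.IOException;
import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;

// Condensed setup idiom from the examples above.
public class MockDirSetupSketch extends LuceneTestCase {
    MockDirectoryWrapper newBigIndexDir() throws IOException {
        MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new MMapDirectory(createTempDir("sketch")));
        dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER); // no artificial IO throttling for huge indexes
        dir.setCheckIndexOnClose(false);                          // skip the slow CheckIndex pass on close()
        // dir.failOn(failure);  // optionally inject a MockDirectoryWrapper.Failure, as in Example 16
        return dir;
    }
}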

Aggregations

MockDirectoryWrapper (org.apache.lucene.store.MockDirectoryWrapper): 121
Document (org.apache.lucene.document.Document): 61
MockAnalyzer (org.apache.lucene.analysis.MockAnalyzer): 55
Directory (org.apache.lucene.store.Directory): 32
IOException (java.io.IOException): 30
TextField (org.apache.lucene.document.TextField): 17
RAMDirectory (org.apache.lucene.store.RAMDirectory): 17
AlreadyClosedException (org.apache.lucene.store.AlreadyClosedException): 15
BaseDirectoryWrapper (org.apache.lucene.store.BaseDirectoryWrapper): 15
FakeIOException (org.apache.lucene.store.MockDirectoryWrapper.FakeIOException): 15
FieldType (org.apache.lucene.document.FieldType): 14
Field (org.apache.lucene.document.Field): 12
Random (java.util.Random): 11
NumericDocValuesField (org.apache.lucene.document.NumericDocValuesField): 11
Failure (org.apache.lucene.store.MockDirectoryWrapper.Failure): 11
BytesRef (org.apache.lucene.util.BytesRef): 11
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 10
Codec (org.apache.lucene.codecs.Codec): 10
StringField (org.apache.lucene.document.StringField): 9
IndexSearcher (org.apache.lucene.search.IndexSearcher): 9