
Example 1 with DeleteSlice

Use of org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice in project lucene-solr by apache.

From the class TestDocumentsWriterDeleteQueue, method testStressDeleteQueue:

public void testStressDeleteQueue() throws InterruptedException {
    DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
    Set<Term> uniqueValues = new HashSet<>();
    final int size = 10000 + random().nextInt(500) * RANDOM_MULTIPLIER;
    Integer[] ids = new Integer[size];
    for (int i = 0; i < ids.length; i++) {
        ids[i] = random().nextInt();
        uniqueValues.add(new Term("id", ids[i].toString()));
    }
    CountDownLatch latch = new CountDownLatch(1);
    AtomicInteger index = new AtomicInteger(0);
    final int numThreads = 2 + random().nextInt(5);
    UpdateThread[] threads = new UpdateThread[numThreads];
    for (int i = 0; i < threads.length; i++) {
        threads[i] = new UpdateThread(queue, index, ids, latch);
        threads[i].start();
    }
    // release all worker threads at once
    latch.countDown();
    for (int i = 0; i < threads.length; i++) {
        threads[i].join();
    }
    // catch each thread's private slice up to the queue tail; after applying it,
    // every thread must have buffered the full set of unique terms
    for (UpdateThread updateThread : threads) {
        DeleteSlice slice = updateThread.slice;
        queue.updateSlice(slice);
        BufferedUpdates deletes = updateThread.deletes;
        slice.apply(deletes, BufferedUpdates.MAX_INT);
        assertEquals(uniqueValues, deletes.terms.keySet());
    }
    queue.tryApplyGlobalSlice();
    // freezing the global buffer must yield exactly the same set of unique terms
    Set<Term> frozenSet = new HashSet<>();
    BytesRefBuilder builder = new BytesRefBuilder();
    TermIterator iter = queue.freezeGlobalBuffer(null).termIterator();
    while (iter.next() != null) {
        builder.copyBytes(iter.bytes);
        frozenSet.add(new Term(iter.field(), builder.toBytesRef()));
    }
    assertEquals("num deletes must be 0 after freeze", 0, queue.numGlobalTermDeletes());
    assertEquals(uniqueValues.size(), frozenSet.size());
    assertEquals(uniqueValues, frozenSet);
}
Also used: BytesRefBuilder (org.apache.lucene.util.BytesRefBuilder), TermIterator (org.apache.lucene.index.PrefixCodedTerms.TermIterator), CountDownLatch (java.util.concurrent.CountDownLatch), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), DeleteSlice (org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice), HashSet (java.util.HashSet)
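
The UpdateThread helper used in this example is not shown on this page. Below is a minimal sketch of what it presumably looks like, pieced together only from the calls visible in the snippet above (newSlice, addDelete, updateSlice, DeleteSlice.apply); the field names, constructor, and the exact way a delete is published are assumptions, and the real test class may rely on other package-private DocumentsWriterDeleteQueue methods. Each worker waits on the shared latch so all threads start at the same time, then claims ids through the shared AtomicInteger until the array is exhausted.

// Hedged sketch of the UpdateThread helper; assumed to live in the
// org.apache.lucene.index package so it can see these package-private types.
static class UpdateThread extends Thread {
    final DocumentsWriterDeleteQueue queue;
    final AtomicInteger index;
    final Integer[] ids;
    final CountDownLatch latch;
    final DeleteSlice slice;
    final BufferedUpdates deletes;

    UpdateThread(DocumentsWriterDeleteQueue queue, AtomicInteger index, Integer[] ids, CountDownLatch latch) {
        this.queue = queue;
        this.index = index;
        this.ids = ids;
        this.latch = latch;
        this.slice = queue.newSlice();
        this.deletes = new BufferedUpdates("deletes");
    }

    @Override
    public void run() {
        try {
            // all workers start at the same time, once the main thread counts the latch down
            latch.await();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
        int i;
        while ((i = index.getAndIncrement()) < ids.length) {
            Term term = new Term("id", ids[i].toString());
            // publish the delete to the shared queue, then pull everything up to the
            // current tail into this thread's private slice and buffer it
            queue.addDelete(term);
            queue.updateSlice(slice);
            slice.apply(deletes, BufferedUpdates.MAX_INT);
        }
    }
}

Because every worker repeatedly calls updateSlice and apply, and the test does one last catch-up per thread after the join, each thread's private BufferedUpdates ends up holding the full set of unique id terms, which is what the assertion in the loop checks.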

Example 2 with DeleteSlice

Use of org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice in project lucene-solr by apache.

From the class TestDocumentsWriterDeleteQueue, method testUpdateDelteSlices:

public void testUpdateDelteSlices() {
    DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue();
    final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER;
    Integer[] ids = new Integer[size];
    for (int i = 0; i < ids.length; i++) {
        ids[i] = random().nextInt();
    }
    // two independent slices draining the same shared queue
    DeleteSlice slice1 = queue.newSlice();
    DeleteSlice slice2 = queue.newSlice();
    BufferedUpdates bd1 = new BufferedUpdates("bd1");
    BufferedUpdates bd2 = new BufferedUpdates("bd2");
    int last1 = 0;
    int last2 = 0;
    Set<Term> uniqueValues = new HashSet<>();
    for (int j = 0; j < ids.length; j++) {
        Integer i = ids[j];
        // create an array here since we compare identity below against tailItem
        Term[] term = new Term[] { new Term("id", i.toString()) };
        uniqueValues.add(term[0]);
        queue.addDelete(term);
        // roughly every 20th delete (and always on the last one), catch slice1 up
        // to the queue tail and apply it into bd1
        if (random().nextInt(20) == 0 || j == ids.length - 1) {
            queue.updateSlice(slice1);
            assertTrue(slice1.isTailItem(term));
            slice1.apply(bd1, j);
            assertAllBetween(last1, j, bd1, ids);
            last1 = j + 1;
        }
        if (random().nextInt(10) == 5 || j == ids.length - 1) {
            queue.updateSlice(slice2);
            assertTrue(slice2.isTailItem(term));
            slice2.apply(bd2, j);
            assertAllBetween(last2, j, bd2, ids);
            last2 = j + 1;
        }
        assertEquals(j + 1, queue.numGlobalTermDeletes());
    }
    assertEquals(uniqueValues, bd1.terms.keySet());
    assertEquals(uniqueValues, bd2.terms.keySet());
    // the global buffer saw every delete; freezing it must yield all unique terms
    // and reset the global delete count to 0
    HashSet<Term> frozenSet = new HashSet<>();
    BytesRefBuilder bytesRef = new BytesRefBuilder();
    TermIterator iter = queue.freezeGlobalBuffer(null).termIterator();
    while (iter.next() != null) {
        bytesRef.copyBytes(iter.bytes);
        frozenSet.add(new Term(iter.field(), bytesRef.toBytesRef()));
    }
    assertEquals(uniqueValues, frozenSet);
    assertEquals("num deletes must be 0 after freeze", 0, queue.numGlobalTermDeletes());
}
Also used: BytesRefBuilder (org.apache.lucene.util.BytesRefBuilder), TermIterator (org.apache.lucene.index.PrefixCodedTerms.TermIterator), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), DeleteSlice (org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice), HashSet (java.util.HashSet)
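
Both examples verify the same invariant from two directions: the terms buffered through the per-thread or per-slice BufferedUpdates and the terms captured by freezing the global buffer must be exactly the uniqueValues set. freezeGlobalBuffer(null) drains the global buffer, which is why numGlobalTermDeletes() is asserted to be 0 after the freeze.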

Aggregations

HashSet (java.util.HashSet): 2
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 2
DeleteSlice (org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice): 2
TermIterator (org.apache.lucene.index.PrefixCodedTerms.TermIterator): 2
BytesRefBuilder (org.apache.lucene.util.BytesRefBuilder): 2
CountDownLatch (java.util.concurrent.CountDownLatch): 1