Example 26 with DirectoryReader

Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.

From the class ShardUtilsTests, method testExtractShardId.

public void testExtractShardId() throws IOException {
    BaseDirectoryWrapper dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.commit();
    ShardId id = new ShardId("foo", "_na_", random().nextInt());
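    // even on an empty index, the wrapped reader must expose the ShardId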
    try (DirectoryReader reader = DirectoryReader.open(writer)) {
        ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
        assertEquals(id, ShardUtils.extractShardId(wrap));
    }
    final int numDocs = 1 + random().nextInt(5);
    for (int i = 0; i < numDocs; i++) {
        Document d = new Document();
        d.add(newField("name", "foobar", StringField.TYPE_STORED));
        writer.addDocument(d);
        if (random().nextBoolean()) {
            writer.commit();
        }
    }
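    // with documents present (and possibly several segments), every leaf must expose the ShardId as well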
    try (DirectoryReader reader = DirectoryReader.open(writer)) {
        ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
        assertEquals(id, ShardUtils.extractShardId(wrap));
        CompositeReaderContext context = wrap.getContext();
        for (LeafReaderContext leaf : context.leaves()) {
            assertEquals(id, ShardUtils.extractShardId(leaf.reader()));
        }
    }
    IOUtils.close(writer, dir);
}
Also used : ElasticsearchDirectoryReader(org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader) CompositeReaderContext(org.apache.lucene.index.CompositeReaderContext) IndexWriter(org.apache.lucene.index.IndexWriter) DirectoryReader(org.apache.lucene.index.DirectoryReader) BaseDirectoryWrapper(org.apache.lucene.store.BaseDirectoryWrapper) LeafReaderContext(org.apache.lucene.index.LeafReaderContext) Document(org.apache.lucene.document.Document)
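
Stripped of the test scaffolding, the pattern above is just wrap-then-extract. The sketch below is not taken from the Elasticsearch sources: the helper name verifyShardIdIsRecoverable is made up, the import path for ShardUtils is assumed to be org.elasticsearch.index.shard, and only calls already shown in the test are used.

import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;

// Hypothetical helper: tag a reader with its owning ShardId and read the id back from every leaf.
static ShardId verifyShardIdIsRecoverable(IndexWriter writer, ShardId id) throws IOException {
    try (ElasticsearchDirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), id)) {
        for (LeafReaderContext leaf : reader.leaves()) {
            // each segment reader of the wrapped reader carries the same ShardId
            assert id.equals(ShardUtils.extractShardId(leaf.reader()));
        }
        // the top-level reader reports it as well
        return ShardUtils.extractShardId(reader);
    }
}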

Example 27 with DirectoryReader

Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.

From the class NestedSortingTests, method testDuel.

public void testDuel() throws Exception {
    final int numDocs = scaledRandomIntBetween(100, 1000);
    for (int i = 0; i < numDocs; ++i) {
        final int numChildren = randomInt(2);
        List<Document> docs = new ArrayList<>(numChildren + 1);
        for (int j = 0; j < numChildren; ++j) {
            Document doc = new Document();
            doc.add(new StringField("f", TestUtil.randomSimpleString(random(), 2), Field.Store.NO));
            doc.add(new StringField("__type", "child", Field.Store.NO));
            docs.add(doc);
        }
        if (randomBoolean()) {
            docs.add(new Document());
        }
        Document parent = new Document();
        parent.add(new StringField("__type", "parent", Field.Store.NO));
        docs.add(parent);
        writer.addDocuments(docs);
        if (rarely()) {
            // force a flush by opening and closing an NRT reader, so we end up with a few more segments than RandomIndexWriter would create by default
            DirectoryReader.open(writer).close();
        }
    }
    writer.commit();
    MultiValueMode sortMode = randomFrom(Arrays.asList(MultiValueMode.MIN, MultiValueMode.MAX));
    DirectoryReader reader = DirectoryReader.open(writer);
    reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
    IndexSearcher searcher = new IndexSearcher(reader);
    PagedBytesIndexFieldData indexFieldData1 = getForField("f");
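    // the duel: the same field data with ordinals hidden must produce identical nested sort results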
    IndexFieldData<?> indexFieldData2 = NoOrdinalsStringFieldDataTests.hideOrdinals(indexFieldData1);
    final String missingValue = randomBoolean() ? null : TestUtil.randomSimpleString(random(), 2);
    final int n = randomIntBetween(1, numDocs + 2);
    final boolean reverse = randomBoolean();
    final TopDocs topDocs1 = getTopDocs(searcher, indexFieldData1, missingValue, sortMode, n, reverse);
    final TopDocs topDocs2 = getTopDocs(searcher, indexFieldData2, missingValue, sortMode, n, reverse);
    for (int i = 0; i < topDocs1.scoreDocs.length; ++i) {
        final FieldDoc fieldDoc1 = (FieldDoc) topDocs1.scoreDocs[i];
        final FieldDoc fieldDoc2 = (FieldDoc) topDocs2.scoreDocs[i];
        assertEquals(fieldDoc1.doc, fieldDoc2.doc);
        assertArrayEquals(fieldDoc1.fields, fieldDoc2.fields);
    }
    searcher.getIndexReader().close();
}
Also used : IndexSearcher(org.apache.lucene.search.IndexSearcher) PagedBytesIndexFieldData(org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData) FieldDoc(org.apache.lucene.search.FieldDoc) ElasticsearchDirectoryReader(org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader) DirectoryReader(org.apache.lucene.index.DirectoryReader) ArrayList(java.util.ArrayList) Document(org.apache.lucene.document.Document) MultiValueMode(org.elasticsearch.search.MultiValueMode) ShardId(org.elasticsearch.index.shard.ShardId) TopDocs(org.apache.lucene.search.TopDocs) StringField(org.apache.lucene.document.StringField)
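
The detail worth noting in this test is the block layout handed to addDocuments: child documents first, the parent document last, indexed atomically as one block, which is what the nested sorting relies on. The helper below is a minimal sketch of that layout; it is not from the Elasticsearch sources, the name newNestedBlock and its parameter are made up, and it uses only the Lucene document APIs already shown above.

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;

// Hypothetical helper: build one nested block with the parent as the last document.
static List<Document> newNestedBlock(List<String> childValues) {
    List<Document> block = new ArrayList<>(childValues.size() + 1);
    for (String value : childValues) {
        Document child = new Document();
        child.add(new StringField("f", value, Field.Store.NO));
        child.add(new StringField("__type", "child", Field.Store.NO));
        block.add(child);
    }
    Document parent = new Document();
    parent.add(new StringField("__type", "parent", Field.Store.NO));
    // the parent must be the last document so block joins can associate it with the children above it
    block.add(parent);
    return block;
}

A caller would index the block atomically with writer.addDocuments(newNestedBlock(values)), exactly as the test does.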

Example 28 with DirectoryReader

Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.

From the class IndicesRequestCacheTests, method testInvalidate.

public void testInvalidate() throws Exception {
    ShardRequestCache requestCacheStats = new ShardRequestCache();
    IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.addDocument(newDoc(0, "foo"));
    DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
    AtomicBoolean indexShard = new AtomicBoolean(true);
    // initial cache
    TestEntity entity = new TestEntity(requestCacheStats, indexShard);
    Loader loader = new Loader(reader, 0);
    BytesReference value = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value.streamInput().readString());
    assertEquals(0, requestCacheStats.stats().getHitCount());
    assertEquals(1, requestCacheStats.stats().getMissCount());
    assertEquals(0, requestCacheStats.stats().getEvictions());
    assertFalse(loader.loadedFromCache);
    assertEquals(1, cache.count());
    // cache hit
    entity = new TestEntity(requestCacheStats, indexShard);
    loader = new Loader(reader, 0);
    value = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value.streamInput().readString());
    assertEquals(1, requestCacheStats.stats().getHitCount());
    assertEquals(1, requestCacheStats.stats().getMissCount());
    assertEquals(0, requestCacheStats.stats().getEvictions());
    assertTrue(loader.loadedFromCache);
    assertEquals(1, cache.count());
    assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length());
    assertEquals(1, cache.numRegisteredCloseListeners());
    // load again after invalidate
    entity = new TestEntity(requestCacheStats, indexShard);
    loader = new Loader(reader, 0);
    cache.invalidate(entity, reader, termQuery.buildAsBytes());
    value = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value.streamInput().readString());
    assertEquals(1, requestCacheStats.stats().getHitCount());
    assertEquals(2, requestCacheStats.stats().getMissCount());
    assertEquals(0, requestCacheStats.stats().getEvictions());
    assertFalse(loader.loadedFromCache);
    assertEquals(1, cache.count());
    assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length());
    assertEquals(1, cache.numRegisteredCloseListeners());
    // release
    if (randomBoolean()) {
        reader.close();
    } else {
        // closed shard but reader is still open
        indexShard.set(false);
        cache.clear(entity);
    }
    cache.cleanCache();
    assertEquals(1, requestCacheStats.stats().getHitCount());
    assertEquals(2, requestCacheStats.stats().getMissCount());
    assertEquals(0, requestCacheStats.stats().getEvictions());
    assertEquals(0, cache.count());
    assertEquals(0, requestCacheStats.stats().getMemorySize().bytesAsInt());
    IOUtils.close(reader, writer, dir, cache);
    assertEquals(0, cache.numRegisteredCloseListeners());
}
Also used : ShardRequestCache(org.elasticsearch.index.cache.request.ShardRequestCache) ShardId(org.elasticsearch.index.shard.ShardId) BytesReference(org.elasticsearch.common.bytes.BytesReference) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) IndexWriter(org.apache.lucene.index.IndexWriter) DirectoryReader(org.apache.lucene.index.DirectoryReader) ElasticsearchDirectoryReader(org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader) TermQueryBuilder(org.elasticsearch.index.query.TermQueryBuilder) Directory(org.apache.lucene.store.Directory)

Example 29 with DirectoryReader

Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.

From the class IndicesRequestCacheTests, method testClearAllEntityIdentity.

public void testClearAllEntityIdentity() throws Exception {
    IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
    AtomicBoolean indexShard = new AtomicBoolean(true);
    ShardRequestCache requestCacheStats = new ShardRequestCache();
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.addDocument(newDoc(0, "foo"));
    DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
    TestEntity entity = new TestEntity(requestCacheStats, indexShard);
    Loader loader = new Loader(reader, 0);
    writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
    DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
    Loader secondLoader = new Loader(secondReader, 0);
    writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
    DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
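    // this entry uses a different cache identity, so clearing the shared identity must leave it in place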
    AtomicBoolean differentIdentity = new AtomicBoolean(true);
    TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity);
    Loader thirdLoader = new Loader(thirdReader, 0);
    BytesReference value1 = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value1.streamInput().readString());
    BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termQuery.buildAsBytes());
    assertEquals("bar", value2.streamInput().readString());
    logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
    BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termQuery.buildAsBytes());
    assertEquals("baz", value3.streamInput().readString());
    assertEquals(3, cache.count());
    final long hitCount = requestCacheStats.stats().getHitCount();
    // clear all entries for the indexShard identity even though it's still open
    cache.clear(randomFrom(entity, secondEntity));
    cache.cleanCache();
    assertEquals(1, cache.count());
    // the third entry has not been invalidated since it has a different identity
    value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termQuery.buildAsBytes());
    assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount());
    assertEquals("baz", value3.streamInput().readString());
    IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache);
}
Also used : ShardRequestCache(org.elasticsearch.index.cache.request.ShardRequestCache) BytesReference(org.elasticsearch.common.bytes.BytesReference) DirectoryReader(org.apache.lucene.index.DirectoryReader) ElasticsearchDirectoryReader(org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader) TermQueryBuilder(org.elasticsearch.index.query.TermQueryBuilder) Term(org.apache.lucene.index.Term) ShardId(org.elasticsearch.index.shard.ShardId) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) IndexWriter(org.apache.lucene.index.IndexWriter) Directory(org.apache.lucene.store.Directory)

Example 30 with DirectoryReader

Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.

From the class IndicesRequestCacheTests, method testEviction.

public void testEviction() throws Exception {
    final ByteSizeValue size;
    {
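        // first pass: cache two entries and measure their combined memory footprint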
        IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
        AtomicBoolean indexShard = new AtomicBoolean(true);
        ShardRequestCache requestCacheStats = new ShardRequestCache();
        Directory dir = newDirectory();
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
        writer.addDocument(newDoc(0, "foo"));
        DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
        TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
        TestEntity entity = new TestEntity(requestCacheStats, indexShard);
        Loader loader = new Loader(reader, 0);
        writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
        DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
        TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
        Loader secondLoader = new Loader(secondReader, 0);
        BytesReference value1 = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
        assertEquals("foo", value1.streamInput().readString());
        BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termQuery.buildAsBytes());
        assertEquals("bar", value2.streamInput().readString());
        size = requestCacheStats.stats().getMemorySize();
        IOUtils.close(reader, secondReader, writer, dir, cache);
    }
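    // second pass: cap the cache just above that footprint so that a third entry forces one eviction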
    IndicesRequestCache cache = new IndicesRequestCache(Settings.builder().put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), size.getBytes() + 1 + "b").build());
    AtomicBoolean indexShard = new AtomicBoolean(true);
    ShardRequestCache requestCacheStats = new ShardRequestCache();
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.addDocument(newDoc(0, "foo"));
    DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
    TestEntity entity = new TestEntity(requestCacheStats, indexShard);
    Loader loader = new Loader(reader, 0);
    writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
    DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
    Loader secondLoader = new Loader(secondReader, 0);
    writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
    DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard);
    Loader thirdLoader = new Loader(thirdReader, 0);
    BytesReference value1 = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value1.streamInput().readString());
    BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termQuery.buildAsBytes());
    assertEquals("bar", value2.streamInput().readString());
    logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
    BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termQuery.buildAsBytes());
    assertEquals("baz", value3.streamInput().readString());
    assertEquals(2, cache.count());
    assertEquals(1, requestCacheStats.stats().getEvictions());
    IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache);
}
Also used : ShardRequestCache(org.elasticsearch.index.cache.request.ShardRequestCache) BytesReference(org.elasticsearch.common.bytes.BytesReference) DirectoryReader(org.apache.lucene.index.DirectoryReader) ElasticsearchDirectoryReader(org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader) ByteSizeValue(org.elasticsearch.common.unit.ByteSizeValue) TermQueryBuilder(org.elasticsearch.index.query.TermQueryBuilder) Term(org.apache.lucene.index.Term) ShardId(org.elasticsearch.index.shard.ShardId) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) IndexWriter(org.apache.lucene.index.IndexWriter) Directory(org.apache.lucene.store.Directory)

Aggregations

DirectoryReader (org.apache.lucene.index.DirectoryReader): 324
Document (org.apache.lucene.document.Document): 217
Directory (org.apache.lucene.store.Directory): 185
RandomIndexWriter (org.apache.lucene.index.RandomIndexWriter): 149
IndexWriter (org.apache.lucene.index.IndexWriter): 124
Term (org.apache.lucene.index.Term): 123
MockAnalyzer (org.apache.lucene.analysis.MockAnalyzer): 98
IndexWriterConfig (org.apache.lucene.index.IndexWriterConfig): 86
IndexSearcher (org.apache.lucene.search.IndexSearcher): 84
StringField (org.apache.lucene.document.StringField): 59
Test (org.junit.Test): 51
Analyzer (org.apache.lucene.analysis.Analyzer): 50
BytesRef (org.apache.lucene.util.BytesRef): 50
LeafReader (org.apache.lucene.index.LeafReader): 47
ArrayList (java.util.ArrayList): 44
Field (org.apache.lucene.document.Field): 44
LeafReaderContext (org.apache.lucene.index.LeafReaderContext): 41
TermQuery (org.apache.lucene.search.TermQuery): 35
TextField (org.apache.lucene.document.TextField): 32
MatchAllDocsQuery (org.apache.lucene.search.MatchAllDocsQuery): 32