Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
In class ShardUtilsTests, method testExtractShardId:
public void testExtractShardId() throws IOException {
    BaseDirectoryWrapper dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.commit();
    ShardId id = new ShardId("foo", "_na_", random().nextInt());
    try (DirectoryReader reader = DirectoryReader.open(writer)) {
        ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
        assertEquals(id, ShardUtils.extractShardId(wrap));
    }
    final int numDocs = 1 + random().nextInt(5);
    for (int i = 0; i < numDocs; i++) {
        Document d = new Document();
        d.add(newField("name", "foobar", StringField.TYPE_STORED));
        writer.addDocument(d);
        if (random().nextBoolean()) {
            writer.commit();
        }
    }
    try (DirectoryReader reader = DirectoryReader.open(writer)) {
        ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
        assertEquals(id, ShardUtils.extractShardId(wrap));
        CompositeReaderContext context = wrap.getContext();
        for (LeafReaderContext leaf : context.leaves()) {
            assertEquals(id, ShardUtils.extractShardId(leaf.reader()));
        }
    }
    IOUtils.close(writer, dir);
}
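The pattern the test exercises condenses to a few lines: wrap the near-real-time reader once, and the shard id stays recoverable from the top-level reader as well as from every leaf. A minimal sketch, assuming an already-open IndexWriter named writer and an arbitrary ShardId (both hypothetical here):

// minimal sketch (assumes an open IndexWriter named writer): wrap once, extract anywhere
ShardId shardId = new ShardId("my-index", "_na_", 0);
try (DirectoryReader reader = DirectoryReader.open(writer)) {
    ElasticsearchDirectoryReader wrapped = ElasticsearchDirectoryReader.wrap(reader, shardId);
    // the id is recoverable from the wrapped top-level reader...
    assertEquals(shardId, ShardUtils.extractShardId(wrapped));
    // ...and from each leaf reader underneath it
    for (LeafReaderContext leaf : wrapped.getContext().leaves()) {
        assertEquals(shardId, ShardUtils.extractShardId(leaf.reader()));
    }
}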
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
In class NestedSortingTests, method testDuel:
public void testDuel() throws Exception {
    final int numDocs = scaledRandomIntBetween(100, 1000);
    for (int i = 0; i < numDocs; ++i) {
        final int numChildren = randomInt(2);
        List<Document> docs = new ArrayList<>(numChildren + 1);
        for (int j = 0; j < numChildren; ++j) {
            Document doc = new Document();
            doc.add(new StringField("f", TestUtil.randomSimpleString(random(), 2), Field.Store.NO));
            doc.add(new StringField("__type", "child", Field.Store.NO));
            docs.add(doc);
        }
        if (randomBoolean()) {
            docs.add(new Document());
        }
        Document parent = new Document();
        parent.add(new StringField("__type", "parent", Field.Store.NO));
        docs.add(parent);
        writer.addDocuments(docs);
        if (rarely()) {
            // we need a few more segments than RandomIndexWriter would create by default
            DirectoryReader.open(writer).close();
        }
    }
    writer.commit();
    MultiValueMode sortMode = randomFrom(Arrays.asList(MultiValueMode.MIN, MultiValueMode.MAX));
    DirectoryReader reader = DirectoryReader.open(writer);
    reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
    IndexSearcher searcher = new IndexSearcher(reader);
    PagedBytesIndexFieldData indexFieldData1 = getForField("f");
    IndexFieldData<?> indexFieldData2 = NoOrdinalsStringFieldDataTests.hideOrdinals(indexFieldData1);
    final String missingValue = randomBoolean() ? null : TestUtil.randomSimpleString(random(), 2);
    final int n = randomIntBetween(1, numDocs + 2);
    final boolean reverse = randomBoolean();
    final TopDocs topDocs1 = getTopDocs(searcher, indexFieldData1, missingValue, sortMode, n, reverse);
    final TopDocs topDocs2 = getTopDocs(searcher, indexFieldData2, missingValue, sortMode, n, reverse);
    for (int i = 0; i < topDocs1.scoreDocs.length; ++i) {
        final FieldDoc fieldDoc1 = (FieldDoc) topDocs1.scoreDocs[i];
        final FieldDoc fieldDoc2 = (FieldDoc) topDocs2.scoreDocs[i];
        assertEquals(fieldDoc1.doc, fieldDoc2.doc);
        assertArrayEquals(fieldDoc1.fields, fieldDoc2.fields);
    }
    searcher.getIndexReader().close();
}
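The duel asserts that sorting with the ordinal-based field data (indexFieldData1) and with the same data stripped of ordinals (indexFieldData2) ranks parents identically, whatever MultiValueMode and missing value were drawn. Conceptually, the sort key a parent receives under MIN or MAX is just the smallest or largest of its children's "f" values; a hypothetical helper, not part of the test, illustrating that selection:

// conceptual sketch only: how a parent's sort key is derived from its children under MIN/MAX
// (uses java.util.Collections and java.util.List)
static String nestedSortKey(List<String> childValues, String missingValue, boolean max) {
    if (childValues.isEmpty()) {
        return missingValue; // parents without children sort by the configured missing value
    }
    return max ? Collections.max(childValues) : Collections.min(childValues);
}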
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
In class IndicesRequestCacheTests, method testInvalidate:
public void testInvalidate() throws Exception {
    ShardRequestCache requestCacheStats = new ShardRequestCache();
    IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.addDocument(newDoc(0, "foo"));
    DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
    AtomicBoolean indexShard = new AtomicBoolean(true);
    // initial cache
    TestEntity entity = new TestEntity(requestCacheStats, indexShard);
    Loader loader = new Loader(reader, 0);
    BytesReference value = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value.streamInput().readString());
    assertEquals(0, requestCacheStats.stats().getHitCount());
    assertEquals(1, requestCacheStats.stats().getMissCount());
    assertEquals(0, requestCacheStats.stats().getEvictions());
    assertFalse(loader.loadedFromCache);
    assertEquals(1, cache.count());
    // cache hit
    entity = new TestEntity(requestCacheStats, indexShard);
    loader = new Loader(reader, 0);
    value = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value.streamInput().readString());
    assertEquals(1, requestCacheStats.stats().getHitCount());
    assertEquals(1, requestCacheStats.stats().getMissCount());
    assertEquals(0, requestCacheStats.stats().getEvictions());
    assertTrue(loader.loadedFromCache);
    assertEquals(1, cache.count());
    assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length());
    assertEquals(1, cache.numRegisteredCloseListeners());
    // load again after invalidate
    entity = new TestEntity(requestCacheStats, indexShard);
    loader = new Loader(reader, 0);
    cache.invalidate(entity, reader, termQuery.buildAsBytes());
    value = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value.streamInput().readString());
    assertEquals(1, requestCacheStats.stats().getHitCount());
    assertEquals(2, requestCacheStats.stats().getMissCount());
    assertEquals(0, requestCacheStats.stats().getEvictions());
    assertFalse(loader.loadedFromCache);
    assertEquals(1, cache.count());
    assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length());
    assertEquals(1, cache.numRegisteredCloseListeners());
    // release
    if (randomBoolean()) {
        reader.close();
    } else {
        // closed shard but reader is still open
        indexShard.set(false);
        cache.clear(entity);
    }
    cache.cleanCache();
    assertEquals(1, requestCacheStats.stats().getHitCount());
    assertEquals(2, requestCacheStats.stats().getMissCount());
    assertEquals(0, requestCacheStats.stats().getEvictions());
    assertEquals(0, cache.count());
    assertEquals(0, requestCacheStats.stats().getMemorySize().bytesAsInt());
    IOUtils.close(reader, writer, dir, cache);
    assertEquals(0, cache.numRegisteredCloseListeners());
}
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
In class IndicesRequestCacheTests, method testClearAllEntityIdentity:
public void testClearAllEntityIdentity() throws Exception {
    IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
    AtomicBoolean indexShard = new AtomicBoolean(true);
    ShardRequestCache requestCacheStats = new ShardRequestCache();
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.addDocument(newDoc(0, "foo"));
    DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
    TestEntity entity = new TestEntity(requestCacheStats, indexShard);
    Loader loader = new Loader(reader, 0);
    writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
    DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
    Loader secondLoader = new Loader(secondReader, 0);
    writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
    DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    AtomicBoolean differentIdentity = new AtomicBoolean(true);
    TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity);
    Loader thirdLoader = new Loader(thirdReader, 0);
    BytesReference value1 = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value1.streamInput().readString());
    BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termQuery.buildAsBytes());
    assertEquals("bar", value2.streamInput().readString());
    logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
    BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termQuery.buildAsBytes());
    assertEquals("baz", value3.streamInput().readString());
    assertEquals(3, cache.count());
    final long hitCount = requestCacheStats.stats().getHitCount();
    // clear all entries for the indexShard identity even though it's still open
    cache.clear(randomFrom(entity, secondEntity));
    cache.cleanCache();
    assertEquals(1, cache.count());
    // the third entry has not been invalidated since it has a different identity
    value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termQuery.buildAsBytes());
    assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount());
    assertEquals("baz", value3.streamInput().readString());
    IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache);
}
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
In class IndicesRequestCacheTests, method testEviction:
public void testEviction() throws Exception {
    final ByteSizeValue size;
    {
        IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
        AtomicBoolean indexShard = new AtomicBoolean(true);
        ShardRequestCache requestCacheStats = new ShardRequestCache();
        Directory dir = newDirectory();
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
        writer.addDocument(newDoc(0, "foo"));
        DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
        TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
        TestEntity entity = new TestEntity(requestCacheStats, indexShard);
        Loader loader = new Loader(reader, 0);
        writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
        DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
        TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
        Loader secondLoader = new Loader(secondReader, 0);
        BytesReference value1 = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
        assertEquals("foo", value1.streamInput().readString());
        BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termQuery.buildAsBytes());
        assertEquals("bar", value2.streamInput().readString());
        size = requestCacheStats.stats().getMemorySize();
        IOUtils.close(reader, secondReader, writer, dir, cache);
    }
    IndicesRequestCache cache = new IndicesRequestCache(Settings.builder()
        .put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), size.getBytes() + 1 + "b").build());
    AtomicBoolean indexShard = new AtomicBoolean(true);
    ShardRequestCache requestCacheStats = new ShardRequestCache();
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.addDocument(newDoc(0, "foo"));
    DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
    TestEntity entity = new TestEntity(requestCacheStats, indexShard);
    Loader loader = new Loader(reader, 0);
    writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
    DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
    Loader secondLoader = new Loader(secondReader, 0);
    writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
    DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
    TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard);
    Loader thirdLoader = new Loader(thirdReader, 0);
    BytesReference value1 = cache.getOrCompute(entity, loader, reader, termQuery.buildAsBytes());
    assertEquals("foo", value1.streamInput().readString());
    BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termQuery.buildAsBytes());
    assertEquals("bar", value2.streamInput().readString());
    logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
    BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termQuery.buildAsBytes());
    assertEquals("baz", value3.streamInput().readString());
    assertEquals(2, cache.count());
    assertEquals(1, requestCacheStats.stats().getEvictions());
    IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache);
}
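The size limit here is deliberately tight: the first block measures how much memory two cached entries occupy, and the second cache is capped at that measurement plus one byte. The first two getOrCompute calls therefore fit, while the third pushes the cache over its limit and forces an eviction, which is why the test ends with two entries and one recorded eviction. The same limit can be written with an explicit value; a sketch assuming the two entries measured 256 bytes in total:

// hedged sketch: an explicit cap just above the measured footprint of two entries (assumed 256 bytes)
Settings settings = Settings.builder()
    .put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), "257b")
    .build();
IndicesRequestCache boundedCache = new IndicesRequestCache(settings);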