Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
The class ScaledFloatFieldTypeTests, method testFieldData: loads doc values for single- and multi-valued scaled_float fields and checks that each stored long is decoded as long / scalingFactor.
public void testFieldData() throws IOException {
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType();
ft.setScalingFactor(0.1 + randomDouble() * 100);
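// Random scaling factor in [0.1, 100.1); the assertions below expect each stored long to be decoded as long / scalingFactor.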
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
Document doc = new Document();
doc.add(new SortedNumericDocValuesField("scaled_float1", 10));
doc.add(new SortedNumericDocValuesField("scaled_float2", 5));
doc.add(new SortedNumericDocValuesField("scaled_float2", 12));
w.addDocument(doc);
try (DirectoryReader reader = DirectoryReader.open(w)) {
IndexMetaData indexMetadata = new IndexMetaData.Builder("index").settings(Settings.builder()
    .put("index.version.created", Version.CURRENT)
    .put("index.number_of_shards", 1)
    .put("index.number_of_replicas", 0)
    .build()).build();
IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY);
// single-valued
ft.setName("scaled_float1");
IndexNumericFieldData fielddata = (IndexNumericFieldData) ft.fielddataBuilder().build(indexSettings, ft, null, null, null);
assertEquals(fielddata.getNumericType(), IndexNumericFieldData.NumericType.DOUBLE);
AtomicNumericFieldData leafFieldData = fielddata.load(reader.leaves().get(0));
SortedNumericDoubleValues values = leafFieldData.getDoubleValues();
values.setDocument(0);
assertEquals(1, values.count());
assertEquals(10 / ft.getScalingFactor(), values.valueAt(0), 10e-5);
// multi-valued
ft.setName("scaled_float2");
fielddata = (IndexNumericFieldData) ft.fielddataBuilder().build(indexSettings, ft, null, null, null);
leafFieldData = fielddata.load(reader.leaves().get(0));
values = leafFieldData.getDoubleValues();
values.setDocument(0);
assertEquals(2, values.count());
assertEquals(5 / ft.getScalingFactor(), values.valueAt(0), 10e-5);
assertEquals(12 / ft.getScalingFactor(), values.valueAt(1), 10e-5);
}
IOUtils.close(w, dir);
}
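For context, a minimal sketch of the value round trip this test relies on. The ScaledFloatRoundTrip class and its encode/decode helpers are illustrative names, not part of the Elasticsearch API; the assumption is that a scaled_float stores Math.round(value * scalingFactor) as a long, and field data decodes a stored long as storedLong / scalingFactor, which is exactly what the assertions above check.

public final class ScaledFloatRoundTrip {

    static long encode(double value, double scalingFactor) {
        // assumed indexing side: scale the double, then round to the nearest long
        return Math.round(value * scalingFactor);
    }

    static double decode(long storedValue, double scalingFactor) {
        // decoding side checked by the test: divide the stored long by the scaling factor
        return storedValue / scalingFactor;
    }

    public static void main(String[] args) {
        double scalingFactor = 100.0;
        double original = 0.1234;
        long stored = encode(original, scalingFactor);    // 12
        double recovered = decode(stored, scalingFactor); // 0.12, precision is bounded by the factor
        System.out.println(stored + " -> " + recovered);
    }
}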
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
The class NestedSortingTests, method testNestedSorting: indexes blocks of child documents followed by their parent and verifies MIN/MAX sorting of parents by a child field, with and without an additional filter on the children.
public void testNestedSorting() throws Exception {
List<Document> docs = new ArrayList<>();
Document document = new Document();
document.add(new StringField("field2", "a", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "b", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "c", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("__type", "parent", Field.Store.NO));
document.add(new StringField("field1", "a", Field.Store.NO));
docs.add(document);
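// In a Lucene block join, the child documents must be added first and the parent last;
// addDocuments(docs) indexes them as one contiguous block with consecutive doc ids.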
writer.addDocuments(docs);
writer.commit();
docs.clear();
document = new Document();
document.add(new StringField("field2", "c", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "d", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "e", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("__type", "parent", Field.Store.NO));
document.add(new StringField("field1", "b", Field.Store.NO));
docs.add(document);
writer.addDocuments(docs);
docs.clear();
document = new Document();
document.add(new StringField("field2", "e", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "f", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "g", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("__type", "parent", Field.Store.NO));
document.add(new StringField("field1", "c", Field.Store.NO));
docs.add(document);
writer.addDocuments(docs);
docs.clear();
document = new Document();
document.add(new StringField("field2", "g", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "h", Field.Store.NO));
document.add(new StringField("filter_1", "F", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "i", Field.Store.NO));
document.add(new StringField("filter_1", "F", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("__type", "parent", Field.Store.NO));
document.add(new StringField("field1", "d", Field.Store.NO));
docs.add(document);
writer.addDocuments(docs);
writer.commit();
docs.clear();
document = new Document();
document.add(new StringField("field2", "i", Field.Store.NO));
document.add(new StringField("filter_1", "F", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "j", Field.Store.NO));
document.add(new StringField("filter_1", "F", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "k", Field.Store.NO));
document.add(new StringField("filter_1", "F", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("__type", "parent", Field.Store.NO));
document.add(new StringField("field1", "f", Field.Store.NO));
docs.add(document);
writer.addDocuments(docs);
docs.clear();
document = new Document();
document.add(new StringField("field2", "k", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "l", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "m", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("__type", "parent", Field.Store.NO));
document.add(new StringField("field1", "g", Field.Store.NO));
docs.add(document);
writer.addDocuments(docs);
// This doc will not be included, because it doesn't have nested docs
document = new Document();
document.add(new StringField("__type", "parent", Field.Store.NO));
document.add(new StringField("field1", "h", Field.Store.NO));
writer.addDocument(document);
docs.clear();
document = new Document();
document.add(new StringField("field2", "m", Field.Store.NO));
document.add(new StringField("filter_1", "T", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "n", Field.Store.NO));
document.add(new StringField("filter_1", "F", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("field2", "o", Field.Store.NO));
document.add(new StringField("filter_1", "F", Field.Store.NO));
docs.add(document);
document = new Document();
document.add(new StringField("__type", "parent", Field.Store.NO));
document.add(new StringField("field1", "i", Field.Store.NO));
docs.add(document);
writer.addDocuments(docs);
writer.commit();
// Some garbage docs, just to check if the NestedFieldComparator can deal with this.
document = new Document();
document.add(new StringField("fieldXXX", "x", Field.Store.NO));
writer.addDocument(document);
document = new Document();
document.add(new StringField("fieldXXX", "x", Field.Store.NO));
writer.addDocument(document);
document = new Document();
document.add(new StringField("fieldXXX", "x", Field.Store.NO));
writer.addDocument(document);
MultiValueMode sortMode = MultiValueMode.MIN;
DirectoryReader reader = DirectoryReader.open(writer);
reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(reader);
PagedBytesIndexFieldData indexFieldData = getForField("field2");
Query parentFilter = new TermQuery(new Term("__type", "parent"));
Query childFilter = Queries.not(parentFilter);
BytesRefFieldComparatorSource nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(searcher, parentFilter, childFilter));
ToParentBlockJoinQuery query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
TopFieldDocs topDocs = searcher.search(query, 5, sort);
assertThat(topDocs.totalHits, equalTo(7));
assertThat(topDocs.scoreDocs.length, equalTo(5));
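// Doc ids follow the block layout: each parent sits immediately after its three children,
// so the five parents returned here are docs 3, 7, 11, 15 and 19; the parent without
// children and the garbage docs are never hits.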
assertThat(topDocs.scoreDocs[0].doc, equalTo(3));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("a"));
assertThat(topDocs.scoreDocs[1].doc, equalTo(7));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).utf8ToString(), equalTo("c"));
assertThat(topDocs.scoreDocs[2].doc, equalTo(11));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).utf8ToString(), equalTo("e"));
assertThat(topDocs.scoreDocs[3].doc, equalTo(15));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).utf8ToString(), equalTo("g"));
assertThat(topDocs.scoreDocs[4].doc, equalTo(19));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).utf8ToString(), equalTo("i"));
sortMode = MultiValueMode.MAX;
nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(searcher, parentFilter, childFilter));
sort = new Sort(new SortField("field2", nestedComparatorSource, true));
topDocs = searcher.search(query, 5, sort);
assertThat(topDocs.totalHits, equalTo(7));
assertThat(topDocs.scoreDocs.length, equalTo(5));
assertThat(topDocs.scoreDocs[0].doc, equalTo(28));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("o"));
assertThat(topDocs.scoreDocs[1].doc, equalTo(23));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).utf8ToString(), equalTo("m"));
assertThat(topDocs.scoreDocs[2].doc, equalTo(19));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).utf8ToString(), equalTo("k"));
assertThat(topDocs.scoreDocs[3].doc, equalTo(15));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).utf8ToString(), equalTo("i"));
assertThat(topDocs.scoreDocs[4].doc, equalTo(11));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).utf8ToString(), equalTo("g"));
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(parentFilter, Occur.MUST_NOT);
bq.add(new TermQuery(new Term("filter_1", "T")), Occur.MUST);
childFilter = bq.build();
nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(searcher, parentFilter, childFilter));
query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
sort = new Sort(new SortField("field2", nestedComparatorSource, true));
topDocs = searcher.search(query, 5, sort);
assertThat(topDocs.totalHits, equalTo(6));
assertThat(topDocs.scoreDocs.length, equalTo(5));
assertThat(topDocs.scoreDocs[0].doc, equalTo(23));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("m"));
assertThat(topDocs.scoreDocs[1].doc, equalTo(28));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).utf8ToString(), equalTo("m"));
assertThat(topDocs.scoreDocs[2].doc, equalTo(11));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).utf8ToString(), equalTo("g"));
assertThat(topDocs.scoreDocs[3].doc, equalTo(15));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).utf8ToString(), equalTo("g"));
assertThat(topDocs.scoreDocs[4].doc, equalTo(7));
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).utf8ToString(), equalTo("e"));
searcher.getIndexReader().close();
}
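The repetitive block construction above could be expressed with a small helper. A minimal sketch, assuming the test's writer field is an IndexWriter (or anything exposing addDocuments) and reusing only classes the test already imports; addBlock is a hypothetical name, not part of NestedSortingTests:

// Hypothetical helper: index one block, child documents first, parent document last.
private static void addBlock(IndexWriter writer, String parentValue,
                             String[] childValues, String[] childFilters) throws IOException {
    List<Document> docs = new ArrayList<>();
    for (int i = 0; i < childValues.length; i++) {
        Document child = new Document();
        child.add(new StringField("field2", childValues[i], Field.Store.NO));
        child.add(new StringField("filter_1", childFilters[i], Field.Store.NO));
        docs.add(child);
    }
    Document parent = new Document();
    parent.add(new StringField("__type", "parent", Field.Store.NO));
    parent.add(new StringField("field1", parentValue, Field.Store.NO));
    docs.add(parent);
    writer.addDocuments(docs);
}

The first block above would then become addBlock(writer, "a", new String[] { "a", "b", "c" }, new String[] { "T", "T", "T" });.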
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
The class IndexSearcherWrapperTests, method testReaderCloseListenerIsCalled: verifies that a reader close listener registered on the wrapped searcher fires exactly once when the original DirectoryReader is closed.
public void testReaderCloseListenerIsCalled() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig();
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
final AtomicInteger closeCalls = new AtomicInteger(0);
IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {

    @Override
    public DirectoryReader wrap(DirectoryReader reader) throws IOException {
        return new FieldMaskingReader("field", reader, closeCalls);
    }

    @Override
    public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
        return searcher;
    }
};
final int sourceRefCount = open.getRefCount();
final AtomicInteger count = new AtomicInteger();
final AtomicInteger outerCount = new AtomicInteger();
try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) {
final Engine.Searcher wrap = wrapper.wrap(engineSearcher);
assertEquals(1, wrap.reader().getRefCount());
ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), reader -> {
    if (reader == open) {
        count.incrementAndGet();
    }
    outerCount.incrementAndGet();
});
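// count only increments for callbacks that deliver the original reader, while outerCount counts
// every callback; the assertions after IOUtils.close(...) below expect both to be exactly 1.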
assertEquals(0, wrap.searcher().search(new TermQuery(new Term("field", "doc")), 1).totalHits);
wrap.close();
assertFalse("wrapped reader is closed", wrap.reader().tryIncRef());
assertEquals(sourceRefCount, open.getRefCount());
}
assertEquals(1, closeCalls.get());
IOUtils.close(open, writer, dir);
assertEquals(1, outerCount.get());
assertEquals(1, count.get());
assertEquals(0, open.getRefCount());
assertEquals(1, closeCalls.get());
}
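FieldMaskingReader is a test helper that is not shown in this excerpt. A minimal sketch of the shape such a wrapper could take on the Lucene 6.x API used by this code: a FilterDirectoryReader that counts doClose() calls. The class name is illustrative, and the actual masking of the "field" field from each leaf is omitted here.

import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.LeafReader;

// Illustrative wrapper: delegates to the inner reader and counts how often it is closed.
class CloseCountingDirectoryReader extends FilterDirectoryReader {

    private final AtomicInteger closeCalls;

    CloseCountingDirectoryReader(DirectoryReader in, AtomicInteger closeCalls) throws IOException {
        super(in, new SubReaderWrapper() {
            @Override
            public LeafReader wrap(LeafReader reader) {
                // the real FieldMaskingReader would wrap each leaf here to hide one field
                return reader;
            }
        });
        this.closeCalls = closeCalls;
    }

    @Override
    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
        return new CloseCountingDirectoryReader(in, closeCalls);
    }

    @Override
    protected void doClose() throws IOException {
        super.doClose();
        closeCalls.incrementAndGet();
    }
}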
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
The class IndexSearcherWrapperTests, method testIsCacheable: verifies that cache entries keyed by the reader's core cache key are evicted through a reader close listener once the original reader is closed.
public void testIsCacheable() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig();
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
searcher.setSimilarity(iwc.getSimilarity());
final AtomicInteger closeCalls = new AtomicInteger(0);
IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {

    @Override
    public DirectoryReader wrap(DirectoryReader reader) throws IOException {
        return new FieldMaskingReader("field", reader, closeCalls);
    }

    @Override
    public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
        return searcher;
    }
};
final ConcurrentHashMap<Object, TopDocs> cache = new ConcurrentHashMap<>();
try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) {
try (Engine.Searcher wrap = wrapper.wrap(engineSearcher)) {
ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), reader -> {
    cache.remove(reader.getCoreCacheKey());
});
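// The close listener evicts the cached entry keyed by the reader's core cache key; the
// assertions below expect the cache to shrink from 1 to 0 once the original reader is closed.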
TopDocs search = wrap.searcher().search(new TermQuery(new Term("field", "doc")), 1);
cache.put(wrap.reader().getCoreCacheKey(), search);
}
}
assertEquals(1, closeCalls.get());
assertEquals(1, cache.size());
IOUtils.close(open, writer, dir);
assertEquals(0, cache.size());
assertEquals(1, closeCalls.get());
}
Use of org.apache.lucene.index.DirectoryReader in project elasticsearch by elastic.
The class IndexSearcherWrapperTests, method testNoWrap: verifies that the default IndexSearcherWrapper returns the original Engine.Searcher unchanged.
public void testNoWrap() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig();
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
IndexSearcher searcher = new IndexSearcher(open);
assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
searcher.setSimilarity(iwc.getSimilarity());
IndexSearcherWrapper wrapper = new IndexSearcherWrapper();
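// The base IndexSearcherWrapper does not wrap anything, so wrap(...) is expected to return
// the very same Engine.Searcher instance, as asserted with assertSame below.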
try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) {
final Engine.Searcher wrap = wrapper.wrap(engineSearcher);
assertSame(wrap, engineSearcher);
}
IOUtils.close(open, writer, dir);
}