Use of org.apache.lucene.search.TermQuery in project elasticsearch by elastic.
From the class NestedSortingTests, method getTopDocs:
private TopDocs getTopDocs(IndexSearcher searcher, IndexFieldData<?> indexFieldData, String missingValue, MultiValueMode sortMode, int n, boolean reverse) throws IOException {
    // Parent and child documents are told apart by the "__type" marker field.
    Query parentFilter = new TermQuery(new Term("__type", "parent"));
    Query childFilter = new TermQuery(new Term("__type", "child"));
    XFieldComparatorSource nestedComparatorSource = indexFieldData.comparatorSource(missingValue, sortMode, createNested(searcher, parentFilter, childFilter));
    // Match all parents and sort them with the nested comparator on field "f".
    Query query = new ConstantScoreQuery(parentFilter);
    Sort sort = new Sort(new SortField("f", nestedComparatorSource, reverse));
    return searcher.search(query, n, sort);
}
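The helper above derives both filters from a marker field. For readers who want the same idea in plain Lucene, here is a minimal, hedged sketch (the helper name and its parameters are illustrative, not part of the Elasticsearch test) that joins matching children to their parents with a ToParentBlockJoinQuery:

import java.io.IOException;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.QueryBitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;

// Hypothetical helper: the "__type" marker values mirror the test above, and the
// searcher is assumed to read an index that was written in parent/child blocks.
private TopDocs searchParentsOfMatchingChildren(IndexSearcher searcher, Query childQuery, int n) throws IOException {
    // Parents are identified by the same TermQuery used in getTopDocs().
    Query parentFilter = new TermQuery(new Term("__type", "parent"));
    BitSetProducer parentBits = new QueryBitSetProducer(parentFilter);
    // Each matching child is mapped onto its enclosing parent document.
    Query joinQuery = new ToParentBlockJoinQuery(childQuery, parentBits, ScoreMode.None);
    return searcher.search(joinQuery, n);
}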
Use of org.apache.lucene.search.TermQuery in project elasticsearch by elastic.
From the class NestedSortingTests, method testNestedSorting:
public void testNestedSorting() throws Exception {
    // Each block is indexed children-first, parent-last via addDocuments().
    List<Document> docs = new ArrayList<>();
    Document document = new Document();
    document.add(new StringField("field2", "a", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "b", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "c", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("__type", "parent", Field.Store.NO));
    document.add(new StringField("field1", "a", Field.Store.NO));
    docs.add(document);
    writer.addDocuments(docs);
    writer.commit();
    docs.clear();
    document = new Document();
    document.add(new StringField("field2", "c", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "d", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "e", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("__type", "parent", Field.Store.NO));
    document.add(new StringField("field1", "b", Field.Store.NO));
    docs.add(document);
    writer.addDocuments(docs);
    docs.clear();
    document = new Document();
    document.add(new StringField("field2", "e", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "f", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "g", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("__type", "parent", Field.Store.NO));
    document.add(new StringField("field1", "c", Field.Store.NO));
    docs.add(document);
    writer.addDocuments(docs);
    docs.clear();
    document = new Document();
    document.add(new StringField("field2", "g", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "h", Field.Store.NO));
    document.add(new StringField("filter_1", "F", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "i", Field.Store.NO));
    document.add(new StringField("filter_1", "F", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("__type", "parent", Field.Store.NO));
    document.add(new StringField("field1", "d", Field.Store.NO));
    docs.add(document);
    writer.addDocuments(docs);
    writer.commit();
    docs.clear();
    document = new Document();
    document.add(new StringField("field2", "i", Field.Store.NO));
    document.add(new StringField("filter_1", "F", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "j", Field.Store.NO));
    document.add(new StringField("filter_1", "F", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "k", Field.Store.NO));
    document.add(new StringField("filter_1", "F", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("__type", "parent", Field.Store.NO));
    document.add(new StringField("field1", "f", Field.Store.NO));
    docs.add(document);
    writer.addDocuments(docs);
    docs.clear();
    document = new Document();
    document.add(new StringField("field2", "k", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "l", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "m", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("__type", "parent", Field.Store.NO));
    document.add(new StringField("field1", "g", Field.Store.NO));
    docs.add(document);
    writer.addDocuments(docs);
    // This doc will not be included, because it doesn't have nested docs
    document = new Document();
    document.add(new StringField("__type", "parent", Field.Store.NO));
    document.add(new StringField("field1", "h", Field.Store.NO));
    writer.addDocument(document);
    docs.clear();
    document = new Document();
    document.add(new StringField("field2", "m", Field.Store.NO));
    document.add(new StringField("filter_1", "T", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "n", Field.Store.NO));
    document.add(new StringField("filter_1", "F", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("field2", "o", Field.Store.NO));
    document.add(new StringField("filter_1", "F", Field.Store.NO));
    docs.add(document);
    document = new Document();
    document.add(new StringField("__type", "parent", Field.Store.NO));
    document.add(new StringField("field1", "i", Field.Store.NO));
    docs.add(document);
    writer.addDocuments(docs);
    writer.commit();
    // Some garbage docs, just to check if the NestedFieldComparator can deal with this.
    document = new Document();
    document.add(new StringField("fieldXXX", "x", Field.Store.NO));
    writer.addDocument(document);
    document = new Document();
    document.add(new StringField("fieldXXX", "x", Field.Store.NO));
    writer.addDocument(document);
    document = new Document();
    document.add(new StringField("fieldXXX", "x", Field.Store.NO));
    writer.addDocument(document);
    // Sort parents ascending by the MIN of their children's "field2" values.
    MultiValueMode sortMode = MultiValueMode.MIN;
    DirectoryReader reader = DirectoryReader.open(writer);
    reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
    IndexSearcher searcher = new IndexSearcher(reader);
    PagedBytesIndexFieldData indexFieldData = getForField("field2");
    Query parentFilter = new TermQuery(new Term("__type", "parent"));
    Query childFilter = Queries.not(parentFilter);
    BytesRefFieldComparatorSource nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(searcher, parentFilter, childFilter));
    ToParentBlockJoinQuery query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
    Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
    TopFieldDocs topDocs = searcher.search(query, 5, sort);
    assertThat(topDocs.totalHits, equalTo(7));
    assertThat(topDocs.scoreDocs.length, equalTo(5));
    assertThat(topDocs.scoreDocs[0].doc, equalTo(3));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("a"));
    assertThat(topDocs.scoreDocs[1].doc, equalTo(7));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).utf8ToString(), equalTo("c"));
    assertThat(topDocs.scoreDocs[2].doc, equalTo(11));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).utf8ToString(), equalTo("e"));
    assertThat(topDocs.scoreDocs[3].doc, equalTo(15));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).utf8ToString(), equalTo("g"));
    assertThat(topDocs.scoreDocs[4].doc, equalTo(19));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).utf8ToString(), equalTo("i"));
    // Now sort descending by the MAX of the children's "field2" values.
    sortMode = MultiValueMode.MAX;
    nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(searcher, parentFilter, childFilter));
    sort = new Sort(new SortField("field2", nestedComparatorSource, true));
    topDocs = searcher.search(query, 5, sort);
    assertThat(topDocs.totalHits, equalTo(7));
    assertThat(topDocs.scoreDocs.length, equalTo(5));
    assertThat(topDocs.scoreDocs[0].doc, equalTo(28));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("o"));
    assertThat(topDocs.scoreDocs[1].doc, equalTo(23));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).utf8ToString(), equalTo("m"));
    assertThat(topDocs.scoreDocs[2].doc, equalTo(19));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).utf8ToString(), equalTo("k"));
    assertThat(topDocs.scoreDocs[3].doc, equalTo(15));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).utf8ToString(), equalTo("i"));
    assertThat(topDocs.scoreDocs[4].doc, equalTo(11));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).utf8ToString(), equalTo("g"));
    // Restrict children to filter_1 == "T" and sort descending by MAX again.
    BooleanQuery.Builder bq = new BooleanQuery.Builder();
    bq.add(parentFilter, Occur.MUST_NOT);
    bq.add(new TermQuery(new Term("filter_1", "T")), Occur.MUST);
    childFilter = bq.build();
    nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(searcher, parentFilter, childFilter));
    query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
    sort = new Sort(new SortField("field2", nestedComparatorSource, true));
    topDocs = searcher.search(query, 5, sort);
    assertThat(topDocs.totalHits, equalTo(6));
    assertThat(topDocs.scoreDocs.length, equalTo(5));
    assertThat(topDocs.scoreDocs[0].doc, equalTo(23));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("m"));
    assertThat(topDocs.scoreDocs[1].doc, equalTo(28));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[1]).fields[0]).utf8ToString(), equalTo("m"));
    assertThat(topDocs.scoreDocs[2].doc, equalTo(11));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[2]).fields[0]).utf8ToString(), equalTo("g"));
    assertThat(topDocs.scoreDocs[3].doc, equalTo(15));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[3]).fields[0]).utf8ToString(), equalTo("g"));
    assertThat(topDocs.scoreDocs[4].doc, equalTo(7));
    assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).utf8ToString(), equalTo("e"));
    searcher.getIndexReader().close();
}
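The indexing half of the test repeats one pattern: child documents are added first and the parent document closes each block passed to addDocuments(). A minimal sketch of that convention follows; the field names are borrowed from the test, while the helper name is illustrative:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;

// Sketch: index one parent with its nested children as a single block.
// Lucene keeps the block contiguous, and the parent must be the LAST document
// in the block so that ToParentBlockJoinQuery can find it.
private void indexParentBlock(IndexWriter writer, String parentValue, String... childValues) throws IOException {
    List<Document> block = new ArrayList<>();
    for (String childValue : childValues) {
        Document child = new Document();
        child.add(new StringField("field2", childValue, Field.Store.NO));
        block.add(child);
    }
    Document parent = new Document();
    parent.add(new StringField("__type", "parent", Field.Store.NO));
    parent.add(new StringField("field1", parentValue, Field.Store.NO));
    block.add(parent);
    writer.addDocuments(block);
}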
Use of org.apache.lucene.search.TermQuery in project elasticsearch by elastic.
From the class ElasticsearchQueryCachingPolicyTests, method testDoesNotCacheTermQueries:
public void testDoesNotCacheTermQueries() throws IOException {
    QueryCachingPolicy policy = QueryCachingPolicy.ALWAYS_CACHE;
    // The default policy would happily cache both query types.
    assertTrue(policy.shouldCache(new TermQuery(new Term("foo", "bar"))));
    assertTrue(policy.shouldCache(new PhraseQuery("foo", "bar", "baz")));
    // Wrapped in the Elasticsearch policy, term queries are no longer cached.
    policy = new ElasticsearchQueryCachingPolicy(policy);
    assertFalse(policy.shouldCache(new TermQuery(new Term("foo", "bar"))));
    assertTrue(policy.shouldCache(new PhraseQuery("foo", "bar", "baz")));
}
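ElasticsearchQueryCachingPolicy only decides what gets cached; for it to have any effect, the policy has to be installed on a searcher together with a query cache. A minimal sketch, assuming a searcher is already open; the cache sizes are illustrative values, not Elasticsearch defaults:

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LRUQueryCache;
import org.apache.lucene.search.QueryCachingPolicy;

// Sketch: wire a caching policy and an LRU query cache into an IndexSearcher.
private void installPolicy(IndexSearcher searcher, QueryCachingPolicy policy) {
    // Illustrative limits: at most 1000 cached queries, 64 MB of cached bitsets.
    searcher.setQueryCache(new LRUQueryCache(1000, 64 * 1024 * 1024));
    searcher.setQueryCachingPolicy(policy);
}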
Use of org.apache.lucene.search.TermQuery in project elasticsearch by elastic.
From the class ElasticsearchQueryCachingPolicyTests, method testDoesNotPutTermQueriesIntoTheHistory:
public void testDoesNotPutTermQueriesIntoTheHistory() {
    boolean[] used = new boolean[1];
    // Delegate policy that only records whether onUse() was called.
    QueryCachingPolicy policy = new QueryCachingPolicy() {

        @Override
        public boolean shouldCache(Query query) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void onUse(Query query) {
            used[0] = true;
        }
    };
    policy = new ElasticsearchQueryCachingPolicy(policy);
    // Term queries must not reach the delegate's usage history.
    policy.onUse(new TermQuery(new Term("foo", "bar")));
    assertFalse(used[0]);
    // Other queries, such as phrase queries, must reach it.
    policy.onUse(new PhraseQuery("foo", "bar", "baz"));
    assertTrue(used[0]);
}
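The behaviour verified here, that term queries bypass both shouldCache and the usage history, can be written as a small delegating policy. The sketch below illustrates the idea and is not the actual ElasticsearchQueryCachingPolicy source:

import java.io.IOException;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.TermQuery;

// Sketch: a delegating policy that never caches plain term queries and keeps
// them out of the delegate's usage history.
class TermQuerySkippingPolicy implements QueryCachingPolicy {

    private final QueryCachingPolicy delegate;

    TermQuerySkippingPolicy(QueryCachingPolicy delegate) {
        this.delegate = delegate;
    }

    @Override
    public void onUse(Query query) {
        // Term queries are cheap to re-execute, so they are not even recorded.
        if (query.getClass() != TermQuery.class) {
            delegate.onUse(query);
        }
    }

    @Override
    public boolean shouldCache(Query query) throws IOException {
        if (query.getClass() == TermQuery.class) {
            return false;
        }
        return delegate.shouldCache(query);
    }
}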
Use of org.apache.lucene.search.TermQuery in project elasticsearch by elastic.
From the class IndexSearcherWrapperTests, method testReaderCloseListenerIsCalled:
public void testReaderCloseListenerIsCalled() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig();
    IndexWriter writer = new IndexWriter(dir, iwc);
    Document doc = new Document();
    doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
    doc.add(new TextField("field", "doc", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
    writer.addDocument(doc);
    DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
    IndexSearcher searcher = new IndexSearcher(open);
    assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
    final AtomicInteger closeCalls = new AtomicInteger(0);
    // Wrapper that masks "field" and counts how often the wrapped reader is closed.
    IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {

        @Override
        public DirectoryReader wrap(DirectoryReader reader) throws IOException {
            return new FieldMaskingReader("field", reader, closeCalls);
        }

        @Override
        public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
            return searcher;
        }
    };
    final int sourceRefCount = open.getRefCount();
    final AtomicInteger count = new AtomicInteger();
    final AtomicInteger outerCount = new AtomicInteger();
    try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) {
        final Engine.Searcher wrap = wrapper.wrap(engineSearcher);
        assertEquals(1, wrap.reader().getRefCount());
        ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), reader -> {
            if (reader == open) {
                count.incrementAndGet();
            }
            outerCount.incrementAndGet();
        });
        // The masked field is invisible through the wrapped searcher.
        assertEquals(0, wrap.searcher().search(new TermQuery(new Term("field", "doc")), 1).totalHits);
        wrap.close();
        assertFalse("wrapped reader is closed", wrap.reader().tryIncRef());
        // Closing the wrapper must not release the original reader's reference.
        assertEquals(sourceRefCount, open.getRefCount());
    }
    assertEquals(1, closeCalls.get());
    IOUtils.close(open, writer, dir);
    assertEquals(1, outerCount.get());
    assertEquals(1, count.get());
    assertEquals(0, open.getRefCount());
    assertEquals(1, closeCalls.get());
}
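The refCount assertions at the end rely on Lucene's reference counting on IndexReader. A minimal sketch of the tryIncRef()/decRef() pattern the test exercises, assuming an already-opened reader and reusing the field and term names from the test:

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

// Sketch: only search if the reader could be pinned, and always release the
// reference afterwards so getRefCount() drops back to its previous value.
private TopDocs searchIfStillOpen(IndexReader reader) throws IOException {
    if (reader.tryIncRef() == false) {
        // The reader was already closed, mirroring the tryIncRef assertion above.
        return null;
    }
    try {
        IndexSearcher searcher = new IndexSearcher(reader);
        return searcher.search(new TermQuery(new Term("field", "doc")), 1);
    } finally {
        reader.decRef();
    }
}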