Use of org.apache.lucene.search.TopDocs in project crate by crate.
From the class BooleanColumnReferenceTest, method testBooleanExpression:
@Test
public void testBooleanExpression() throws Exception {
    BooleanColumnReference booleanColumn = new BooleanColumnReference(column);
    booleanColumn.startCollect(ctx);
    booleanColumn.setNextReader(readerContext);
    IndexSearcher searcher = new IndexSearcher(readerContext.reader());
    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 20);
    int i = 0;
    for (ScoreDoc doc : topDocs.scoreDocs) {
        booleanColumn.setNextDocId(doc.doc);
        assertThat(booleanColumn.value(), is(i % 2 == 0));
        i++;
    }
}
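The index this test searches is prepared elsewhere in the test class and is not part of this snippet. A rough, hypothetical sketch of the data shape the assertions imply, namely documents whose boolean column alternates by doc id so that doc i holds i % 2 == 0; the field type, analyzer, and document count below are assumptions, not the actual crate fixture:

// Hypothetical fixture sketch only; 'column' is the column name used by the test above.
IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new WhitespaceAnalyzer()));
for (int i = 0; i < 20; i++) {
    Document doc = new Document();
    // doc i carries the value the assertion expects: true for even doc ids, false otherwise
    doc.add(new StringField(column, Boolean.toString(i % 2 == 0), Field.Store.NO));
    writer.addDocument(doc);
}
writer.close();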
Use of org.apache.lucene.search.TopDocs in project crate by crate.
From the class ByteColumnReferenceTest, method testByteExpression:
@Test
public void testByteExpression() throws Exception {
    ByteColumnReference byteColumn = new ByteColumnReference(column);
    byteColumn.startCollect(ctx);
    byteColumn.setNextReader(readerContext);
    IndexSearcher searcher = new IndexSearcher(readerContext.reader());
    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 20);
    byte b = -10;
    for (ScoreDoc doc : topDocs.scoreDocs) {
        byteColumn.setNextDocId(doc.doc);
        assertThat(byteColumn.value(), is(b));
        b++;
    }
}
Use of org.apache.lucene.search.TopDocs in project crate by crate.
From the class ShortColumnReferenceTest, method testShortExpression:
@Test
public void testShortExpression() throws Exception {
    ShortColumnReference shortColumn = new ShortColumnReference(column);
    shortColumn.startCollect(ctx);
    shortColumn.setNextReader(readerContext);
    IndexSearcher searcher = new IndexSearcher(readerContext.reader());
    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 20);
    short i = -10;
    for (ScoreDoc doc : topDocs.scoreDocs) {
        shortColumn.setNextDocId(doc.doc);
        assertThat(shortColumn.value(), is(i));
        i++;
    }
}
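Both the byte test and the short test above expect the column values to increase by one per document, starting at -10, in doc-id order. A hypothetical seeding sketch of that shape (illustrative only; the real fixture writes values through crate's own indexing path, and storing them as numeric doc values is an assumption here):

// Hypothetical seeding sketch; 'writer' and 'column' stand in for the fixture's own objects.
for (short value = -10; value < 10; value++) {
    Document doc = new Document();
    // one document per value, written as a numeric doc value keyed by the column name
    doc.add(new NumericDocValuesField(column, value));
    writer.addDocument(doc);
}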
Use of org.apache.lucene.search.TopDocs in project elasticsearch by elastic.
From the class PercolateQueryTests, method testPercolateQuery:
public void testPercolateQuery() throws Exception {
    List<Iterable<? extends IndexableField>> docs = new ArrayList<>();
    List<Query> queries = new ArrayList<>();
    PercolateQuery.QueryStore queryStore = ctx -> queries::get;

    queries.add(new TermQuery(new Term("field", "fox")));
    docs.add(Collections.singleton(new StringField("select", "a", Field.Store.NO)));

    SpanNearQuery.Builder snp = new SpanNearQuery.Builder("field", true);
    snp.addClause(new SpanTermQuery(new Term("field", "jumps")));
    snp.addClause(new SpanTermQuery(new Term("field", "lazy")));
    snp.addClause(new SpanTermQuery(new Term("field", "dog")));
    snp.setSlop(2);
    queries.add(snp.build());
    docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO)));

    PhraseQuery.Builder pq1 = new PhraseQuery.Builder();
    pq1.add(new Term("field", "quick"));
    pq1.add(new Term("field", "brown"));
    pq1.add(new Term("field", "jumps"));
    pq1.setSlop(1);
    queries.add(pq1.build());
    docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO)));

    BooleanQuery.Builder bq1 = new BooleanQuery.Builder();
    bq1.add(new TermQuery(new Term("field", "quick")), BooleanClause.Occur.MUST);
    bq1.add(new TermQuery(new Term("field", "brown")), BooleanClause.Occur.MUST);
    bq1.add(new TermQuery(new Term("field", "fox")), BooleanClause.Occur.MUST);
    queries.add(bq1.build());
    docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO)));

    indexWriter.addDocuments(docs);
    indexWriter.close();
    directoryReader = DirectoryReader.open(directory);
    IndexSearcher shardSearcher = newSearcher(directoryReader);

    MemoryIndex memoryIndex = new MemoryIndex();
    memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer());
    IndexSearcher percolateSearcher = memoryIndex.createSearcher();

    // no scoring, wrapping it in a constant score query:
    Query query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("a"),
            new TermQuery(new Term("select", "a")), percolateSearcher, new MatchNoDocsQuery("")));
    TopDocs topDocs = shardSearcher.search(query, 10);
    assertThat(topDocs.totalHits, equalTo(1));
    assertThat(topDocs.scoreDocs.length, equalTo(1));
    assertThat(topDocs.scoreDocs[0].doc, equalTo(0));
    Explanation explanation = shardSearcher.explain(query, 0);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score));

    query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("b"),
            new TermQuery(new Term("select", "b")), percolateSearcher, new MatchNoDocsQuery("")));
    topDocs = shardSearcher.search(query, 10);
    assertThat(topDocs.totalHits, equalTo(3));
    assertThat(topDocs.scoreDocs.length, equalTo(3));
    assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
    explanation = shardSearcher.explain(query, 1);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score));

    assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
    explanation = shardSearcher.explain(query, 2);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[1].score));

    assertThat(topDocs.scoreDocs[2].doc, equalTo(3));
    explanation = shardSearcher.explain(query, 3);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[2].score));

    query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("c"),
            new MatchAllDocsQuery(), percolateSearcher, new MatchAllDocsQuery()));
    topDocs = shardSearcher.search(query, 10);
    assertThat(topDocs.totalHits, equalTo(4));

    query = new PercolateQuery("type", queryStore, new BytesArray("{}"),
            new TermQuery(new Term("select", "b")), percolateSearcher, new MatchNoDocsQuery(""));
    topDocs = shardSearcher.search(query, 10);
    assertThat(topDocs.totalHits, equalTo(3));
    assertThat(topDocs.scoreDocs.length, equalTo(3));
    assertThat(topDocs.scoreDocs[0].doc, equalTo(3));
    explanation = shardSearcher.explain(query, 3);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score));
    assertThat(explanation.getDetails(), arrayWithSize(1));

    assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
    explanation = shardSearcher.explain(query, 2);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[1].score));
    assertThat(explanation.getDetails(), arrayWithSize(1));

    assertThat(topDocs.scoreDocs[2].doc, equalTo(1));
    explanation = shardSearcher.explain(query, 1);
    assertThat(explanation.isMatch(), is(true));
    assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[2].score));
    assertThat(explanation.getDetails(), arrayWithSize(1));
}
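Stripped of the PercolateQuery machinery, the core idea the test exercises can be sketched directly against Lucene's MemoryIndex: the document to percolate becomes a one-document in-memory index, and each registered query is run against it. This is a hypothetical standalone sketch reusing the queries list built above, not the PercolateQuery implementation:

// Percolation in miniature: one in-memory document, many stored queries.
MemoryIndex mi = new MemoryIndex();
mi.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer());
IndexSearcher singleDocSearcher = mi.createSearcher();
List<Integer> matchingSlots = new ArrayList<>();
for (int slot = 0; slot < queries.size(); slot++) {
    // a stored query matches the percolated document if it hits the single in-memory doc
    TopDocs hits = singleDocSearcher.search(queries.get(slot), 1);
    if (hits.totalHits > 0) {
        matchingSlots.add(slot);
    }
}
// with the four queries registered above, every slot matches, consistent with the test's assertions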
Use of org.apache.lucene.search.TopDocs in project elasticsearch by elastic.
From the class PercolateQuery, method createWeight:
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    final Weight verifiedMatchesWeight = verifiedMatchesQuery.createWeight(searcher, false);
    final Weight candidateMatchesWeight = candidateMatchesQuery.createWeight(searcher, false);
    return new Weight(this) {

        @Override
        public void extractTerms(Set<Term> set) {
        }

        @Override
        public Explanation explain(LeafReaderContext leafReaderContext, int docId) throws IOException {
            Scorer scorer = scorer(leafReaderContext);
            if (scorer != null) {
                TwoPhaseIterator twoPhaseIterator = scorer.twoPhaseIterator();
                int result = twoPhaseIterator.approximation().advance(docId);
                if (result == docId) {
                    if (twoPhaseIterator.matches()) {
                        if (needsScores) {
                            CheckedFunction<Integer, Query, IOException> percolatorQueries =
                                    queryStore.getQueries(leafReaderContext);
                            Query query = percolatorQueries.apply(docId);
                            Explanation detail = percolatorIndexSearcher.explain(query, 0);
                            return Explanation.match(scorer.score(), "PercolateQuery", detail);
                        } else {
                            return Explanation.match(scorer.score(), "PercolateQuery");
                        }
                    }
                }
            }
            return Explanation.noMatch("PercolateQuery");
        }

        @Override
        public float getValueForNormalization() throws IOException {
            return candidateMatchesWeight.getValueForNormalization();
        }

        @Override
        public void normalize(float v, float v1) {
            candidateMatchesWeight.normalize(v, v1);
        }

        @Override
        public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException {
            final Scorer approximation = candidateMatchesWeight.scorer(leafReaderContext);
            if (approximation == null) {
                return null;
            }
            final CheckedFunction<Integer, Query, IOException> queries = queryStore.getQueries(leafReaderContext);
            if (needsScores) {
                return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) {

                    float score;

                    @Override
                    boolean matchDocId(int docId) throws IOException {
                        Query query = percolatorQueries.apply(docId);
                        if (query != null) {
                            TopDocs topDocs = percolatorIndexSearcher.search(query, 1);
                            if (topDocs.totalHits > 0) {
                                score = topDocs.scoreDocs[0].score;
                                return true;
                            } else {
                                return false;
                            }
                        } else {
                            return false;
                        }
                    }

                    @Override
                    public float score() throws IOException {
                        return score;
                    }
                };
            } else {
                Scorer verifiedDocsScorer = verifiedMatchesWeight.scorer(leafReaderContext);
                Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer);
                return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) {

                    @Override
                    public float score() throws IOException {
                        return 0f;
                    }
                    boolean matchDocId(int docId) throws IOException {
                        // A doc in the verified bits is already a confirmed match, so the
                        // expensive MemoryIndex verification can be skipped for it.
                        if (verifiedDocsBits.get(docId)) {
                            return true;
                        }
                        Query query = percolatorQueries.apply(docId);
                        return query != null && Lucene.exists(percolatorIndexSearcher, query);
                    }
                };
            }
        }
    };
}
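Both branches above return anonymous subclasses of BaseScorer, which is not shown in this snippet. As a hypothetical reconstruction (the real class lives in the percolator module; the name BaseScorerSketch, the field and constructor shapes, and the Lucene 6.x-era Scorer API are assumptions here), its job is to expose the candidate-match scorer as an approximation behind a TwoPhaseIterator whose matches() delegates to matchDocId(), so the per-document verification only runs for candidate hits:

// Hypothetical sketch of a BaseScorer-style wrapper, inferred from how the
// anonymous subclasses above use their constructor arguments and fields.
abstract class BaseScorerSketch extends Scorer {

    final Scorer approximation;
    final CheckedFunction<Integer, Query, IOException> percolatorQueries;
    final IndexSearcher percolatorIndexSearcher;

    BaseScorerSketch(Weight weight, Scorer approximation,
                     CheckedFunction<Integer, Query, IOException> percolatorQueries,
                     IndexSearcher percolatorIndexSearcher) {
        super(weight);
        this.approximation = approximation;
        this.percolatorQueries = percolatorQueries;
        this.percolatorIndexSearcher = percolatorIndexSearcher;
    }

    @Override
    public final DocIdSetIterator iterator() {
        // callers that ignore the two-phase iterator still get the verification step
        return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
    }

    @Override
    public final TwoPhaseIterator twoPhaseIterator() {
        return new TwoPhaseIterator(approximation.iterator()) {
            @Override
            public boolean matches() throws IOException {
                return matchDocId(approximation.docID());
            }

            @Override
            public float matchCost() {
                // verifying a candidate means running a query against the MemoryIndex
                return 1000f;
            }
        };
    }

    @Override
    public int docID() {
        return approximation.docID();
    }

    @Override
    public int freq() throws IOException {
        return approximation.freq();
    }

    // the anonymous subclasses above implement this (and score())
    abstract boolean matchDocId(int docId) throws IOException;
}

Keeping the verification inside TwoPhaseIterator.matches() rather than in the iterator itself lets conjunctions intersect the cheap candidate approximation first and verify only the documents that survive it.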