Use of org.apache.lucene.search.PhraseQuery.Builder in project lucene-solr by apache, from the class SynonymTokenizer, method testSimpleSpanFragmenter:
public void testSimpleSpanFragmenter() throws Exception {
  // "piece ... text ... very long": explicit positions 0, 2, 5, 6 leave gaps at 1, 3 and 4.
  Builder builder = new PhraseQuery.Builder();
  builder.add(new Term(FIELD_NAME, "piece"), 0);
  builder.add(new Term(FIELD_NAME, "text"), 2);
  builder.add(new Term(FIELD_NAME, "very"), 5);
  builder.add(new Term(FIELD_NAME, "long"), 6);
  PhraseQuery phraseQuery = builder.build();
  doSearching(phraseQuery);

  int maxNumFragmentsRequired = 2;

  QueryScorer scorer = new QueryScorer(query, FIELD_NAME);
  Highlighter highlighter = new Highlighter(this, scorer);

  for (int i = 0; i < hits.totalHits; i++) {
    final int docId = hits.scoreDocs[i].doc;
    final Document doc = searcher.doc(docId);
    String text = doc.get(FIELD_NAME);
    TokenStream tokenStream = getAnyTokenStream(FIELD_NAME, docId);

    highlighter.setTextFragmenter(new SimpleSpanFragmenter(scorer, 5));

    String result = highlighter.getBestFragments(tokenStream, text, maxNumFragmentsRequired, "...");
    if (VERBOSE) System.out.println("\t" + result);
  }

  phraseQuery = new PhraseQuery(FIELD_NAME, "been", "shot");
  // note: 'query' here is the field populated by the earlier doSearching(phraseQuery) call
  doSearching(query);

  maxNumFragmentsRequired = 2;

  scorer = new QueryScorer(query, FIELD_NAME);
  highlighter = new Highlighter(this, scorer);

  for (int i = 0; i < hits.totalHits; i++) {
    String text = searcher.doc(hits.scoreDocs[i].doc).get(FIELD_NAME);
    TokenStream tokenStream = analyzer.tokenStream(FIELD_NAME, text);

    highlighter.setTextFragmenter(new SimpleSpanFragmenter(scorer, 20));

    String result = highlighter.getBestFragments(tokenStream, text, maxNumFragmentsRequired, "...");
    if (VERBOSE) System.out.println("\t" + result);
  }
}
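The same pattern can be exercised outside the test harness. Below is a minimal, self-contained sketch, not taken from the test: the field name "contents", the sample text, the class name PhraseHighlightSketch, and the use of StandardAnalyzer are illustrative assumptions. It builds a position-gapped PhraseQuery with PhraseQuery.Builder and highlights it with QueryScorer plus SimpleSpanFragmenter.

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;

public class PhraseHighlightSketch {
  public static void main(String[] args) throws Exception {
    String field = "contents"; // hypothetical field name
    String text = "a longer piece of text that is very long in the middle";

    // Explicit positions 0, 2, 5, 6 leave gaps where other tokens may sit,
    // mirroring the gapped phrase built in the test above.
    PhraseQuery.Builder builder = new PhraseQuery.Builder();
    builder.add(new Term(field, "piece"), 0);
    builder.add(new Term(field, "text"), 2);
    builder.add(new Term(field, "very"), 5);
    builder.add(new Term(field, "long"), 6);
    PhraseQuery phraseQuery = builder.build();

    // QueryScorer extracts the phrase terms/spans to score fragments;
    // SimpleSpanFragmenter sizes fragments around the matching span.
    QueryScorer scorer = new QueryScorer(phraseQuery, field);
    Highlighter highlighter = new Highlighter(scorer); // default <B>...</B> formatter
    highlighter.setTextFragmenter(new SimpleSpanFragmenter(scorer, 20));

    try (Analyzer analyzer = new StandardAnalyzer()) {
      TokenStream tokenStream = analyzer.tokenStream(field, text);
      String fragments = highlighter.getBestFragments(tokenStream, text, 2, "...");
      System.out.println(fragments);
    }
  }
}

For the strictly consecutive two-word phrase in the second half of the test, the convenience constructor new PhraseQuery(FIELD_NAME, "been", "shot") is equivalent to adding the two terms at positions 0 and 1 through the builder.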
Use of org.apache.lucene.search.PhraseQuery.Builder in project lucene-solr by apache, from the class SynonymTokenizer, method testSimpleQueryScorerPhraseHighlighting:
public void testSimpleQueryScorerPhraseHighlighting() throws Exception {
  // "very long ... contains": positions 0, 1 and 3 leave a one-term gap before "contains".
  PhraseQuery.Builder builder = new PhraseQuery.Builder();
  builder.add(new Term(FIELD_NAME, "very"), 0);
  builder.add(new Term(FIELD_NAME, "long"), 1);
  builder.add(new Term(FIELD_NAME, "contains"), 3);
  PhraseQuery phraseQuery = builder.build();
  doSearching(phraseQuery);

  int maxNumFragmentsRequired = 2;

  QueryScorer scorer = new QueryScorer(query, FIELD_NAME);
  Highlighter highlighter = new Highlighter(this, scorer);

  for (int i = 0; i < hits.totalHits; i++) {
    final int docId = hits.scoreDocs[i].doc;
    final Document doc = searcher.doc(docId);
    String text = doc.get(FIELD_NAME);
    TokenStream tokenStream = getAnyTokenStream(FIELD_NAME, docId);

    highlighter.setTextFragmenter(new SimpleFragmenter(40));

    String result = highlighter.getBestFragments(tokenStream, text, maxNumFragmentsRequired, "...");
    if (VERBOSE) System.out.println("\t" + result);
  }
  assertTrue("Failed to find correct number of highlights " + numHighlights + " found", numHighlights == 3);

  numHighlights = 0;

  // a longer gapped phrase: "piece ... text refers ... kennedy"
  builder = new PhraseQuery.Builder();
  builder.add(new Term(FIELD_NAME, "piece"), 1);
  builder.add(new Term(FIELD_NAME, "text"), 3);
  builder.add(new Term(FIELD_NAME, "refers"), 4);
  builder.add(new Term(FIELD_NAME, "kennedy"), 6);
  phraseQuery = builder.build();
  doSearching(phraseQuery);

  maxNumFragmentsRequired = 2;

  scorer = new QueryScorer(query, FIELD_NAME);
  highlighter = new Highlighter(this, scorer);

  for (int i = 0; i < hits.totalHits; i++) {
    final int docId = hits.scoreDocs[i].doc;
    final Document doc = searcher.doc(docId);
    String text = doc.get(FIELD_NAME);
    TokenStream tokenStream = getAnyTokenStream(FIELD_NAME, docId);

    highlighter.setTextFragmenter(new SimpleFragmenter(40));

    String result = highlighter.getBestFragments(tokenStream, text, maxNumFragmentsRequired, "...");
    if (VERBOSE) System.out.println("\t" + result);
  }
  assertTrue("Failed to find correct number of highlights " + numHighlights + " found", numHighlights == 4);

  numHighlights = 0;

  // the same term repeated at evenly spaced positions 0, 4, 8, 12
  builder = new PhraseQuery.Builder();
  builder.add(new Term(FIELD_NAME, "lets"), 0);
  builder.add(new Term(FIELD_NAME, "lets"), 4);
  builder.add(new Term(FIELD_NAME, "lets"), 8);
  builder.add(new Term(FIELD_NAME, "lets"), 12);
  phraseQuery = builder.build();
  doSearching(phraseQuery);

  maxNumFragmentsRequired = 2;

  scorer = new QueryScorer(query, FIELD_NAME);
  highlighter = new Highlighter(this, scorer);

  for (int i = 0; i < hits.totalHits; i++) {
    final int docId = hits.scoreDocs[i].doc;
    final Document doc = searcher.doc(docId);
    String text = doc.get(FIELD_NAME);
    TokenStream tokenStream = getAnyTokenStream(FIELD_NAME, docId);

    highlighter.setTextFragmenter(new SimpleFragmenter(40));

    String result = highlighter.getBestFragments(tokenStream, text, maxNumFragmentsRequired, "...");
    if (VERBOSE) System.out.println("\t" + result);
  }
  assertTrue("Failed to find correct number of highlights " + numHighlights + " found", numHighlights == 4);
}
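Both methods pass this as the first Highlighter argument, so the enclosing test class acts as the org.apache.lucene.search.highlight.Formatter and bumps numHighlights each time a scored token group is formatted; that counter is what the assertTrue checks verify. Below is a minimal sketch of such a counting formatter; the class name CountingFormatter and the <B> markup are illustrative assumptions, not taken from the test.

import org.apache.lucene.search.highlight.Formatter;
import org.apache.lucene.search.highlight.TokenGroup;

public class CountingFormatter implements Formatter {
  private int numHighlights = 0;

  @Override
  public String highlightTerm(String originalText, TokenGroup tokenGroup) {
    // Only token groups that actually scored against the query are highlighted and counted.
    if (tokenGroup.getTotalScore() <= 0) {
      return originalText;
    }
    numHighlights++;
    return "<B>" + originalText + "</B>";
  }

  public int getNumHighlights() {
    return numHighlights;
  }
}

Wired in as new Highlighter(new CountingFormatter(), scorer), the count read back via getNumHighlights() after getBestFragments plays the same role as the numHighlights field asserted above.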