Use of org.apache.wiki.api.search.SearchResult in the project jspwiki by apache.
From the class SearchManagerTest, the method testSimpleSearch3:
@Test
public void testSimpleSearch3() throws Exception {
    final String txt = "It was the dawn of the third age of mankind, ten years after the Earth-Minbari War.";
    final MockHttpServletRequest request = m_engine.newHttpRequest();
    request.getParameterMap().put("page", new String[] { "TestPage" });
    final Context ctx = Wiki.context().create(m_engine, request, ContextEnum.PAGE_EDIT.getRequestContext());
    m_engine.getManager(PageManager.class).saveText(ctx, txt);
    m_engine.getManager(PageManager.class).saveText(ctx, "The Babylon Project was a dream given form. Its goal: to prevent another war by creating a place where humans and aliens could work out their differences peacefully.");

    Collection<SearchResult> res = new ArrayList<>();
    Awaitility.await("testSimpleSearch3").until(findsResultsFor(res, "Babylon"));

    // check for text present in 1st m_engine.saveText() but not in 2nd
    res = m_mgr.findPages("mankind", ctx);
    Assertions.assertEquals(0, res.size(), "empty results");

    Awaitility.await("testSimpleSearch3").until(findsResultsFor(res, "Babylon"));
    Assertions.assertNotNull(res, "null result");
    Assertions.assertEquals(1, res.size(), "no pages");
    Assertions.assertEquals("TestPage", res.iterator().next().getPage().getName(), "page");

    m_engine.deleteTestPage("TestPage");
}
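The test polls with Awaitility because Lucene indexing in JSPWiki happens asynchronously, so the hits only appear once the background indexer has caught up. The findsResultsFor helper is defined elsewhere in SearchManagerTest and is not shown here; the following is a minimal sketch of what such a helper could look like, assuming a java.util.concurrent.Callable that re-runs the query and copies any hits into the supplied collection (the actual implementation in the test class may differ):

// Hedged sketch of the findsResultsFor helper used above; the Callable-based
// approach and the method body are assumptions inferred from how it is called.
private Callable<Boolean> findsResultsFor(final Collection<SearchResult> res, final String text) {
    return () -> {
        // Build a throwaway context for the query, mirroring the test setup above.
        final MockHttpServletRequest request = m_engine.newHttpRequest();
        final Context ctx = Wiki.context().create(m_engine, request, ContextEnum.PAGE_EDIT.getRequestContext());
        final Collection<SearchResult> search = m_mgr.findPages(text, ctx);
        if (search != null && !search.isEmpty()) {
            res.addAll(search); // expose the hits to the calling test
            return true;        // condition met, Awaitility stops polling
        }
        return false;           // not indexed yet, keep polling
    };
}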
Use of org.apache.wiki.api.search.SearchResult in the project jspwiki by apache.
From the class LuceneSearchProvider, the method findPages:
/**
 * Searches pages using a particular combination of flags.
 *
 * @param query The query to perform, in Lucene query language.
 * @param flags A set of flags; FLAG_CONTEXTS requests highlighted search contexts for each hit.
 * @param wikiContext The wiki context whose session is used to check view permission on each result page.
 * @return A Collection of SearchResult instances.
 * @throws ProviderException if Lucene cannot parse the query or there is a problem with the backend.
 */
public Collection<SearchResult> findPages(final String query, final int flags, final Context wikiContext) throws ProviderException {
    ArrayList<SearchResult> list = null;
    Highlighter highlighter = null;
    try (final Directory luceneDir = new NIOFSDirectory(new File(m_luceneDirectory).toPath());
         final IndexReader reader = DirectoryReader.open(luceneDir)) {
        final String[] queryfields = { LUCENE_PAGE_CONTENTS, LUCENE_PAGE_NAME, LUCENE_AUTHOR, LUCENE_ATTACHMENTS, LUCENE_PAGE_KEYWORDS };
        final QueryParser qp = new MultiFieldQueryParser(queryfields, getLuceneAnalyzer());
        final Query luceneQuery = qp.parse(query);
        final IndexSearcher searcher = new IndexSearcher(reader, searchExecutor);
        if ((flags & FLAG_CONTEXTS) != 0) {
            highlighter = new Highlighter(new SimpleHTMLFormatter("<span class=\"searchmatch\">", "</span>"), new SimpleHTMLEncoder(), new QueryScorer(luceneQuery));
        }
        final ScoreDoc[] hits = searcher.search(luceneQuery, MAX_SEARCH_HITS).scoreDocs;
        final AuthorizationManager mgr = m_engine.getManager(AuthorizationManager.class);
        list = new ArrayList<>(hits.length);
        for (final ScoreDoc hit : hits) {
            final int docID = hit.doc;
            final Document doc = searcher.doc(docID);
            final String pageName = doc.get(LUCENE_ID);
            final Page page = m_engine.getManager(PageManager.class).getPage(pageName, PageProvider.LATEST_VERSION);
            if (page != null) {
                final PagePermission pp = new PagePermission(page, PagePermission.VIEW_ACTION);
                if (mgr.checkPermission(wikiContext.getWikiSession(), pp)) {
                    final int score = (int) (hit.score * 100);
                    // Get highlighted search contexts
                    final String text = doc.get(LUCENE_PAGE_CONTENTS);
                    String[] fragments = new String[0];
                    if (text != null && highlighter != null) {
                        final TokenStream tokenStream = getLuceneAnalyzer().tokenStream(LUCENE_PAGE_CONTENTS, new StringReader(text));
                        fragments = highlighter.getBestFragments(tokenStream, text, MAX_FRAGMENTS);
                    }
                    final SearchResult result = new SearchResultImpl(page, score, fragments);
                    list.add(result);
                }
            } else {
                log.error("Lucene found a result page '" + pageName + "' that could not be loaded, removing from Lucene cache");
                pageRemoved(Wiki.contents().page(m_engine, pageName));
            }
        }
    } catch (final IOException e) {
        log.error("Failed during lucene search", e);
    } catch (final ParseException e) {
        log.info("Broken query; cannot parse query: " + query, e);
        throw new ProviderException("You have entered a query Lucene cannot process [" + query + "]: " + e.getMessage());
    } catch (final InvalidTokenOffsetsException e) {
        log.error("Tokens are incompatible with provided text ", e);
    }
    return list;
}
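As a usage note, a caller could consume the returned collection as in the sketch below. The provider variable, the query string, and the surrounding setup are illustrative assumptions; getPage(), getScore() and getContexts() are the SearchResult accessors that SearchResultImpl(page, score, fragments) above populates. Note that the method can return null if an IOException is logged, so callers should guard for that.

// Illustrative caller; assumes `provider` is a configured LuceneSearchProvider
// and `ctx` is a wiki Context for the current session.
final Collection<SearchResult> hits = provider.findPages("Babylon", FLAG_CONTEXTS, ctx);
if (hits != null) {
    for (final SearchResult hit : hits) {
        // The score is the Lucene score scaled to an int (hit.score * 100 above).
        System.out.println(hit.getPage().getName() + " (score " + hit.getScore() + ")");
        for (final String fragment : hit.getContexts()) {
            // Fragments carry <span class="searchmatch"> markup added by the Highlighter.
            System.out.println("  " + fragment);
        }
    }
}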