Use of org.apache.wiki.api.search.SearchResult in the jspwiki project (Apache).
From the class SaveWikiPageTaskTest, the method testSaveWikiPageTaskWithVersioningFileProvider.
@Test
public void testSaveWikiPageTaskWithVersioningFileProvider() throws Exception {
    // Engine backed by the VersioningFileProvider, with Lucene indexing delays
    // minimized so the search assertion below can complete quickly.
    final TestEngine wiki = TestEngine.build(with("jspwiki.pageProvider", "VersioningFileProvider"),
                                             with("jspwiki.lucene.initialdelay", "0"),
                                             with("jspwiki.lucene.indexdelay", "1"));
    final String testPageName = "TestSaveWikiPageTestPageWithVersioningFileProvider";
    final Page testPage = Wiki.contents().page(wiki, testPageName);

    // First revision is saved through the regular save path.
    wiki.saveText(testPageName, "initial text on first revision");

    // Second revision is proposed via the workflow facts and persisted by the task.
    final Context ctx = Wiki.context().create(wiki, wiki.newHttpRequest(), testPage);
    final SaveWikiPageTask saveTask = new SaveWikiPageTask();
    saveTask.setWorkflow(1, new HashMap<>());
    saveTask.getWorkflowContext().put(WorkflowManager.WF_WP_SAVE_FACT_PROPOSED_TEXT, "It is based on ties of history, culture, language, ethnicity, kinship and geography");

    final Collection<SearchResult> hits = new ArrayList<>();
    Assertions.assertEquals(Outcome.STEP_COMPLETE, saveTask.execute(ctx));

    // Indexing happens asynchronously; wait until the latest revision is searchable.
    Awaitility.await("ensure latest version of page gets indexed").until(findsResultsFor(ctx, hits, "kinship"));
    Assertions.assertEquals(1, hits.size(), "no pages found");
    Assertions.assertEquals(testPageName, hits.iterator().next().getPage().getName(), "page");
}
Use of org.apache.wiki.api.search.SearchResult in the jspwiki project (Apache).
From the class KendraSearchProvider, the method findPages.
/**
 * {@inheritDoc}
 *
 * Queries the AWS Kendra index for the given text and adapts each DOCUMENT
 * result into a {@link SearchResult}, skipping pages the current session may
 * not view. If Kendra throttles the request, an empty collection is returned.
 */
@Override
public Collection<SearchResult> findPages(final String query, final Context wikiContext) throws ProviderException, IOException {
    final QueryRequest request = new QueryRequest().withIndexId(indexId).withQueryText(query);
    final List<QueryResultItem> items;
    try {
        items = getKendra().query(request).getResultItems();
    } catch (final ThrottlingException e) {
        // Kendra is rate-limiting us; degrade gracefully to "no results", but keep
        // the exception in the log instead of swallowing the cause entirely.
        LOG.error("ThrottlingException. Skipping...", e);
        return new ArrayList<>();
    }
    final List<SearchResult> searchResults = new ArrayList<>(items.size());
    final AuthorizationManager mgr = engine.getManager(AuthorizationManager.class);
    for (final QueryResultItem item : items) {
        switch (QueryResultType.fromValue(item.getType())) {
            case DOCUMENT:
                // The document id is used as the wiki page name for the lookup below.
                final String documentId = item.getDocumentId();
                final String documentExcerpt = item.getDocumentExcerpt().getText();
                final String scoreConfidence = item.getScoreAttributes().getScoreConfidence();
                final Page page = this.engine.getManager(PageManager.class).getPage(documentId, PageProvider.LATEST_VERSION);
                if (page != null) {
                    // Only surface pages the current session is allowed to view.
                    final PagePermission pp = new PagePermission(page, PagePermission.VIEW_ACTION);
                    if (mgr.checkPermission(wikiContext.getWikiSession(), pp)) {
                        final SearchResult searchResult = new SearchResultImpl(page, confidence2score(scoreConfidence), new String[] { documentExcerpt });
                        searchResults.add(searchResult);
                    } else {
                        LOG.error(format("Page '%s' is not accessible", documentId));
                    }
                } else {
                    // Stale index entry: the page no longer exists, so evict it from the index.
                    LOG.error(format("Kendra found a result page '%s' that could not be loaded, removing from index", documentId));
                    pageRemoved(Wiki.contents().page(this.engine, documentId));
                }
                break;
            default:
                LOG.error(format("Unknown query result type: %s", item.getType()));
        }
    }
    return searchResults;
}
Use of org.apache.wiki.api.search.SearchResult in the jspwiki project (Apache).
From the class WikiPageAdapterProvider, the method findPages.
/**
 * {@inheritDoc}
 */
@Override
public Collection<SearchResult> findPages(final QueryItem[] query) {
    // Translate the public-API query items into the legacy provider's query type.
    final org.apache.wiki.search.QueryItem[] legacyQuery = new org.apache.wiki.search.QueryItem[query.length];
    for (int i = 0; i < query.length; i++) {
        legacyQuery[i] = SearchAdapter.oldQueryItemfrom(query[i]);
    }
    // Run the search on the wrapped provider and adapt each legacy hit back into
    // the public SearchResult type, keeping results ordered by SearchResultComparator.
    final TreeSet<SearchResult> adapted = new TreeSet<>(new SearchResultComparator());
    for (final org.apache.wiki.search.SearchResult legacyResult : provider.findPages(legacyQuery)) {
        adapted.add(SearchAdapter.newSearchResultFrom(legacyResult));
    }
    return adapted;
}
Use of org.apache.wiki.api.search.SearchResult in the jspwiki project (Apache).
From the class AbstractFileProvider, the method findPages.
/**
 * Iterates through all WikiPages, matches them against the given query, and returns a Collection of SearchResult objects.
 *
 * {@inheritDoc}
 */
@Override
public Collection<SearchResult> findPages(final QueryItem[] query) {
    final TreeSet<SearchResult> matches = new TreeSet<>(new SearchResultComparator());
    final SearchMatcher matcher = new SearchMatcher(m_engine, query);
    final File[] pageFiles = new File(m_pageDirectory).listFiles(new WikiFileFilter());
    if (pageFiles == null) {
        // Page directory missing or unreadable: nothing to search.
        return matches;
    }
    for (final File pageFile : pageFiles) {
        final String fileName = pageFile.getName();
        // Strip the file extension and undo the name mangling to recover the wiki page name.
        final String pageName = unmangleName(fileName.substring(0, fileName.lastIndexOf(FILE_EXT)));
        try (final InputStream in = Files.newInputStream(pageFile.toPath())) {
            final String pageText = FileUtil.readContents(in, m_encoding);
            final SearchResult match = matcher.matchPageContent(pageName, pageText);
            if (match != null) {
                matches.add(match);
            }
        } catch (final IOException e) {
            log.error("Failed to read " + fileName, e);
        }
    }
    return matches;
}
Use of org.apache.wiki.api.search.SearchResult in the jspwiki project (Apache).
From the class Search, the method renderResults.
/**
 * Renders the given search results as an XHTML table with a "Page" and a
 * "Score" column, capped at {@code maxItems} rows. When there are no results,
 * a single full-width "No results" row is emitted instead.
 *
 * @param results  search hits to render, in iteration order
 * @param context  the current wiki context, used to build page view links
 * @param maxItems maximum number of result rows to emit
 * @return the serialized XHTML table markup
 */
private String renderResults(final Collection<SearchResult> results, final Context context, final int maxItems) {
    final Engine engine = context.getEngine();
    final Element table = XhtmlUtil.element(XHTML.table);
    table.setAttribute(XHTML.ATTR_class, "wikitable search-result");

    // Header row.
    Element row = XhtmlUtil.element(XHTML.tr);
    table.addContent(row);
    final Element th1 = XhtmlUtil.element(XHTML.th, "Page");
    th1.setAttribute(XHTML.ATTR_width, "30%");
    th1.setAttribute(XHTML.ATTR_align, "left");
    row.addContent(th1);
    final Element th2 = XhtmlUtil.element(XHTML.th, "Score");
    th2.setAttribute(XHTML.ATTR_align, "left");
    row.addContent(th2);

    // One row per result, up to maxItems. Fixed off-by-one: the previous guard
    // (idx++ <= maxItems) rendered maxItems + 1 rows.
    int idx = 0;
    for (final Iterator<SearchResult> i = results.iterator(); i.hasNext() && idx++ < maxItems; ) {
        final SearchResult sr = i.next();
        row = XhtmlUtil.element(XHTML.tr);
        final Element name = XhtmlUtil.element(XHTML.td);
        name.setAttribute(XHTML.ATTR_width, "30%");
        name.addContent(XhtmlUtil.link(context.getURL(ContextEnum.PAGE_VIEW.getRequestContext(), sr.getPage().getName()), engine.getManager(RenderingManager.class).beautifyTitle(sr.getPage().getName())));
        row.addContent(name);
        row.addContent(XhtmlUtil.element(XHTML.td, "" + sr.getScore()));
        table.addContent(row);
    }

    if (results.isEmpty()) {
        row = XhtmlUtil.element(XHTML.tr);
        final Element td = XhtmlUtil.element(XHTML.td);
        td.setAttribute(XHTML.ATTR_colspan, "2");
        final Element b = XhtmlUtil.element(XHTML.b, "No results");
        td.addContent(b);
        row.addContent(td);
        table.addContent(row);
    }
    return XhtmlUtil.serialize(table);
}
Aggregations