Usage of org.apache.wiki.api.core.Page in the Apache JSPWiki project.
Class BasicSearchProvider, method findPages:
/**
 * Searches all wiki pages for matches against the given query, honoring the
 * searching user's view permissions.
 *
 * @param query parsed query items to match against each page's content.
 * @param wikiContext context of the search; when {@code null}, permission
 *                    checks are skipped and all pages are searched.
 * @return the matching results sorted by {@link SearchResultComparator}, or
 *         {@code null} if the page list could not be retrieved at all.
 */
private Collection<SearchResult> findPages(final QueryItem[] query, final Context wikiContext) {
    final TreeSet<SearchResult> res = new TreeSet<>(new SearchResultComparator());
    final SearchMatcher matcher = new SearchMatcher(m_engine, query);
    // Look the manager up once; it is loop-invariant, so don't re-fetch it per page.
    final PageManager pageManager = m_engine.getManager(PageManager.class);
    final Collection<Page> allPages;
    try {
        allPages = pageManager.getAllPages();
    } catch (final ProviderException pe) {
        log.error("Unable to retrieve page list", pe);
        return null;
    }
    final AuthorizationManager mgr = m_engine.getManager(AuthorizationManager.class);
    for (final Page page : allPages) {
        try {
            if (page == null) {
                continue;
            }
            // Only include pages the searching user is allowed to view.
            final PagePermission pp = new PagePermission(page, PagePermission.VIEW_ACTION);
            if (wikiContext == null || mgr.checkPermission(wikiContext.getWikiSession(), pp)) {
                final String pageName = page.getName();
                // Attachment names are appended to the text so they are searchable too.
                final String pageContent = pageManager.getPageText(pageName, PageProvider.LATEST_VERSION) + attachmentNames(page);
                final SearchResult comparison = matcher.matchPageContent(pageName, pageContent);
                if (comparison != null) {
                    res.add(comparison);
                }
            }
        } catch (final ProviderException pe) {
            // Best effort: a single broken page must not abort the whole search.
            log.error("Unable to retrieve page from cache", pe);
        } catch (final IOException ioe) {
            log.error("Failed to search page", ioe);
        }
    }
    return res;
}
Usage of org.apache.wiki.api.core.Page in the Apache JSPWiki project.
Class LuceneSearchProvider, method doFullLuceneReindex:
/**
 * Performs a full Lucene reindex, if necessary.  The index is rebuilt only when
 * the Lucene directory exists but contains no files; if index files are already
 * present the method logs and returns.  Every wiki page and every attachment is
 * fed to the index writer; a failure on a single page or attachment is logged
 * and skipped so the rest of the reindex continues.
 *
 * @throws IOException If there's a problem during indexing
 */
protected void doFullLuceneReindex() throws IOException {
final File dir = new File(m_luceneDirectory);
// File.list() returns null when the path does not denote a readable directory.
final String[] filelist = dir.list();
if (filelist == null) {
throw new IOException("Invalid Lucene directory: cannot produce listing: " + dir.getAbsolutePath());
}
try {
if (filelist.length == 0) {
//
// No files? Reindex!
//
final Date start = new Date();
log.info("Starting Lucene reindexing, this can take a couple of minutes...");
final Directory luceneDir = new NIOFSDirectory(dir.toPath());
// try-with-resources guarantees the IndexWriter (and its write lock) is released.
try (final IndexWriter writer = getIndexWriter(luceneDir)) {
// Index the latest version of every wiki page.
final Collection<Page> allPages = m_engine.getManager(PageManager.class).getAllPages();
for (final Page page : allPages) {
try {
final String text = m_engine.getManager(PageManager.class).getPageText(page.getName(), WikiProvider.LATEST_VERSION);
luceneIndexPage(page, text, writer);
} catch (final IOException e) {
// Best effort: one unreadable page must not abort the whole reindex.
log.warn("Unable to index page " + page.getName() + ", continuing to next ", e);
}
}
// Index the latest version of every attachment as well.
final Collection<Attachment> allAttachments = m_engine.getManager(AttachmentManager.class).getAllAttachments();
for (final Attachment att : allAttachments) {
try {
final String text = getAttachmentContent(att.getName(), WikiProvider.LATEST_VERSION);
luceneIndexPage(att, text, writer);
} catch (final IOException e) {
// Best effort: skip unreadable attachments, keep indexing the rest.
log.warn("Unable to index attachment " + att.getName() + ", continuing to next", e);
}
}
}
final Date end = new Date();
log.info("Full Lucene index finished in " + (end.getTime() - start.getTime()) + " milliseconds.");
} else {
log.info("Files found in Lucene directory, not reindexing.");
}
} catch (final IOException e) {
// NOTE(review): despite the throws clause, IOExceptions from index creation are
// swallowed here and only logged; the engine continues without Lucene. Confirm
// this is intentional before tightening it.
log.error("Problem while creating Lucene index - not using Lucene.", e);
} catch (final ProviderException e) {
// NOTE(review): the ProviderException cause is dropped on rethrow - consider
// IllegalArgumentException(message, e) to preserve the stack trace.
log.error("Problem reading pages while creating Lucene index (JSPWiki won't start.)", e);
throw new IllegalArgumentException("unable to create Lucene index");
} catch (final Exception e) {
// Broad catch-all so any unexpected Lucene startup failure is at least logged.
log.error("Unable to start lucene", e);
}
}
Usage of org.apache.wiki.api.core.Page in the Apache JSPWiki project.
Class CachingProvider, method deleteVersion:
/**
 * {@inheritDoc}
 */
@Override
public void deleteVersion(final String pageName, final int version) throws ProviderException {
    // Deleting a version is so rare that coarse-grained synchronization is fine here.
    synchronized (this) {
        final Page cachedPage = getPageInfoFromCache(pageName);
        // MIN_VALUE acts as a "nothing cached" sentinel that no real version matches.
        final int latestCachedVersion = (cachedPage == null) ? Integer.MIN_VALUE : cachedPage.getVersion();
        // Evict page info and text whenever the deleted version is the one we hold.
        if (version == PageProvider.LATEST_VERSION || version == latestCachedVersion) {
            cachingManager.remove(CachingManager.CACHE_PAGES, pageName);
            cachingManager.remove(CachingManager.CACHE_PAGES_TEXT, pageName);
        }
        m_provider.deleteVersion(pageName, version);
        cachingManager.remove(CachingManager.CACHE_PAGES_HISTORY, pageName);
    }
}
Usage of org.apache.wiki.api.core.Page in the Apache JSPWiki project.
Class CachingProvider, method getPageInfo:
/**
 * {@inheritDoc}
 */
@Override
public Page getPageInfo(final String pageName, final int version) throws ProviderException {
    final Page cachedPage = getPageInfoFromCache(pageName);
    // MIN_VALUE is a "nothing cached" sentinel that no real version number matches.
    final int cachedVersion = (cachedPage == null) ? Integer.MIN_VALUE : cachedPage.getVersion();
    // Serve the latest (or already-cached) version from cache; older revisions
    // always go straight to the underlying provider, since they are not cached.
    final Page result = (version == PageProvider.LATEST_VERSION || version == cachedVersion)
            ? cachedPage
            : m_provider.getPageInfo(pageName, version);
    refreshMetadata(result);
    return result;
}
Usage of org.apache.wiki.api.core.Page in the Apache JSPWiki project.
Class WeblogPlugin, method execute:
/**
 * {@inheritDoc}
 * <p>
 * Renders the weblog: resolves the parameters (HTTP request parameters prefixed
 * with {@code weblog.} override plugin parameters), computes the date range of
 * entries to show, collects the blog entry pages, filters them by the viewer's
 * VIEW permission and emits them as HTML.
 */
@Override
public String execute(final Context context, final Map<String, String> params) throws PluginException {
    final Calendar startTime;
    final Calendar stopTime;
    int numDays = DEFAULT_DAYS;
    final Engine engine = context.getEngine();
    final AuthorizationManager mgr = engine.getManager(AuthorizationManager.class);
    //
    // Parse parameters.
    //
    String days;
    final DateFormat entryFormat;
    String startDay;
    boolean hasComments = false;
    int maxEntries;
    String weblogName;
    if ((weblogName = params.get(PARAM_PAGE)) == null) {
        weblogName = context.getPage().getName();
    }
    if ((days = context.getHttpParameter("weblog." + PARAM_DAYS)) == null) {
        days = params.get(PARAM_DAYS);
    }
    if ((params.get(PARAM_ENTRYFORMAT)) == null) {
        entryFormat = Preferences.getDateFormat(context, TimeFormat.DATETIME);
    } else {
        entryFormat = new SimpleDateFormat(params.get(PARAM_ENTRYFORMAT));
    }
    if (days != null) {
        if (days.equalsIgnoreCase("all")) {
            numDays = Integer.MAX_VALUE;
        } else {
            numDays = TextUtil.parseIntParameter(days, DEFAULT_DAYS);
        }
    }
    if ((startDay = params.get(PARAM_STARTDATE)) == null) {
        startDay = context.getHttpParameter("weblog." + PARAM_STARTDATE);
    }
    if (TextUtil.isPositive(params.get(PARAM_ALLOWCOMMENTS))) {
        hasComments = true;
    }
    maxEntries = TextUtil.parseIntParameter(params.get(PARAM_MAXENTRIES), Integer.MAX_VALUE);
    //
    // Determine the date range which to include.
    //
    startTime = Calendar.getInstance();
    stopTime = Calendar.getInstance();
    if (startDay != null) {
        final SimpleDateFormat fmt = new SimpleDateFormat(DEFAULT_DATEFORMAT);
        try {
            final Date d = fmt.parse(startDay);
            startTime.setTime(d);
            stopTime.setTime(d);
        } catch (final ParseException e) {
            return "Illegal time format: " + startDay;
        }
    }
    //
    // Mark this to be a weblog
    //
    context.getPage().setAttribute(ATTR_ISWEBLOG, "true");
    //
    // We make a wild guess here that nobody can do millisecond accuracy here.
    // Clamp the range to [start 00:00:00, stop 23:59:59].  Note: HOUR_OF_DAY is
    // the 24-hour field; Calendar.HOUR is the 12-hour field, so setting it to 0
    // would leave a PM time at noon, and setting it to 23 is out of range and a
    // lenient Calendar would roll the stop time into the next day.
    //
    startTime.add(Calendar.DAY_OF_MONTH, -numDays);
    startTime.set(Calendar.HOUR_OF_DAY, 0);
    startTime.set(Calendar.MINUTE, 0);
    startTime.set(Calendar.SECOND, 0);
    stopTime.set(Calendar.HOUR_OF_DAY, 23);
    stopTime.set(Calendar.MINUTE, 59);
    stopTime.set(Calendar.SECOND, 59);
    final StringBuilder sb = new StringBuilder();
    final List<Page> blogEntries = findBlogEntries(engine, weblogName, startTime.getTime(), stopTime.getTime());
    blogEntries.sort(new PageDateComparator());
    sb.append("<div class=\"weblog\">\n");
    // Emit at most maxEntries entries, skipping pages the viewer cannot see.
    for (final Iterator<Page> i = blogEntries.iterator(); i.hasNext() && maxEntries-- > 0; ) {
        final Page p = i.next();
        if (mgr.checkPermission(context.getWikiSession(), new PagePermission(p, PagePermission.VIEW_ACTION))) {
            addEntryHTML(context, entryFormat, hasComments, sb, p, params);
        }
    }
    sb.append("</div>\n");
    return sb.toString();
}
Aggregations