Use of org.apache.wiki.api.core.Page in project jspwiki by apache.
From the class SpamFilter, method refreshBlacklists.
/**
* If the spam filter notices changes in the blacklist source pages or attachment, it refreshes the compiled patterns automatically.
*
* @param context associated WikiContext
*/
private void refreshBlacklists(final Context context) {
    try {
        boolean rebuild = false;

        // Rebuild, if the spam words page, the attachment or the IP ban page has changed since.
        final Page sourceSpam = context.getEngine().getManager(PageManager.class).getPage(m_forbiddenWordsPage);
        if (sourceSpam != null) {
            if (m_spamPatterns == null || m_spamPatterns.isEmpty() || sourceSpam.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }

        final Attachment att = context.getEngine().getManager(AttachmentManager.class).getAttachmentInfo(context, m_blacklist);
        if (att != null) {
            if (m_spamPatterns == null || m_spamPatterns.isEmpty() || att.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }

        final Page sourceIPs = context.getEngine().getManager(PageManager.class).getPage(m_forbiddenIPsPage);
        if (sourceIPs != null) {
            if (m_IPPatterns == null || m_IPPatterns.isEmpty() || sourceIPs.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }

        // Do the actual rebuilding. For simplicity's sake, we always rebuild the complete filter list regardless of what changed.
        if (rebuild) {
            m_lastRebuild = new Date();
            m_spamPatterns = parseWordList(sourceSpam, (sourceSpam != null) ? sourceSpam.getAttribute(LISTVAR) : null);
            log.info("Spam filter reloaded - recognizing " + m_spamPatterns.size() + " patterns from page " + m_forbiddenWordsPage);
            m_IPPatterns = parseWordList(sourceIPs, (sourceIPs != null) ? sourceIPs.getAttribute(LISTIPVAR) : null);
            log.info("IP filter reloaded - recognizing " + m_IPPatterns.size() + " patterns from page " + m_forbiddenIPsPage);
            if (att != null) {
                final InputStream in = context.getEngine().getManager(AttachmentManager.class).getAttachmentStream(att);
                final StringWriter out = new StringWriter();
                FileUtil.copyContents(new InputStreamReader(in, StandardCharsets.UTF_8), out);
                final Collection<Pattern> blackList = parseBlacklist(out.toString());
                log.info("...recognizing additional " + blackList.size() + " patterns from blacklist " + m_blacklist);
                m_spamPatterns.addAll(blackList);
            }
        }
    } catch (final IOException ex) {
        log.info("Unable to read attachment data, continuing...", ex);
    } catch (final ProviderException ex) {
        log.info("Failed to read spam filter attachment, continuing...", ex);
    }
}
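The rebuild decision above is a simple timestamp check: rebuild when nothing is cached yet, or when a source was modified after the last rebuild. A minimal, self-contained sketch of that check; the Source interface, the fields and the class name are stand-ins invented for illustration, not JSPWiki API:

import java.util.Date;
import java.util.List;

public class RefreshCheckSketch {

    // Stand-in for Page/Attachment: anything that knows when it was last modified.
    interface Source {
        Date getLastModified();
    }

    private Date lastRebuild = new Date(0L);   // epoch, so the first call always rebuilds
    private List<String> patterns;             // the cached, compiled pattern list

    boolean needsRebuild(final Source source) {
        // Rebuild when nothing is cached yet, or the source changed after the last rebuild.
        return source != null
                && (patterns == null || patterns.isEmpty() || source.getLastModified().after(lastRebuild));
    }
}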
Use of org.apache.wiki.api.core.Page in project jspwiki by apache.
From the class SpamFilter, method getChange.
/**
* Creates a simple text string describing the added content.
*
* @param context page context
* @param newText added content
* @return A Change object describing the added content; its change text is empty if there is no change.
*/
private static Change getChange(final Context context, final String newText) {
    final Page page = context.getPage();
    final StringBuffer change = new StringBuffer();
    final Engine engine = context.getEngine();

    // Get current page version
    final Change ch = new Change();
    try {
        final String oldText = engine.getManager(PageManager.class).getPureText(page.getName(), WikiProvider.LATEST_VERSION);
        final String[] first = Diff.stringToArray(oldText);
        final String[] second = Diff.stringToArray(newText);
        final Revision rev = Diff.diff(first, second, new MyersDiff());
        if (rev == null || rev.size() == 0) {
            return ch;
        }
        for (int i = 0; i < rev.size(); i++) {
            final Delta d = rev.getDelta(i);
            if (d instanceof AddDelta) {
                d.getRevised().toString(change, "", "\r\n");
                ch.m_adds++;
            } else if (d instanceof ChangeDelta) {
                d.getRevised().toString(change, "", "\r\n");
                ch.m_adds++;
            } else if (d instanceof DeleteDelta) {
                ch.m_removals++;
            }
        }
    } catch (final DifferentiationFailedException e) {
        log.error("Diff failed", e);
    }

    // Don't forget to include the change note, too
    final String changeNote = page.getAttribute(Page.CHANGENOTE);
    if (changeNote != null) {
        change.append("\r\n");
        change.append(changeNote);
    }

    // And author as well
    if (page.getAuthor() != null) {
        change.append("\r\n").append(page.getAuthor());
    }
    ch.m_change = change.toString();
    return ch;
}
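The core of the method is extracting only the added or changed lines from a diff. A standalone sketch of that step, assuming the JRCS diff classes that JSPWiki bundles (package org.suigeneris.jrcs.diff); the import package names are an assumption and the class name is invented, while the calls themselves mirror the snippet above:

import org.suigeneris.jrcs.diff.AddDelta;
import org.suigeneris.jrcs.diff.ChangeDelta;
import org.suigeneris.jrcs.diff.Delta;
import org.suigeneris.jrcs.diff.Diff;
import org.suigeneris.jrcs.diff.DifferentiationFailedException;
import org.suigeneris.jrcs.diff.Revision;
import org.suigeneris.jrcs.diff.myers.MyersDiff;

public class AddedTextSketch {

    // Returns only the lines that were added or changed between the two texts.
    static String addedLines(final String oldText, final String newText) throws DifferentiationFailedException {
        final String[] first = Diff.stringToArray(oldText);
        final String[] second = Diff.stringToArray(newText);
        final Revision rev = Diff.diff(first, second, new MyersDiff());
        final StringBuffer added = new StringBuffer();
        if (rev == null || rev.size() == 0) {
            return "";
        }
        for (int i = 0; i < rev.size(); i++) {
            final Delta d = rev.getDelta(i);
            if (d instanceof AddDelta || d instanceof ChangeDelta) {
                d.getRevised().toString(added, "", "\r\n");   // append the revised chunk line by line
            }
        }
        return added.toString();
    }
}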
Use of org.apache.wiki.api.core.Page in project jspwiki by apache.
From the class DefaultCommandResolver, method findCommand.
/**
* {@inheritDoc}
*/
@Override
public Command findCommand(final HttpServletRequest request, final String defaultContext) {
    // Corner case if request is null
    if (request == null) {
        return CommandResolver.findCommand(defaultContext);
    }

    Command command = null;

    // Determine the name of the page (which may be null)
    String pageName = extractPageFromParameter(defaultContext, request);

    // Can we find a special-page command matching the extracted page?
    if (pageName != null) {
        command = m_specialPages.get(pageName);
    }

    // If we haven't found a matching command yet, extract the JSP path and compare to our list of special pages
    if (command == null) {
        command = extractCommandFromPath(request);

        // Otherwise: use the default context
        if (command == null) {
            command = CONTEXTS.get(defaultContext);
            if (command == null) {
                throw new IllegalArgumentException("Wiki context " + defaultContext + " is illegal.");
            }
        }
    }

    // For PageCommand.VIEW, default to front page if a page wasn't supplied
    if (PageCommand.VIEW.equals(command) && pageName == null) {
        pageName = m_engine.getFrontPage();
    }

    // If we were passed a page parameter, try to resolve it
    if (command instanceof PageCommand && pageName != null) {
        // If there's a matching WikiPage, "wrap" the command
        final Page page = resolvePage(request, pageName);
        return command.targetedCommand(page);
    }

    // If "create group" command, target this wiki
    final String wiki = m_engine.getApplicationName();
    if (WikiCommand.CREATE_GROUP.equals(command)) {
        return WikiCommand.CREATE_GROUP.targetedCommand(wiki);
    }

    // If group command, see if we were passed a group name
    if (command instanceof GroupCommand) {
        String groupName = request.getParameter("group");
        groupName = TextUtil.replaceEntities(groupName);
        if (groupName != null && !groupName.isEmpty()) {
            final GroupPrincipal group = new GroupPrincipal(groupName);
            return command.targetedCommand(group);
        }
    }

    // No page provided; return an "ordinary" command
    return command;
}
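A hedged usage sketch for the resolver above: the class name, the hard-coded "view" default context and the javax.servlet import are assumptions made for illustration, while findCommand, getTarget and getRequestContext come from the API used in the snippet:

import javax.servlet.http.HttpServletRequest;

import org.apache.wiki.api.core.Command;
import org.apache.wiki.api.core.Page;
import org.apache.wiki.ui.CommandResolver;

public class FindCommandUsageSketch {

    // Resolve whatever the request points at, defaulting to the "view" context,
    // and describe the page the command was targeted with, if any.
    static String describe(final CommandResolver resolver, final HttpServletRequest request) {
        final Command command = resolver.findCommand(request, "view");
        final Object target = command.getTarget();
        if (target instanceof Page) {
            return "view of page " + ((Page) target).getName();
        }
        return "untargeted command for context " + command.getRequestContext();
    }
}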
Use of org.apache.wiki.api.core.Page in project jspwiki by apache.
From the class DefaultCommandResolver, method resolvePage.
/**
* {@inheritDoc}
*/
@Override
public Page resolvePage(final HttpServletRequest request, String page) {
    // See if the user included a version parameter
    int version = WikiProvider.LATEST_VERSION;
    final String rev = request.getParameter("version");
    if (rev != null) {
        try {
            version = Integer.parseInt(rev);
        } catch (final NumberFormatException e) {
            // This happens a lot with bots or other guys who are trying to test if we are vulnerable to e.g. XSS attacks.
            // We catch it here so that the admin does not get tons of mail.
        }
    }
    Page wikipage = m_engine.getManager(PageManager.class).getPage(page, version);
    if (wikipage == null) {
        page = MarkupParser.cleanLink(page);
        wikipage = Wiki.contents().page(m_engine, page);
    }
    return wikipage;
}
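The version handling above boils down to a lenient parse with a safe default. A minimal sketch of just that step; the class and method names are invented for illustration:

public class VersionParamSketch {

    // Lenient handling of the optional "version" request parameter, mirroring the method
    // above: any missing or non-numeric value falls back to the latest version.
    static int parseVersion(final String rev, final int latestVersion) {
        if (rev == null) {
            return latestVersion;
        }
        try {
            return Integer.parseInt(rev);
        } catch (final NumberFormatException e) {
            return latestVersion;   // bots probing with garbage input simply get the latest version
        }
    }
}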
Use of org.apache.wiki.api.core.Page in project jspwiki by apache.
From the class SearchManagerBean, method reload.
/**
* Starts a background thread which goes through all the pages and adds them to the reindex queue.
* <p>
* This method prevents itself from being called twice.
*/
public synchronized void reload() {
    if (m_updater == null) {
        m_updater = new WikiBackgroundThread(m_engine, 0) {

            int m_count;
            int m_max;

            @Override
            public void startupTask() throws Exception {
                super.startupTask();
                setName("Reindexer started");
            }

            @Override
            public void backgroundTask() throws Exception {
                final Collection<Page> allPages = m_engine.getManager(PageManager.class).getAllPages();
                final SearchManager mgr = m_engine.getManager(SearchManager.class);
                m_max = allPages.size();
                final ProgressItem pi = new ProgressItem() {
                    @Override
                    public int getProgress() {
                        return 100 * m_count / m_max;
                    }
                };
                m_engine.getManager(ProgressManager.class).startProgress(pi, PROGRESS_ID);
                for (final Page page : allPages) {
                    mgr.reindexPage(page);
                    m_count++;
                }
                m_engine.getManager(ProgressManager.class).stopProgress(PROGRESS_ID);
                shutdown();
                m_updater = null;
            }
        };
        m_updater.start();
    }
}
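The synchronized null-check of m_updater is what prevents a second reindex from being started while one is already running. A minimal, generic sketch of that "start at most one background worker" pattern with plain threads; WikiBackgroundThread and the progress reporting are left out, and all names here are invented:

public class SingleWorkerSketch {

    private Thread worker;

    // Start at most one background worker; a second call while one is running is a no-op.
    public synchronized void reload() {
        if (worker != null) {
            return;
        }
        worker = new Thread(() -> {
            try {
                // ... walk all pages and reindex them, reporting progress ...
            } finally {
                clearWorker();   // allow a later reload() once this run has finished
            }
        }, "Reindexer");
        worker.start();
    }

    private synchronized void clearWorker() {
        worker = null;
    }
}

One difference worth noting: the JSPWiki code resets m_updater inside backgroundTask() without holding the bean's monitor, while the sketch clears the reference under the same lock that reload() uses, which avoids a window where a concurrent reload() could observe a stale non-null reference.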