Use of org.apache.wiki.WikiPage in the Apache JSPWiki project: class SpamFilter, method checkPageName.
/**
 * Rejects a page save when the page name exceeds the configured maximum length.
 * Logs the incident and delegates the rejection to the configured spam strategy.
 *
 * @param context the wiki context of the request being checked
 * @param content the page content being saved (not inspected by this check)
 * @param change  the change under inspection (not inspected by this check)
 * @throws RedirectException if the spam-handling strategy redirects the request
 */
private void checkPageName(WikiContext context, String content, Change change) throws RedirectException {
    WikiPage page = context.getPage();
    String pageName = page.getName();
    // parseInt returns the primitive directly; Integer.valueOf would box and then auto-unbox.
    int maxlength = Integer.parseInt(m_pageNameMaxLength);
    if (pageName.length() > maxlength) {
        //
        // Spam filter has a match.
        //
        String uid = log(context, REJECT, REASON_PAGENAME_TOO_LONG + "(" + m_pageNameMaxLength + ")", pageName);
        log.info("SPAM:PageNameTooLong (" + uid + "). The length of the page name is too large (" + pageName.length() + " , limit is " + m_pageNameMaxLength + ")");
        checkStrategy(context, REASON_PAGENAME_TOO_LONG, "Herb says '" + pageName + "' is a bad pageName and I trust Herb! (Incident code " + uid + ")");
    }
}
Use of org.apache.wiki.WikiPage in the Apache JSPWiki project: class ReferredPagesPlugin, method execute.
/**
* {@inheritDoc}
*/
/**
 * {@inheritDoc}
 *
 * Renders a tree of pages referred to by the root page, up to {@code m_depth}
 * levels deep, filtered by the include/exclude regular expressions.
 */
public String execute(WikiContext context, Map<String, String> params) throws PluginException {
    m_engine = context.getEngine();
    WikiPage page = context.getPage();
    if (page == null) {
        return "";
    }
    // Parse parameters; every parameter falls back to a sensible default when absent.
    String rootname = params.get(PARAM_ROOT);
    if (rootname == null) {
        rootname = page.getName();
    }
    String format = params.get(PARAM_FORMAT);
    if (format == null) {
        format = "";
    }
    // "full" switches off the compact rendering; "sort" enables sorted output.
    if (format.contains("full")) {
        m_formatCompact = false;
    }
    if (format.contains("sort")) {
        m_formatSort = true;
    }
    m_depth = TextUtil.parseIntParameter(params.get(PARAM_DEPTH), MIN_DEPTH);
    if (m_depth > MAX_DEPTH) {
        m_depth = MAX_DEPTH;
    }
    String includePattern = params.get(PARAM_INCLUDE);
    if (includePattern == null) {
        includePattern = ".*"; // include every page by default
    }
    String excludePattern = params.get(PARAM_EXCLUDE);
    if (excludePattern == null) {
        excludePattern = "^$"; // exclude nothing by default (matches only the empty string)
    }
    log.debug("Fetching referred pages for " + rootname + " with a depth of " + m_depth + " with include pattern of " + includePattern + " with exclude pattern of " + excludePattern);
    //
    // do the actual work
    //
    String href = context.getViewURL(rootname);
    String title = "ReferredPagesPlugin: depth[" + m_depth + "] include[" + includePattern + "] exclude[" + excludePattern + "] format[" + (m_formatCompact ? "compact" : "full") + (m_formatSort ? " sort" : "") + "]";
    m_result.append("<div class=\"ReferredPagesPlugin\">\n");
    m_result.append("<a class=\"wikipage\" href=\"" + href + "\" title=\"" + title + "\">" + rootname + "</a>\n");
    m_exists.add(rootname);
    // pre compile all needed patterns
    // glob compiler : * is 0..n instance of any char -- more convenient as input
    // perl5 compiler : .* is 0..n instances of any char -- more powerful
    // PatternCompiler g_compiler = new GlobCompiler();
    PatternCompiler compiler = new Perl5Compiler();
    try {
        m_includePattern = compiler.compile(includePattern);
        m_excludePattern = compiler.compile(excludePattern);
    } catch (MalformedPatternException e) {
        // Whichever pattern is still null is the one whose compile() threw.
        // NOTE(review): the cause exception is dropped here — PluginException may not
        // accept a cause in this codebase; confirm before propagating it.
        if (m_includePattern == null) {
            throw new PluginException("Illegal include pattern detected.");
        } else if (m_excludePattern == null) {
            throw new PluginException("Illegal exclude pattern detected.");
        } else {
            throw new PluginException("Illegal internal pattern detected.");
        }
    }
    // go get all referred links
    getReferredPages(context, rootname, 0);
    // close and finish
    m_result.append("</div>\n");
    return m_result.toString();
}
Use of org.apache.wiki.WikiPage in the Apache JSPWiki project: class TableOfContents, method execute.
/**
* {@inheritDoc}
*/
/**
 * {@inheritDoc}
 *
 * Builds a table of contents for the current page by re-parsing its text
 * and collecting headings via the heading-listener callback. Guards against
 * infinite recursion with the {@code VAR_ALREADY_PROCESSING} context variable.
 */
public String execute(WikiContext context, Map<String, String> params) throws PluginException {
    WikiEngine engine = context.getEngine();
    WikiPage page = context.getPage();
    ResourceBundle rb = Preferences.getBundle(context, WikiPlugin.CORE_PLUGINS_RESOURCEBUNDLE);
    if (context.getVariable(VAR_ALREADY_PROCESSING) != null) {
        // Already inside a TOC render of this page: emit just a link to avoid recursion.
        return "<a href=\"#section-TOC\" class=\"toc\">" + rb.getString("tableofcontents.title") + "</a>";
    }
    StringBuilder sb = new StringBuilder();
    sb.append("<div class=\"toc\">\n");
    sb.append("<div class=\"collapsebox\">\n");
    String title = params.get(PARAM_TITLE);
    sb.append("<h4 id=\"section-TOC\">");
    if (title != null) {
        // Caller-supplied title; entity-escape it since it ends up in HTML.
        sb.append(TextUtil.replaceEntities(title));
    } else {
        sb.append(rb.getString("tableofcontents.title"));
    }
    sb.append("</h4>\n");
    // Should we use an ordered (numbered) list?
    // Constant-first equalsIgnoreCase is null-safe, so a missing or null
    // parameter simply yields false (the old containsKey chain could NPE on
    // an explicit null value).
    String numbered = params.get(PARAM_NUMBERED);
    m_usingNumberedList = "true".equalsIgnoreCase(numbered) || "yes".equalsIgnoreCase(numbered);
    // If we are using a numbered list, get the rest of the parameters (if any) ...
    if (m_usingNumberedList) {
        int start = 0;
        String startStr = params.get(PARAM_START);
        if ((startStr != null) && (startStr.matches("^\\d+$"))) {
            start = Integer.parseInt(startStr);
        }
        if (start < 0) {
            start = 0;
        }
        m_starting = start;
        // Level-1 counter is pre-decremented because it is incremented on first use.
        m_level1Index = start - 1;
        if (m_level1Index < 0) {
            m_level1Index = 0;
        }
        m_level2Index = 0;
        m_level3Index = 0;
        m_prefix = params.get(PARAM_PREFIX);
        if (m_prefix == null) {
            m_prefix = "";
        }
        m_lastLevel = Heading.HEADING_LARGE;
    }
    try {
        String wikiText = engine.getPureText(page);
        boolean runFilters = "true".equals(engine.getVariableManager().getValue(context, WikiEngine.PROP_RUNFILTERS, "true"));
        if (runFilters) {
            try {
                FilterManager fm = engine.getFilterManager();
                wikiText = fm.doPreTranslateFiltering(context, wikiText);
            } catch (Exception e) {
                log.error("Could not construct table of contents: Filter Error", e);
                throw new PluginException("Unable to construct table of contents (see logs)");
            }
        }
        // Mark that we are processing, so a nested TOC invocation short-circuits above.
        context.setVariable(VAR_ALREADY_PROCESSING, "x");
        MarkupParser parser = engine.getRenderingManager().getParser(context, wikiText);
        parser.addHeadingListener(this);
        parser.parse();
        sb.append("<ul>\n").append(m_buf).append("</ul>\n");
    } catch (IOException e) {
        log.error("Could not construct table of contents", e);
        throw new PluginException("Unable to construct table of contents (see logs)");
    }
    sb.append("</div>\n</div>\n");
    return sb.toString();
}
Use of org.apache.wiki.WikiPage in the Apache JSPWiki project: class WeblogArchivePlugin, method collectMonths.
/**
 * Collects the calendar timestamps of all blog entries on the given page,
 * sorted with {@link ArchiveComparator} (used to group entries by month).
 *
 * @param engine the wiki engine used to locate blog entries
 * @param page   the name of the weblog page
 * @return a sorted set of Calendar instances, one per blog entry
 * @throws ProviderException if the page provider fails while listing entries
 */
private SortedSet<Calendar> collectMonths(WikiEngine engine, String page) throws ProviderException {
    Comparator<Calendar> comp = new ArchiveComparator();
    TreeSet<Calendar> res = new TreeSet<Calendar>(comp);
    WeblogPlugin pl = new WeblogPlugin();
    // All entries from the epoch up to now.
    List<WikiPage> blogEntries = pl.findBlogEntries(engine, page, new Date(0L), new Date());
    for (WikiPage p : blogEntries) {
        // FIXME: Not correct, should parse page creation time.
        Date d = p.getLastModified();
        Calendar cal = Calendar.getInstance();
        cal.setTime(d);
        res.add(cal);
    }
    return res;
}
Use of org.apache.wiki.WikiPage in the Apache JSPWiki project: class VersioningFileProvider, method getPageInfo.
/**
* {@inheritDoc}
*/
/**
 * {@inheritDoc}
 *
 * Resolves a page at a specific version. The latest version lives in the
 * top-level page directory (handled by the superclass); older versions live
 * under the "OLD" directory as {@code <version><FILE_EXT>} files. After the
 * page object is located, author/changenote/custom properties for the
 * resolved version are attached from the page's property file.
 */
public WikiPage getPageInfo(String page, int version) throws ProviderException {
    int latest = findLatestVersion(page);
    int realVersion;
    WikiPage p = null;
    if (version == WikiPageProvider.LATEST_VERSION || version == latest || (version == 1 && latest == -1)) {
        //
        // Yes, we need to talk to the top level directory
        // to get this version.
        //
        // I am listening to Press Play On Tape's guitar version of
        // the good old C64 "Wizardry" -tune at this moment.
        // Oh, the memories...
        //
        // latest == -1 means no versioned copies exist yet; the page counts as version 1.
        realVersion = (latest >= 0) ? latest : 1;
        p = super.getPageInfo(page, WikiPageProvider.LATEST_VERSION);
        if (p != null) {
            p.setVersion(realVersion);
        }
    } else {
        //
        // The file is not the most recent, so we'll need to
        // find it from the deep trenches of the "OLD" directory
        // structure.
        //
        realVersion = version;
        File dir = findOldPageDir(page);
        if (!dir.exists() || !dir.isDirectory()) {
            return null;
        }
        File file = new File(dir, version + FILE_EXT);
        if (file.exists()) {
            p = new WikiPage(m_engine, page);
            p.setLastModified(new Date(file.lastModified()));
            p.setVersion(version);
        }
    }
    // Attach per-version metadata (author, changenote, custom props) if the page was found.
    if (p != null) {
        try {
            Properties props = getPageProperties(page);
            String author = props.getProperty(realVersion + ".author");
            if (author == null) {
                // we might not have a versioned author because the
                // old page was last maintained by FileSystemProvider
                Properties props2 = getHeritagePageProperties(page);
                author = props2.getProperty(WikiPage.AUTHOR);
            }
            if (author != null) {
                p.setAuthor(author);
            }
            String changenote = props.getProperty(realVersion + ".changenote");
            if (changenote != null)
                p.setAttribute(WikiPage.CHANGENOTE, changenote);
            // Set the props values to the page attributes
            setCustomProperties(p, props);
        } catch (IOException e) {
            // Fixed log message: was missing the space before the page name.
            log.error("Cannot get author for page " + page + ": ", e);
        }
    }
    return p;
}
Aggregations