Usage example of org.apache.wiki.WikiPage in the Apache JSPWiki project: class CachingProvider, method deleteVersion.
/**
 * {@inheritDoc}
 */
public void deleteVersion(String pageName, int version) throws ProviderException {
    synchronized (this) {
        // Find out which version is currently the newest one in the cache, if any.
        WikiPage cachedPage = getPageInfoFromCache(pageName);
        int newestInCache = Integer.MIN_VALUE;
        if (cachedPage != null) {
            newestInCache = cachedPage.getVersion();
        }
        // Removing the latest version (explicitly, or by matching the cached
        // latest) invalidates the page-info and page-text caches for this page.
        boolean latestAffected = (version == WikiPageProvider.LATEST_VERSION) || (version == newestInCache);
        if (latestAffected) {
            m_cache.remove(pageName);
            m_textCache.remove(pageName);
        }
        m_provider.deleteVersion(pageName, version);
        // The version history is stale regardless of which version was deleted.
        m_historyCache.remove(pageName);
    }
}
Usage example of org.apache.wiki.WikiPage in the Apache JSPWiki project: class CachingProvider, method getPageInfo.
/**
 * {@inheritDoc}
 */
public WikiPage getPageInfo(String pageName, int version) throws ProviderException {
    WikiPage cachedPage = getPageInfoFromCache(pageName);
    int newestInCache = (cachedPage == null) ? Integer.MIN_VALUE : cachedPage.getVersion();
    WikiPage result;
    if (version != WikiPageProvider.LATEST_VERSION && version != newestInCache) {
        // Old versions are never cached; always ask the real provider.
        result = m_provider.getPageInfo(pageName, version);
    } else if (cachedPage != null) {
        // Cache hit on the latest version.
        result = cachedPage;
    } else {
        // Cache miss on the latest version: fetch it and remember it (only
        // non-null results are worth caching).
        result = m_provider.getPageInfo(pageName, version);
        if (result != null) {
            m_cache.put(new Element(pageName, result));
        }
    }
    refreshMetadata(result);
    return result;
}
Usage example of org.apache.wiki.WikiPage in the Apache JSPWiki project: class AbstractReferralPlugin, method filterCollection.
/**
 * Filters a collection according to the include and exclude parameters.
 * <p>
 * If an include pattern list exists, only page names matching one of its
 * patterns are kept (include='*' behaves like no include at all); any name
 * matching an exclude pattern is then dropped — exclusion always wins.
 * When {@code m_lastModified} is set, the most recent modification date
 * among the included pages is tracked in {@code m_dateLastModified} as a
 * high watermark.
 *
 * @param c The collection to filter; elements may be {@link WikiPage}s or page-name Strings.
 * @return A filtered collection containing the same element kinds as the input.
 */
protected Collection filterCollection(Collection c) {
    ArrayList<Object> result = new ArrayList<Object>();
    PatternMatcher pm = new Perl5Matcher();
    for (Iterator i = c.iterator(); i.hasNext(); ) {
        Object item = i.next();
        // Elements may be either WikiPages or plain page-name strings.
        String pageName = (item instanceof WikiPage) ? ((WikiPage) item).getName() : (String) item;
        //
        // If include parameter exists, then by default we include only those
        // pages in it (excluding the ones in the exclude pattern list).
        //
        // include='*' means the same as no include.
        //
        boolean includeThis = (m_include == null);
        if (m_include != null) {
            for (int j = 0; j < m_include.length; j++) {
                if (pm.matches(pageName, m_include[j])) {
                    includeThis = true;
                    break;
                }
            }
        }
        if (m_exclude != null) {
            for (int j = 0; j < m_exclude.length; j++) {
                if (pm.matches(pageName, m_exclude[j])) {
                    includeThis = false;
                    break;
                }
            }
        }
        if (!includeThis) {
            continue;
        }
        // Preserve the original element kind in the result.
        result.add((item instanceof WikiPage) ? item : pageName);
        //
        // If we want to show the last modified date of the most recently
        // changed page, we keep a "high watermark" here.
        //
        if (m_lastModified) {
            WikiPage page = m_engine.getPage(pageName);
            if (page != null) {
                Date lastModPage = page.getLastModified();
                if (log.isDebugEnabled()) {
                    // FIX: log this page's own date; previously the watermark
                    // (m_dateLastModified) was printed here by mistake.
                    log.debug("lastModified Date of page " + pageName + " : " + lastModPage);
                }
                if (lastModPage.after(m_dateLastModified)) {
                    m_dateLastModified = lastModPage;
                }
            }
        }
    }
    return result;
}
Usage example of org.apache.wiki.WikiPage in the Apache JSPWiki project: class BugReportHandler, method execute.
/**
 * {@inheritDoc}
 * <p>
 * Collects the bug-report fields from the plugin parameters, renders them as
 * a wiki table followed by the free-form description, and saves the result as
 * a new wiki page whose name is derived from the report title.
 *
 * @param context the current wiki context; supplies the submitting user,
 *                localization bundle, and the engine used to save the page.
 * @param params  plugin parameters; the title is mandatory, while description,
 *                version, mappings and page parameters are optional.
 * @return an HTML link to the newly created page, an empty string when the
 *         title is empty, or a localized error message when saving fails.
 * @throws PluginException if the title parameter is missing or saving is not allowed.
 */
public String execute(WikiContext context, Map<String, String> params) throws PluginException {
    SimpleDateFormat format = new SimpleDateFormat(DEFAULT_DATEFORMAT);
    ResourceBundle rb = Preferences.getBundle(context, WikiPlugin.CORE_PLUGINS_RESOURCEBUNDLE);
    String title = params.get(PARAM_TITLE);
    String description = params.get(PARAM_DESCRIPTION);
    String version = params.get(PARAM_VERSION);
    // The submitter is the currently authenticated user, if any.
    String submitter = null;
    Principal wup = context.getCurrentUser();
    if (wup != null) {
        submitter = wup.getName();
    }
    if (title == null) {
        throw new PluginException(rb.getString("bugreporthandler.titlerequired"));
    }
    if (title.length() == 0) {
        // An empty (but present) title is silently ignored.
        return "";
    }
    if (description == null) {
        description = "";
    }
    if (version == null) {
        version = "unknown";
    }
    Properties mappings = parseMappings(params.get(PARAM_MAPPINGS));
    try {
        StringWriter str = new StringWriter();
        PrintWriter out = new PrintWriter(str);
        Date d = new Date();
        //
        // Outputting of basic data
        //
        out.println("|" + mappings.getProperty(PARAM_TITLE, "Title") + "|" + title);
        out.println("|" + mappings.getProperty("date", "Date") + "|" + format.format(d));
        out.println("|" + mappings.getProperty(PARAM_VERSION, "Version") + "|" + version);
        if (submitter != null) {
            out.println("|" + mappings.getProperty("submitter", "Submitter") + "|" + submitter);
        }
        //
        // Output the remaining, custom parameters as extra table rows.
        //
        for (Map.Entry<String, String> entry : params.entrySet()) {
            String key = entry.getKey();
            // Skip the well-known parameters and internal ("_"-prefixed) ones.
            if (key.equals(PARAM_TITLE) || key.equals(PARAM_DESCRIPTION) || key.equals(PARAM_VERSION)
                    || key.equals(PARAM_MAPPINGS) || key.equals(PARAM_PAGE) || key.startsWith("_")) {
                continue;
            }
            // A mapping to an empty heading suppresses the field entirely.
            String head = mappings.getProperty(key, key);
            if (head.length() > 0) {
                out.println("|" + head + "|" + entry.getValue());
            }
        }
        out.println();
        out.println(description);
        out.close();
        //
        // Now create a new page for this bug report
        //
        String pageName = findNextPage(context, title, params.get(PARAM_PAGE));
        WikiPage newPage = new WikiPage(context.getEngine(), pageName);
        WikiContext newContext = (WikiContext) context.clone();
        newContext.setPage(newPage);
        context.getEngine().saveText(newContext, str.toString());
        MessageFormat formatter = new MessageFormat("");
        formatter.applyPattern(rb.getString("bugreporthandler.new"));
        String[] args = { "<a href=\"" + context.getViewURL(pageName) + "\">" + pageName + "</a>" };
        return formatter.format(args);
    } catch (RedirectException e) {
        log.info("Saving not allowed, reason: '" + e.getMessage() + "', can't redirect to " + e.getRedirect());
        throw new PluginException("Saving not allowed, reason: " + e.getMessage());
    } catch (WikiException e) {
        log.error("Unable to save page!", e);
        return rb.getString("bugreporthandler.unable");
    }
}
Usage example of org.apache.wiki.WikiPage in the Apache JSPWiki project: class SpamFilter, method refreshBlacklists.
/**
 * If the spam filter notices changes in the black list page, it will refresh them automatically.
 * <p>
 * Rebuilds the spam-word and banned-IP pattern lists when the spam words page,
 * the blacklist attachment, or the IP ban page has been modified since the
 * last rebuild, or when no patterns have been loaded yet. I/O and provider
 * failures are logged and otherwise ignored so filtering can continue with
 * the previously loaded patterns.
 *
 * @param context the wiki context used to look up the source pages and attachment.
 */
private void refreshBlacklists(WikiContext context) {
    try {
        boolean rebuild = false;
        //
        // Rebuild, if the spam words page, the attachment or the IP ban page has changed since.
        //
        WikiPage sourceSpam = context.getEngine().getPage(m_forbiddenWordsPage);
        if (sourceSpam != null) {
            if (m_spamPatterns == null || m_spamPatterns.isEmpty() || sourceSpam.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }
        Attachment att = context.getEngine().getAttachmentManager().getAttachmentInfo(context, m_blacklist);
        if (att != null) {
            if (m_spamPatterns == null || m_spamPatterns.isEmpty() || att.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }
        WikiPage sourceIPs = context.getEngine().getPage(m_forbiddenIPsPage);
        if (sourceIPs != null) {
            if (m_IPPatterns == null || m_IPPatterns.isEmpty() || sourceIPs.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }
        //
        // Reload the spam and IP word lists, and the attachment-based blacklist.
        //
        if (rebuild) {
            m_lastRebuild = new Date();
            m_spamPatterns = parseWordList(sourceSpam, (sourceSpam != null) ? (String) sourceSpam.getAttribute(LISTVAR) : null);
            log.info("Spam filter reloaded - recognizing " + m_spamPatterns.size() + " patterns from page " + m_forbiddenWordsPage);
            m_IPPatterns = parseWordList(sourceIPs, (sourceIPs != null) ? (String) sourceIPs.getAttribute(LISTIPVAR) : null);
            log.info("IP filter reloaded - recognizing " + m_IPPatterns.size() + " patterns from page " + m_forbiddenIPsPage);
            if (att != null) {
                InputStream in = context.getEngine().getAttachmentManager().getAttachmentStream(att);
                try {
                    StringWriter out = new StringWriter();
                    FileUtil.copyContents(new InputStreamReader(in, "UTF-8"), out);
                    Collection<Pattern> blackList = parseBlacklist(out.toString());
                    log.info("...recognizing additional " + blackList.size() + " patterns from blacklist " + m_blacklist);
                    m_spamPatterns.addAll(blackList);
                } finally {
                    // FIX: the attachment stream was previously never closed (resource leak).
                    in.close();
                }
            }
        }
    } catch (IOException ex) {
        log.info("Unable to read attachment data, continuing...", ex);
    } catch (ProviderException ex) {
        log.info("Failed to read spam filter attachment, continuing...", ex);
    }
}
Aggregations