Use of org.apache.wiki.api.exceptions.ProviderException in project jspwiki by apache:
class MetaWeblogHandler, method getRecentPosts.
/**
 * Returns a list of the recent posts to this weblog.
 *
 * @param blogid The id of the blog.
 * @param username The username to authenticate with.
 * @param password The password for the user.
 * @param numberOfPosts The maximum number of posts to return.
 * @return A struct as per the MetaWeblog API specification.
 * @throws XmlRpcException If fetching the posts fails.
 */
// FIXME: The implementation is suboptimal, as it
// goes through all of the blog entries.
@SuppressWarnings("unchecked")
public Hashtable getRecentPosts(String blogid, String username, String password, int numberOfPosts) throws XmlRpcException {
    Hashtable<String, Hashtable<String, Object>> result = new Hashtable<String, Hashtable<String, Object>>();
    log.info("metaWeblog.getRecentPosts() called");
    WikiPage page = m_context.getEngine().getPage(blogid);
    checkPermissions(page, username, password, "view");
    try {
        WeblogPlugin plugin = new WeblogPlugin();
        // Fetch every blog entry ever written (hence the FIXME above), then sort by time.
        List<WikiPage> changed = plugin.findBlogEntries(m_context.getEngine(), blogid, new Date(0L), new Date());
        Collections.sort(changed, new PageTimeComparator());
        int items = 0;
        for (Iterator<WikiPage> i = changed.iterator(); i.hasNext() && items < numberOfPosts; items++) {
            WikiPage p = i.next();
            // Note: every iteration reuses the key "entry", so earlier posts are
            // overwritten and only the last one survives in the Hashtable.
            result.put("entry", makeEntry(p));
        }
    } catch (ProviderException e) {
        log.error("Failed to list recent posts", e);
        throw new XmlRpcException(0, e.getMessage());
    }
    return result;
}
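As the comment above notes, reusing the single "entry" key means only the last post is actually returned. The MetaWeblog specification describes getRecentPosts as returning an array of structs, which Apache XML-RPC serializes from a java.util.Vector of Hashtables. Below is a minimal sketch of that return shape, not the project's code: the makeEntry helper is reduced to a placeholder that only fills a title, and posts are represented as plain strings.

import java.util.Hashtable;
import java.util.List;
import java.util.Vector;

public class RecentPostsSketch {

    // Placeholder for MetaWeblogHandler.makeEntry(WikiPage); the real method
    // builds the full MetaWeblog struct from a wiki page.
    private static Hashtable<String, Object> makeEntry(String postTitle) {
        Hashtable<String, Object> entry = new Hashtable<>();
        entry.put("title", postTitle);
        return entry;
    }

    // Returns at most numberOfPosts entries as an array of structs, which
    // Apache XML-RPC serializes from a Vector of Hashtables.
    static Vector<Hashtable<String, Object>> recentPosts(List<String> sortedPosts, int numberOfPosts) {
        Vector<Hashtable<String, Object>> result = new Vector<>();
        for (String post : sortedPosts) {
            if (result.size() >= numberOfPosts) {
                break;
            }
            result.add(makeEntry(post)); // each post keeps its own struct
        }
        return result;
    }
}

For example, recentPosts(List.of("A", "B", "C"), 2) yields two separate structs instead of one overwritten entry.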
Use of org.apache.wiki.api.exceptions.ProviderException in project jspwiki by apache:
class BasicAttachmentProvider, method findPageDir.
/**
 * Finds the storage directory for a page and, if it exists, makes sure that it is valid.
 *
 * @param wikipage Name of the page whose attachment directory is wanted.
 * @return The attachment directory for the page; it is not created if it does not exist.
 * @throws ProviderException If the path exists but is not a directory.
 */
private File findPageDir(String wikipage) throws ProviderException {
    wikipage = mangleName(wikipage);
    File f = new File(m_storageDir, wikipage + DIR_EXTENSION);
    if (f.exists() && !f.isDirectory()) {
        throw new ProviderException("Storage dir '" + f.getAbsolutePath() + "' is not a directory!");
    }
    return f;
}
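The pattern here is to derive a filesystem-safe directory name from the page name and fail fast if something else already occupies that path. A self-contained sketch of the same pattern follows; URLEncoder stands in for JSPWiki's own mangleName, and the "-att" suffix is an assumption about DIR_EXTENSION rather than a verified constant.

import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class PageDirSketch {

    // Assumption: "-att" as the attachment directory suffix.
    private static final String DIR_EXTENSION = "-att";

    // Stand-in for BasicAttachmentProvider.mangleName: URL-encoding is one way
    // to make an arbitrary page name filesystem-safe.
    private static String mangleName(String pageName) throws UnsupportedEncodingException {
        return URLEncoder.encode(pageName, "UTF-8");
    }

    static File findPageDir(File storageDir, String wikipage) throws UnsupportedEncodingException {
        File f = new File(storageDir, mangleName(wikipage) + DIR_EXTENSION);
        if (f.exists() && !f.isDirectory()) {
            // Fail fast: a plain file is squatting on the attachment directory's path.
            throw new IllegalStateException("Storage dir '" + f.getAbsolutePath() + "' is not a directory!");
        }
        return f; // may not exist yet; callers create it when storing the first attachment
    }
}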
Use of org.apache.wiki.api.exceptions.ProviderException in project jspwiki by apache:
class BasicAttachmentProvider, method listAllChanged.
/**
* {@inheritDoc}
*/
// FIXME: Very unoptimized.
public List listAllChanged(Date timestamp) throws ProviderException {
    File attDir = new File(m_storageDir);
    if (!attDir.exists()) {
        throw new ProviderException("Specified attachment directory " + m_storageDir + " does not exist!");
    }
    ArrayList<Attachment> list = new ArrayList<Attachment>();
    // Each page's attachments live in a directory named "<page>" + DIR_EXTENSION;
    // the filter matches only those directories.
    String[] pagesWithAttachments = attDir.list(new AttachmentFilter());
    for (int i = 0; i < pagesWithAttachments.length; i++) {
        // Strip the directory suffix to recover the page name.
        String pageId = unmangleName(pagesWithAttachments[i]);
        pageId = pageId.substring(0, pageId.length() - DIR_EXTENSION.length());
        Collection c = listAttachments(new WikiPage(m_engine, pageId));
        for (Iterator it = c.iterator(); it.hasNext(); ) {
            Attachment att = (Attachment) it.next();
            if (att.getLastModified().after(timestamp)) {
                list.add(att);
            }
        }
    }
    Collections.sort(list, new PageTimeComparator());
    return list;
}
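JSPWiki's actual AttachmentFilter is internal to BasicAttachmentProvider, so as an illustration of the kind of FilenameFilter the listing above relies on, here is a minimal sketch; the "-att" suffix is again an assumption.

import java.io.File;
import java.io.FilenameFilter;

// Accept only directory entries whose names carry the attachment-directory suffix.
class AttachmentDirFilter implements FilenameFilter {

    private static final String DIR_EXTENSION = "-att";

    @Override
    public boolean accept(File dir, String name) {
        return name.endsWith(DIR_EXTENSION);
    }
}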
Use of org.apache.wiki.api.exceptions.ProviderException in project jspwiki by apache:
class SpamFilter, method refreshBlacklists.
/**
 * If the spam filter notices changes in any of its source pages (the spam-word
 * page, the banned-IP page, or the blacklist attachment), it refreshes the
 * compiled pattern lists automatically.
 *
 * @param context The wiki context used to look up the source pages and the attachment.
 */
private void refreshBlacklists(WikiContext context) {
    try {
        boolean rebuild = false;
        //
        //  Rebuild, if the spam words page, the attachment or the IP ban page
        //  has changed since the last rebuild.
        //
        WikiPage sourceSpam = context.getEngine().getPage(m_forbiddenWordsPage);
        if (sourceSpam != null) {
            if (m_spamPatterns == null || m_spamPatterns.isEmpty() || sourceSpam.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }
        Attachment att = context.getEngine().getAttachmentManager().getAttachmentInfo(context, m_blacklist);
        if (att != null) {
            if (m_spamPatterns == null || m_spamPatterns.isEmpty() || att.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }
        WikiPage sourceIPs = context.getEngine().getPage(m_forbiddenIPsPage);
        if (sourceIPs != null) {
            if (m_IPPatterns == null || m_IPPatterns.isEmpty() || sourceIPs.getLastModified().after(m_lastRebuild)) {
                rebuild = true;
            }
        }
        if (rebuild) {
            // Stamp the rebuild time first, so edits made while parsing trigger another rebuild.
            m_lastRebuild = new Date();
            m_spamPatterns = parseWordList(sourceSpam, (sourceSpam != null) ? (String) sourceSpam.getAttribute(LISTVAR) : null);
            log.info("Spam filter reloaded - recognizing " + m_spamPatterns.size() + " patterns from page " + m_forbiddenWordsPage);
            m_IPPatterns = parseWordList(sourceIPs, (sourceIPs != null) ? (String) sourceIPs.getAttribute(LISTIPVAR) : null);
            log.info("IP filter reloaded - recognizing " + m_IPPatterns.size() + " patterns from page " + m_forbiddenIPsPage);
            if (att != null) {
                InputStream in = context.getEngine().getAttachmentManager().getAttachmentStream(att);
                StringWriter out = new StringWriter();
                FileUtil.copyContents(new InputStreamReader(in, "UTF-8"), out);
                Collection<Pattern> blackList = parseBlacklist(out.toString());
                log.info("...recognizing additional " + blackList.size() + " patterns from blacklist " + m_blacklist);
                m_spamPatterns.addAll(blackList);
            }
        }
    } catch (IOException ex) {
        log.info("Unable to read attachment data, continuing...", ex);
    } catch (ProviderException ex) {
        log.info("Failed to read spam filter attachment, continuing...", ex);
    }
}
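The core idea above is a timestamp-guarded cache: rebuild only when the cache is empty or a source was modified after the last rebuild. A minimal sketch of that guard in isolation follows; the Source interface is a placeholder for illustration, not a JSPWiki type.

import java.util.Date;

class RefreshGuard {

    private Date lastRebuild = new Date(0L);

    // Placeholder abstraction over anything with a modification time
    // (a wiki page, an attachment, a file, ...).
    interface Source {
        Date lastModified();
    }

    boolean needsRebuild(Source source, boolean cacheEmpty) {
        return cacheEmpty || source.lastModified().after(lastRebuild);
    }

    // Stamp before re-parsing, as refreshBlacklists does with m_lastRebuild:
    // edits that land while parsing is in progress will look newer than the
    // stamp and trigger another rebuild on the next call.
    void markRebuilt() {
        lastRebuild = new Date();
    }
}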
Use of org.apache.wiki.api.exceptions.ProviderException in project jspwiki by apache:
class WeblogArchivePlugin, method execute.
/**
* {@inheritDoc}
*/
public String execute(WikiContext context, Map<String, String> params) throws PluginException {
    WikiEngine engine = context.getEngine();
    //
    //  Parameters
    //
    String weblogName = params.get(PARAM_PAGE);
    if (weblogName == null) {
        weblogName = context.getPage().getName();
    }
    // The outer quotes make the whole URL literal to SimpleDateFormat; the
    // quotes already embedded around ddMMyy toggle literal mode off, so only
    // that fragment is expanded as a date pattern.
    m_monthUrlFormat = new SimpleDateFormat("'" + context.getURL(WikiContext.VIEW, weblogName, "weblog.startDate='ddMMyy'&weblog.days=%d") + "'");
    StringBuilder sb = new StringBuilder();
    sb.append("<div class=\"weblogarchive\">\n");
    try {
        Collection<Calendar> months = collectMonths(engine, weblogName);
        int year = 0;
        //
        //  Output proper HTML.
        //
        sb.append("<ul>\n");
        if (months.size() > 0) {
            year = (months.iterator().next()).get(Calendar.YEAR);
            sb.append("<li class=\"archiveyear\">" + year + "</li>\n");
        }
        for (Iterator<Calendar> i = months.iterator(); i.hasNext(); ) {
            Calendar cal = i.next();
            if (cal.get(Calendar.YEAR) != year) {
                year = cal.get(Calendar.YEAR);
                sb.append("<li class=\"archiveyear\">" + year + "</li>\n");
            }
            sb.append(" <li>");
            sb.append(getMonthLink(cal));
            sb.append("</li>\n");
        }
        sb.append("</ul>\n");
    } catch (ProviderException ex) {
        log.info("Cannot get archive", ex);
        sb.append("Cannot get archive: " + ex.getMessage());
    }
    sb.append("</div>\n");
    return sb.toString();
}
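The SimpleDateFormat trick noted in the comment above is easy to verify in isolation. A runnable sketch follows; the URL is illustrative, not a real JSPWiki URL.

import java.text.SimpleDateFormat;
import java.util.Calendar;

public class MonthUrlSketch {

    public static void main(String[] args) {
        // Illustrative URL carrying an embedded 'ddMMyy' date pattern.
        String url = "Wiki.jsp?page=Main&weblog.startDate='ddMMyy'&weblog.days=%d";

        // Wrapping the URL in quotes makes it literal; the inner quotes
        // around ddMMyy switch literal mode off for just that fragment.
        SimpleDateFormat fmt = new SimpleDateFormat("'" + url + "'");

        Calendar cal = Calendar.getInstance();
        cal.set(2024, Calendar.MARCH, 1);

        // Prints Wiki.jsp?page=Main&weblog.startDate=010324&weblog.days=31
        System.out.println(String.format(fmt.format(cal.getTime()), 31));
    }
}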