Usage of org.apache.wiki.parser.MarkupParser in the Apache JSPWiki project:
the refreshMetadata method of the CachingProvider class.
//
// FIXME: Kludge: make sure that the page is also parsed and it gets all the
// necessary variables.
//
//
// FIXME: Kludge: make sure that the page is also parsed and it gets all the
// necessary variables.
//
/**
 * Forces a parse of the given page so that its metadata (variables, ACLs,
 * etc.) gets populated as a side effect of parsing.  The parse result
 * itself is intentionally discarded.
 *
 * @param page the page to refresh; a no-op if {@code null} or if the page
 *             already carries metadata
 */
private void refreshMetadata(WikiPage page) {
    if (page != null && !page.hasMetadata()) {
        RenderingManager mgr = m_engine.getRenderingManager();
        try {
            String data = m_provider.getPageText(page.getName(), page.getVersion());
            WikiContext ctx = new WikiContext(m_engine, page);
            MarkupParser parser = mgr.getParser(ctx, data);
            // Parsing attaches metadata to the page; the WikiDocument is discarded.
            parser.parse();
        } catch (Exception ex) {
            // Pass the exception to the logger so the failure cause is not lost
            // (previously only the message was logged, discarding the stack trace).
            log.debug("Failed to retrieve variables for wikipage " + page, ex);
        }
    }
}
Usage of org.apache.wiki.parser.MarkupParser in the Apache JSPWiki project:
the makeHTML method of the AbstractReferralPlugin class.
/**
* Makes HTML with common parameters.
*
* @param context The WikiContext
* @param wikitext The wikitext to render
* @return HTML
* @since 1.6.4
*/
/**
 * Renders the given wikitext to HTML using the common referral-plugin
 * parameters: link texts are trimmed to {@code m_maxwidth} characters and
 * image inlining is disabled.
 *
 * @param context The WikiContext
 * @param wikitext The wikitext to render
 * @return HTML, or an empty string if rendering fails
 * @since 1.6.4
 */
protected String makeHTML(WikiContext context, String wikitext) {
    RenderingManager renderer = m_engine.getRenderingManager();
    try {
        MarkupParser markupParser = renderer.getParser(context, wikitext);
        markupParser.addLinkTransmutator(new CutMutator(m_maxwidth));
        markupParser.enableImageInlining(false);
        WikiDocument document = markupParser.parse();
        return renderer.getHTML(context, document);
    } catch (IOException e) {
        log.error("Failed to convert page data to HTML", e);
    }
    return "";
}
Usage of org.apache.wiki.parser.MarkupParser in the Apache JSPWiki project:
the execute method of the TableOfContents class.
/**
* {@inheritDoc}
*/
/**
 * {@inheritDoc}
 *
 * Builds an HTML table of contents for the current page by re-parsing the
 * page text with this plugin registered as a heading listener.  Heading
 * callbacks accumulate list items into {@code m_buf}.
 */
public String execute(WikiContext context, Map<String, String> params) throws PluginException {
    WikiEngine engine = context.getEngine();
    WikiPage page = context.getPage();
    ResourceBundle rb = Preferences.getBundle(context, WikiPlugin.CORE_PLUGINS_RESOURCEBUNDLE);

    // Recursion guard: parsing the page below would re-invoke this plugin,
    // so a nested invocation just emits a link to the TOC anchor.
    if (context.getVariable(VAR_ALREADY_PROCESSING) != null) {
        return "<a href=\"#section-TOC\" class=\"toc\">" + rb.getString("tableofcontents.title") + "</a>";
    }

    StringBuilder sb = new StringBuilder();
    sb.append("<div class=\"toc\">\n");
    sb.append("<div class=\"collapsebox\">\n");
    String title = params.get(PARAM_TITLE);
    sb.append("<h4 id=\"section-TOC\">");
    if (title != null) {
        sb.append(TextUtil.replaceEntities(title));
    } else {
        sb.append(rb.getString("tableofcontents.title"));
    }
    sb.append("</h4>\n");

    // Should we use an ordered (numbered) list?  Accepts "true" or "yes",
    // case-insensitively.  A null value (key present, no value) counts as no.
    String numbered = params.get(PARAM_NUMBERED);
    m_usingNumberedList = numbered != null
            && (numbered.equalsIgnoreCase("true") || numbered.equalsIgnoreCase("yes"));

    // If we are using a numbered list, read the remaining parameters (if any).
    if (m_usingNumberedList) {
        int start = 0;
        String startStr = params.get(PARAM_START);
        if (startStr != null && startStr.matches("^\\d+$")) {
            try {
                start = Integer.parseInt(startStr);
            } catch (NumberFormatException nfe) {
                // All-digit input can still overflow int (e.g. "99999999999");
                // fall back to the default rather than propagate a runtime error.
                start = 0;
            }
        }
        if (start < 0) {
            start = 0;
        }
        m_starting = start;
        // The level-1 counter is pre-decremented so the first heading lands on 'start'.
        m_level1Index = Math.max(start - 1, 0);
        m_level2Index = 0;
        m_level3Index = 0;
        m_prefix = params.get(PARAM_PREFIX);
        if (m_prefix == null) {
            m_prefix = "";
        }
        m_lastLevel = Heading.HEADING_LARGE;
    }

    try {
        String wikiText = engine.getPureText(page);
        boolean runFilters = "true".equals(engine.getVariableManager().getValue(context, WikiEngine.PROP_RUNFILTERS, "true"));
        if (runFilters) {
            try {
                FilterManager fm = engine.getFilterManager();
                wikiText = fm.doPreTranslateFiltering(context, wikiText);
            } catch (Exception e) {
                log.error("Could not construct table of contents: Filter Error", e);
                throw new PluginException("Unable to construct table of contents (see logs)");
            }
        }

        context.setVariable(VAR_ALREADY_PROCESSING, "x");
        MarkupParser parser = engine.getRenderingManager().getParser(context, wikiText);
        // Headings are collected into m_buf through the HeadingListener callbacks.
        parser.addHeadingListener(this);
        parser.parse();
        sb.append("<ul>\n").append(m_buf).append("</ul>\n");
    } catch (IOException e) {
        log.error("Could not construct table of contents", e);
        throw new PluginException("Unable to construct table of contents (see logs)");
    }

    sb.append("</div>\n</div>\n");
    return sb.toString();
}
Usage of org.apache.wiki.parser.MarkupParser in the Apache JSPWiki project:
the getRenderedDocument method of the RenderingManager class.
/**
* Returns a cached document, if one is found.
*
* @param context the wiki context
* @param pagedata the page data
* @return the rendered wiki document
* @throws IOException If rendering cannot be accomplished
*/
// FIXME: The cache management policy is not very good: deleted/changed pages should be detected better.
protected WikiDocument getRenderedDocument(WikiContext context, String pagedata) throws IOException {
// Cache key: page name + version + whether plugins are executed, so the same
// page cached with and without plugin execution does not collide.
String pageid = context.getRealPage().getName() + VERSION_DELIMITER + context.getRealPage().getVersion() + VERSION_DELIMITER + context.getVariable(RenderingManager.VAR_EXECUTE_PLUGINS);
if (useCache(context)) {
Element element = m_documentCache.get(pageid);
if (element != null) {
WikiDocument doc = (WikiDocument) element.getObjectValue();
// FIXME: Figure out a faster method
// Staleness check: the cached document is only reused when the stored
// page data is byte-identical to the text we were asked to render.
if (pagedata.equals(doc.getPageData())) {
if (log.isDebugEnabled()) {
log.debug("Using cached HTML for page " + pageid);
}
return doc;
}
} else if (log.isDebugEnabled()) {
log.debug("Re-rendering and storing " + pageid);
}
}
//
// Cache miss or stale entry: parse fresh and (if caching is on) store the result.
try {
MarkupParser parser = getParser(context, pagedata);
WikiDocument doc = parser.parse();
// The source text is stored on the document to enable the staleness check above.
doc.setPageData(pagedata);
if (useCache(context)) {
m_documentCache.put(new Element(pageid, doc));
}
return doc;
} catch (IOException ex) {
log.error("Unable to parse", ex);
}
// NOTE(review): the method is declared (and documented) to throw IOException,
// yet parse failures are swallowed and null is returned instead — callers must
// null-check or risk an NPE. Presumably deliberate best-effort behavior; confirm
// before changing it to rethrow.
return null;
}
Usage of org.apache.wiki.parser.MarkupParser in the Apache JSPWiki project:
the testCache method of the RenderingManagerTest class.
/**
* Tests the relative speed of the DOM cache with respect to
* page being parsed every single time.
* @throws Exception
*/
@Test
public void testCache() throws Exception {
m_engine.saveText("TestPage", TEST_TEXT);
StopWatch sw = new StopWatch();
// System.out.println("DOM cache speed test:");
sw.start();
for (int i = 0; i < 300; i++) {
WikiPage page = m_engine.getPage("TestPage");
String pagedata = m_engine.getPureText(page);
WikiContext context = new WikiContext(m_engine, page);
MarkupParser p = m_manager.getParser(context, pagedata);
WikiDocument d = p.parse();
String html = m_manager.getHTML(context, d);
Assert.assertNotNull("noncached got null response", html);
}
sw.stop();
// System.out.println(" Nocache took "+sw);
long nocachetime = sw.getTime();
sw.reset();
sw.start();
for (int i = 0; i < 300; i++) {
WikiPage page = m_engine.getPage("TestPage");
String pagedata = m_engine.getPureText(page);
WikiContext context = new WikiContext(m_engine, page);
String html = m_manager.getHTML(context, pagedata);
Assert.assertNotNull("cached got null response", html);
}
sw.stop();
// System.out.println(" Cache took "+sw);
long speedup = nocachetime / sw.getTime();
// System.out.println(" Approx speedup: "+speedup+"x");
}
Aggregations