Use of edu.princeton.cs.algs4.ST in project infoarchive-sip-sdk by Enterprise-Content-Management: the prepareTemplate method of the class StringTemplate. (Note: the new ST(...)/add/render API used in these snippets matches org.stringtemplate.v4.ST rather than edu.princeton.cs.algs4.ST — the attribution should be verified.)
/**
 * Builds a ready-to-render template instance from the given prototype.
 * @param prototype The template prototype to copy
 * @param domainObject The domain object bound under the model variable
 * @param contentInfo The reference information and the encoded content hashes, bound under the content variable
 * @return A copy of the prototype populated with the model and content attributes
 */
protected ST prepareTemplate(ST prototype, D domainObject, Map<String, ContentInfo> contentInfo) {
  ST result = new ST(prototype);
  result.add(MODEL_VARIABLE, domainObject);
  result.add(CONTENT_VARIABLE, contentInfo);
  return result;
}
Use of edu.princeton.cs.algs4.ST in project antlr4 by antlr: the writeParserTestFile method of the class BaseGoTest.
/**
 * Writes a Go {@code Test.go} driver that parses the file named by the first
 * command-line argument and walks the resulting parse tree, panicking if the
 * tree shape is inconsistent.
 *
 * @param parserName name of the generated parser (rendered as {@code parser.New<parserName>})
 * @param lexerName name of the generated lexer (rendered as {@code parser.New<lexerName>})
 * @param listenerName name of the generated base listener embedded by TreeShapeListener
 * @param visitorName name of the generated visitor (bound into the template; the template text here does not reference it)
 * @param parserStartRuleName start rule name; its first letter is upper-cased to form the exported Go method
 * @param debug when true, the driver installs a DiagnosticErrorListener on the parser
 */
protected void writeParserTestFile(String parserName, String lexerName, String listenerName, String visitorName, String parserStartRuleName, boolean debug) {
ST outputFileST = new ST("package main\n" + "import (\n" + " \"github.com/antlr/antlr4/runtime/Go/antlr\"\n" + " \"./parser\"\n" + " \"os\"\n" + ")\n" + "\n" + "type TreeShapeListener struct {\n" + " *parser.Base<listenerName>\n" + "}\n" + "\n" + "func NewTreeShapeListener() *TreeShapeListener {\n" + " return new(TreeShapeListener)\n" + "}\n" + "\n" + "func (this *TreeShapeListener) EnterEveryRule(ctx antlr.ParserRuleContext) {\n" + " for i := 0; i\\<ctx.GetChildCount(); i++ {\n" + " child := ctx.GetChild(i)\n" + " parentR,ok := child.GetParent().(antlr.RuleNode)\n" + " if !ok || parentR.GetBaseRuleContext() != ctx.GetBaseRuleContext() {\n" + " panic(\"Invalid parse tree shape detected.\")\n" + " }\n" + " }\n" + "}\n" + "\n" + "func main() {\n" + " input := antlr.NewFileStream(os.Args[1])\n" + " lexer := parser.New<lexerName>(input)\n" + " stream := antlr.NewCommonTokenStream(lexer,0)\n" + "<createParser>" + " p.BuildParseTrees = true\n" + " tree := p.<parserStartRuleName>()\n" + " antlr.ParseTreeWalkerDefault.Walk(NewTreeShapeListener(), tree)\n" + "}\n");
// Build the parser-creation snippet as a plain string first; the original
// constructed an ST that was immediately discarded whenever debug was true.
// The snippet must still be wrapped in an ST (not added as a raw String) so
// that <parserName> inside it is resolved via ST's dynamic attribute scoping.
String createParser = " p := parser.New<parserName>(stream)\n";
if (debug) {
createParser += " p.AddErrorListener(antlr.NewDiagnosticErrorListener(true))\n";
}
outputFileST.add("createParser", new ST(createParser));
outputFileST.add("parserName", parserName);
outputFileST.add("lexerName", lexerName);
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
// Go requires the start-rule method to be exported, hence the upper-cased first letter.
outputFileST.add("parserStartRuleName", parserStartRuleName.substring(0, 1).toUpperCase() + parserStartRuleName.substring(1));
writeFile(overall_tmpdir.toString(), "Test.go", outputFileST.render());
}
Use of edu.princeton.cs.algs4.ST in project antlr4 by antlr: the writeLexerTestFile method of the class BaseGoTest.
/**
 * Writes a Go {@code Test.go} driver that tokenizes the file named by the
 * first command-line argument and prints every token.
 *
 * @param lexerName name of the generated lexer (rendered as {@code parser.New<lexerName>})
 * @param showDFA when true, the driver also prints the lexer's default-mode DFA
 */
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
// Optional DFA dump, appended just before the closing brace of main().
String dumpDFA = showDFA ? "fmt.Print(lexer.GetInterpreter().DecisionToDFA()[antlr.LexerDefaultMode].ToLexerString())\n" : "";
ST testFile = new ST("package main\n" + "import (\n" + " \"github.com/antlr/antlr4/runtime/Go/antlr\"\n" + " \"./parser\"\n" + " \"os\"\n" + " \"fmt\"\n" + ")\n" + "\n" + "func main() {\n" + " input := antlr.NewFileStream(os.Args[1])\n" + " lexer := parser.New<lexerName>(input)\n" + " stream := antlr.NewCommonTokenStream(lexer,0)\n" + " stream.Fill()\n" + " for _, t := range stream.GetAllTokens() {\n" + " fmt.Println(t)\n" + " }\n" + dumpDFA + "}\n" + "\n");
testFile.add("lexerName", lexerName);
writeFile(overall_tmpdir.toString(), "Test.go", testFile.render());
}
Use of edu.princeton.cs.algs4.ST in project antlr4 by antlr: the testActions method of the class BaseGoTest.
/**
 * Renders the given action into a grammar template, generates the Go parser
 * for the resulting grammar, and asserts that the generated code contains the
 * expected snippet between {@code #<actionName>#} and {@code #end-<actionName>#}
 * markers.
 *
 * @param templates an STGroup source string; the group-template name is taken from the text before the first '('
 * @param actionName name of the template attribute the action is bound to, and of the marker pair searched for in the output
 * @param action the action code injected into the grammar template
 * @param expected the exact snippet expected between the markers
 * @throws org.antlr.runtime.RecognitionException if the grammar cannot be parsed
 */
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
// The template name is everything before the first '(' in the group source;
// assumes a '(' is present (indexOf result is not checked).
int lp = templates.indexOf('(');
String name = templates.substring(0, lp);
STGroup group = new STGroupString(templates);
ST st = group.getInstanceOf(name);
st.add(actionName, action);
String grammar = st.render();
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(grammar, equeue);
// Only run the tool pipeline when the grammar parsed cleanly.
if (g.ast != null && !g.ast.hasErrors) {
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
// Pick the ATN factory matching the grammar kind (lexer vs. parser).
ATNFactory factory = new ParserATNFactory(g);
if (g.isLexer())
factory = new LexerATNFactory((LexerGrammar) g);
g.atn = factory.createATN();
// NOTE(review): unlike BaseJavaTest.testActions, no AnalysisPipeline is run
// before code generation here — confirm this is intentional for the Go target.
CodeGenerator gen = new CodeGenerator(g);
ST outputFileST = gen.generateParser();
String output = outputFileST.render();
// System.out.println(output);
// Cut out the text between the begin and end markers; assumes both markers
// occur in the generated output (indexOf results are not checked).
String b = "#" + actionName + "#";
int start = output.indexOf(b);
String e = "#end-" + actionName + "#";
int end = output.indexOf(e);
String snippet = output.substring(start + b.length(), end);
assertEquals(expected, snippet);
}
// Surface any tool errors collected during grammar construction.
if (equeue.size() > 0) {
System.err.println(equeue.toString());
}
}
Use of edu.princeton.cs.algs4.ST in project antlr4 by antlr: the testActions method of the class BaseJavaTest.
/**
 * Renders the given action into a grammar template, generates the Java parser
 * for the resulting grammar, and asserts that the generated code contains the
 * expected snippet between {@code #<actionName>#} and {@code #end-<actionName>#}
 * markers.
 *
 * @param templates an STGroup source string; the group-template name is the text before the first '('
 * @param actionName name of the template attribute the action is bound to, and of the marker pair searched for
 * @param action the action code injected into the grammar template
 * @param expected the exact snippet expected between the markers
 * @throws org.antlr.runtime.RecognitionException if the grammar cannot be parsed
 */
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
// Instantiate the named template from the group and bind the action into it.
String templateName = templates.substring(0, templates.indexOf('('));
STGroup stGroup = new STGroupString(templates);
ST grammarTemplate = stGroup.getInstanceOf(templateName);
grammarTemplate.add(actionName, action);
ErrorQueue errorQueue = new ErrorQueue();
Grammar grammar = new Grammar(grammarTemplate.render(), errorQueue);
// Only run the tool pipeline when the grammar parsed cleanly.
if (grammar.ast != null && !grammar.ast.hasErrors) {
new SemanticPipeline(grammar).process();
ATNFactory atnFactory;
if (grammar.isLexer()) {
atnFactory = new LexerATNFactory((LexerGrammar) grammar);
} else {
atnFactory = new ParserATNFactory(grammar);
}
grammar.atn = atnFactory.createATN();
new AnalysisPipeline(grammar).process();
String output = new CodeGenerator(grammar).generateParser(false).render();
//System.out.println(output);
// Cut out the text between the begin and end markers.
String beginMarker = "#" + actionName + "#";
String endMarker = "#end-" + actionName + "#";
int from = output.indexOf(beginMarker) + beginMarker.length();
int to = output.indexOf(endMarker);
assertEquals(expected, output.substring(from, to));
}
if (errorQueue.size() > 0) {
// System.err.println(errorQueue.toString());
}
}
Aggregations