Use of org.antlr.v4.tool.LexerGrammar in project antlr4 by antlr: the class BaseGoTest, method createATN.
protected ATN createATN(Grammar g, boolean useSerializer) {
    if (g.atn == null) {
        // Run the semantic pipeline and build the ATN on first use; the result is cached on the grammar.
        semanticProcess(g);
        assertEquals(0, g.tool.getNumErrors());
        ParserATNFactory f;
        if (g.isLexer()) {
            f = new LexerATNFactory((LexerGrammar) g);
        } else {
            f = new ParserATNFactory(g);
        }
        g.atn = f.createATN();
        assertEquals(0, g.tool.getNumErrors());
    }
    ATN atn = g.atn;
    if (useSerializer) {
        // Optionally round-trip the ATN through the serializer to exercise (de)serialization.
        char[] serialized = ATNSerializer.getSerializedAsChars(atn);
        return new ATNDeserializer().deserialize(serialized);
    }
    return atn;
}
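A minimal sketch of how a test in this class might call createATN; the grammar text, token name, and the method name testCreateLexerATN are illustrative assumptions, not taken from the antlr4 sources.
@Test
public void testCreateLexerATN() throws Exception {
    // Hypothetical grammar; any small lexer grammar would do.
    LexerGrammar lg = new LexerGrammar(
        "lexer grammar L;\n" +
        "A : 'a' ;\n");
    // Passing true forces the serialize/deserialize round trip inside createATN.
    ATN atn = createATN(lg, true);
    assertNotNull(atn);
}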
Use of org.antlr.v4.tool.LexerGrammar in project antlr4 by antlr: the class BaseBrowserTest, method getTokenTypes.
public List<String> getTokenTypes(LexerGrammar lg, ATN atn, CharStream input) {
    // Drive the lexer ATN simulator directly over the input and collect token type names.
    LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) }, null);
    List<String> tokenTypes = new ArrayList<String>();
    int ttype;
    boolean hitEOF = false;
    do {
        if (hitEOF) {
            tokenTypes.add("EOF");
            break;
        }
        int t = input.LA(1);
        ttype = interp.match(input, Lexer.DEFAULT_MODE);
        if (ttype == Token.EOF) {
            tokenTypes.add("EOF");
        } else {
            // Map the numeric token type back to its name via the grammar.
            tokenTypes.add(lg.typeToTokenList.get(ttype));
        }
        if (t == IntStream.EOF) {
            hitEOF = true;
        }
    } while (ttype != Token.EOF);
    return tokenTypes;
}
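A hedged usage sketch for getTokenTypes: build a tiny lexer grammar, create its ATN, and tokenize a short input. The grammar, input, and expected token list are assumptions for illustration, and the sketch assumes the class also exposes the createATN helper shown above.
@Test
public void testGetTokenTypes() throws Exception {
    LexerGrammar lg = new LexerGrammar(
        "lexer grammar L;\n" +
        "A : 'a' ;\n" +
        "B : 'b' ;\n");
    ATN atn = createATN(lg, true);
    CharStream input = new ANTLRInputStream("abab");
    List<String> types = getTokenTypes(lg, atn, input);
    // Token names come from lg.typeToTokenList, terminated by EOF.
    assertEquals("[A, B, A, B, EOF]", types.toString());
}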
Use of org.antlr.v4.tool.LexerGrammar in project antlr4 by antlr: the class BaseNodeTest, method testActions.
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
    // The template name is everything before the first '(' in the group string.
    int lp = templates.indexOf('(');
    String name = templates.substring(0, lp);
    STGroup group = new STGroupString(templates);
    ST st = group.getInstanceOf(name);
    st.add(actionName, action);
    String grammar = st.render();
    ErrorQueue equeue = new ErrorQueue();
    Grammar g = new Grammar(grammar, equeue);
    if (g.ast != null && !g.ast.hasErrors) {
        SemanticPipeline sem = new SemanticPipeline(g);
        sem.process();
        ATNFactory factory = new ParserATNFactory(g);
        if (g.isLexer())
            factory = new LexerATNFactory((LexerGrammar) g);
        g.atn = factory.createATN();
        CodeGenerator gen = new CodeGenerator(g);
        ST outputFileST = gen.generateParser();
        String output = outputFileST.render();
        // System.out.println(output);
        // Extract the translated action between the #<actionName># and #end-<actionName># markers.
        String b = "#" + actionName + "#";
        int start = output.indexOf(b);
        String e = "#end-" + actionName + "#";
        int end = output.indexOf(e);
        String snippet = output.substring(start + b.length(), end);
        assertEquals(expected, snippet);
    }
    if (equeue.size() > 0) {
        System.err.println(equeue.toString());
    }
}
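To show how testActions is driven, here is a hypothetical, much-reduced template group; the real templates used by the antlr4 action-translation tests are larger, so treat the template text, action, test name, and expected snippet below as assumptions.
@Test
public void testMembersActionIsEmittedVerbatim() throws Exception {
    // Hypothetical group: one template named "members" whose name matches the text before '('.
    String templates =
        "members(members) ::= <<\n" +
        "parser grammar A;\n" +
        "@members {#members#<members>#end-members#}\n" +
        "a : A ;\n" +
        ">>\n";
    // Renders the template, generates a parser, and compares the text found
    // between #members# and #end-members# against the expected value.
    testActions(templates, "members", "int i;", "int i;");
}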
Use of org.antlr.v4.tool.LexerGrammar in project antlr4 by antlr: the class BaseNodeTest, method createATN.
protected ATN createATN(Grammar g, boolean useSerializer) {
    if (g.atn == null) {
        semanticProcess(g);
        assertEquals(0, g.tool.getNumErrors());
        ParserATNFactory f;
        if (g.isLexer()) {
            f = new LexerATNFactory((LexerGrammar) g);
        } else {
            f = new ParserATNFactory(g);
        }
        g.atn = f.createATN();
        assertEquals(0, g.tool.getNumErrors());
    }
    ATN atn = g.atn;
    if (useSerializer) {
        char[] serialized = ATNSerializer.getSerializedAsChars(atn);
        return new ATNDeserializer().deserialize(serialized);
    }
    return atn;
}
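Because createATN caches the built ATN on the grammar, a caller can compare the in-memory ATN with the copy produced by the serializer round trip. This sketch, its grammar, and the test name are assumptions, not code from the original tests.
@Test
public void testSerializerRoundTripPreservesDecisionCount() throws Exception {
    LexerGrammar lg = new LexerGrammar(
        "lexer grammar L;\n" +
        "A : 'a' ;\n");
    ATN built = createATN(lg, false);        // first call builds and caches lg.atn
    ATN roundTripped = createATN(lg, true);  // second call reuses the cache, then serializes and deserializes a copy
    assertEquals(built.decisionToState.size(), roundTripped.decisionToState.size());
}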
Use of org.antlr.v4.tool.LexerGrammar in project antlr4 by antlr: the class BaseNodeTest, method getTokenTypes.
public List<String> getTokenTypes(LexerGrammar lg, ATN atn, CharStream input) {
    LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) }, null);
    List<String> tokenTypes = new ArrayList<String>();
    int ttype;
    boolean hitEOF = false;
    do {
        if (hitEOF) {
            tokenTypes.add("EOF");
            break;
        }
        int t = input.LA(1);
        ttype = interp.match(input, Lexer.DEFAULT_MODE);
        if (ttype == Token.EOF) {
            tokenTypes.add("EOF");
        } else {
            tokenTypes.add(lg.typeToTokenList.get(ttype));
        }
        if (t == IntStream.EOF) {
            hitEOF = true;
        }
    } while (ttype != Token.EOF);
    return tokenTypes;
}