Use of org.antlr.v4.runtime.Recognizer in project compiler by boalang: class BaseTest, method lex.
protected CommonTokenStream lex(final String input, final int[] ids, final String[] strings, final String[] errors) throws IOException {
    final List<String> foundErr = new ArrayList<String>();
    final BoaLexer lexer = new BoaLexer(new ANTLRInputStream(new StringReader(input)));
    lexer.removeErrorListeners();
    lexer.addErrorListener(new BaseErrorListener() {
        @Override
        public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol, final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
            foundErr.add(line + "," + charPositionInLine + ": " + msg);
        }
    });

    final CommonTokenStream tokens = new CommonTokenStream(lexer);
    tokens.fill();

    if (ids.length > 0 && strings.length > 0)
        assertEquals("ids != strings", ids.length, strings.length);

    if (ids.length > 0) {
        final List<Token> t = tokens.getTokens();

        if (DEBUG) {
            for (int i = 0; i < t.size(); i++) {
                final Token token = t.get(i);
                System.out.print(token.getType() + ", ");
            }
            System.out.println();
            for (int i = 0; i < t.size(); i++) {
                final Token token = t.get(i);
                System.out.print(token.getText() + ", ");
            }
            System.out.println();
            System.out.println();
        }

        assertEquals("wrong number of tokens", ids.length, t.size());
        for (int i = 0; i < t.size(); i++)
            assertEquals("wrong token type", ids[i], t.get(i).getType());
    }

    if (strings.length > 0) {
        final List<Token> t = tokens.getTokens();
        assertEquals("wrong number of tokens", strings.length, t.size());
        for (int i = 0; i < t.size(); i++)
            assertEquals("wrong token text", strings[i], t.get(i).getText());
    }

    assertEquals("wrong number of errors: " + input, errors.length, foundErr.size());
    for (int i = 0; i < foundErr.size(); i++)
        assertEquals("wrong error", errors[i], foundErr.get(i));

    return tokens;
}
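A lexer test would then call this helper with three parallel arrays: expected token types, expected token texts, and expected error messages. The sketch below is hypothetical; the numeric token types are placeholders for the constants generated into BoaLexer, and the input is not taken from the real Boa test suite.

import java.io.IOException;
import org.antlr.v4.runtime.Token;
import org.junit.Test;

public class LexerSmokeTest extends BaseTest {
    @Test
    public void lexesSimpleAssignment() throws IOException {
        // Placeholder token types; real values come from the generated BoaLexer.
        lex("a := 1;",
            new int[] { 1, 2, 3, 4, Token.EOF },
            new String[] { "a", ":=", "1", ";", "<EOF>" },
            new String[0]);          // no lexer errors expected
    }
}

Note that getTokens() returns every buffered token, including the trailing EOF and any tokens routed to hidden channels, so the expected arrays must account for them.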
Use of org.antlr.v4.runtime.Recognizer in project compiler by boalang: class BaseTest, method parse.
protected StartContext parse(final String input, final String[] errors) throws IOException {
    final CommonTokenStream tokens = lex(input);
    final BoaParser parser = new BoaParser(tokens);
    final List<String> foundErr = new ArrayList<String>();

    parser.removeErrorListeners();
    parser.addErrorListener(new BaseErrorListener() {
        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) throws ParseCancellationException {
            throw new ParseCancellationException(e);
        }
    });

    parser.setBuildParseTree(false);
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);

    StartContext p;
    try {
        p = parser.start();
    } catch (final Exception e) {
        // fall back to LL mode parsing if SLL fails
        tokens.reset();
        parser.reset();
        parser.removeErrorListeners();
        parser.addErrorListener(new BaseErrorListener() {
            @Override
            public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol, final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
                foundErr.add(line + "," + charPositionInLine + ": " + msg);
            }
        });
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        p = parser.start();
    }

    if (!DEBUG)
        assertEquals("wrong number of errors", errors.length, foundErr.size());
    for (int i = 0; i < foundErr.size(); i++) {
        if (DEBUG)
            System.out.println(foundErr.get(i));
        else
            assertEquals("wrong error", errors[i], foundErr.get(i));
    }

    return p;
}
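The try/catch above is ANTLR's standard two-stage strategy: predict with the cheap SLL mode first and re-parse with full LL only when SLL gives up. Below is a generic sketch of the same idiom that uses BailErrorStrategy instead of a throwing listener; the rule name start and BoaParser come from the snippet above, everything else is plain ANTLR runtime API.

import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ConsoleErrorListener;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.ParseCancellationException;

final class TwoStageParse {
    // Sketch of the SLL-then-LL idiom; BoaParser.StartContext is the context
    // type produced by the generated Boa parser shown above.
    static BoaParser.StartContext parse(final CommonTokenStream tokens) {
        final BoaParser parser = new BoaParser(tokens);
        parser.removeErrorListeners();
        parser.setErrorHandler(new BailErrorStrategy());
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        try {
            return parser.start();                       // fast path: SLL prediction
        } catch (final ParseCancellationException e) {
            tokens.seek(0);                              // rewind the token stream
            parser.reset();
            parser.addErrorListener(ConsoleErrorListener.INSTANCE);
            parser.setErrorHandler(new DefaultErrorStrategy());
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            return parser.start();                       // slow path: full LL
        }
    }
}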
Use of org.antlr.v4.runtime.Recognizer in project ksql by confluentinc: class KsqlParserErrorStrategy, method reportNoViableAlternative.
protected void reportNoViableAlternative(Parser recognizer, NoViableAltException e) {
    TokenStream tokens = recognizer.getInputStream();
    String input;
    if (tokens != null) {
        if (e.getStartToken().getType() == -1) {
            input = "<EOF>";
        } else {
            input = tokens.getText(e.getStartToken(), e.getOffendingToken());
        }
    } else {
        input = "<unknown input>";
    }
    String msg = "no viable alternative at input " + this.escapeWSAndQuote(input);
    recognizer.notifyErrorListeners(e.getOffendingToken(), msg, e);
}
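The key lookup here is TokenStream.getText(start, stop), which returns the raw input characters covered by the two tokens and ends up quoted in the message. A standalone restatement of that branch, as a sketch rather than code from the KSQL tree:

import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Token;

final class OffendingSpan {
    // Mirrors the method above: EOF has no text to show; otherwise return the
    // raw input between the decision's start token and the offending token.
    static String offendingSlice(final CommonTokenStream tokens, final Token start, final Token offending) {
        if (tokens == null)
            return "<unknown input>";
        return start.getType() == Token.EOF ? "<EOF>" : tokens.getText(start, offending);
    }
}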
Use of org.antlr.v4.runtime.Recognizer in project ksql by confluentinc: class KsqlParserErrorStrategy, method reportMissingToken.
protected void reportMissingToken(Parser recognizer) {
    if (!this.inErrorRecoveryMode(recognizer)) {
        this.beginErrorCondition(recognizer);
        Token t = recognizer.getCurrentToken();
        IntervalSet expecting = this.getExpectedTokens(recognizer);
        String msg = "missing " + expecting.toString(recognizer.getVocabulary()) + " at " + this.getTokenErrorDisplay(t);
        recognizer.notifyErrorListeners(t, msg, (RecognitionException) null);
    }
}
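Neither override takes effect until the strategy is installed on a parser via setErrorHandler. A hypothetical wiring sketch follows; SqlBaseLexer, SqlBaseParser, and the statements rule stand in for whatever the KSQL grammar actually generates.

import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;

final class KsqlParseExample {
    // Hypothetical wiring; the lexer/parser class names and the start rule
    // are assumptions, not taken verbatim from the KSQL sources.
    static void parse(final String sql) {
        final SqlBaseLexer lexer = new SqlBaseLexer(CharStreams.fromString(sql));
        final SqlBaseParser parser = new SqlBaseParser(new CommonTokenStream(lexer));
        parser.setErrorHandler(new KsqlParserErrorStrategy());
        parser.statements();   // syntax errors now flow through the custom strategy
    }
}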
Use of org.antlr.v4.runtime.Recognizer in project kalang by kasonyang: class ErrorRecoverTest, method testCode.
public void testCode(String code) {
    KalangParser parser = new KalangParser(TokenStreamFactory.createTokenStream(code));
    parser.setErrorHandler(new DefaultErrorStrategy() {

        @Override
        public void recover(Parser recognizer, RecognitionException e) {
            Token ot = e.getOffendingToken();
            System.out.println("offending token:" + ot.getText());
            IntervalSet expectedTokens = e.getExpectedTokens();
            RuleContext ctx = e.getCtx();
            if (ctx != null) {
                System.out.println("context:" + ctx.getClass().getName());
            }
            System.out.println("offending state:" + e.getOffendingState());
            String expTks = expectedTokens.toString(KalangLexer.VOCABULARY);
            System.out.println("expected:" + expTks);
            super.recover(recognizer, e);
        }

        @Override
        public Token recoverInline(Parser recognizer) throws RecognitionException {
            System.out.println("calling recover inline");
            // IntervalSet expectedTokens = recognizer.getExpectedTokens();
            // if (expectedTokens.contains(KalangLexer.SEMI)) {
            //     Token curToken = recognizer.getCurrentToken();
            //     recognizer.getTokenFactory().create(
            //         new Pair(
            //             curToken.getTokenSource()
            //             ,
            //         ), lastErrorIndex, text, lastErrorIndex, lastErrorIndex, lastErrorIndex, lastErrorIndex, lastErrorIndex);
            // }
            Token v = super.recoverInline(recognizer);
            if (v != null)
                System.out.println("inserted token:" + v.getText());
            return v;
        }
    });
    parser.compilationUnit();
}
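The commented-out block in recoverInline gestures at synthesizing the missing token (for example a semicolon) by hand. ANTLR's DefaultErrorStrategy already does this in its protected getMissingSymbol helper, which is why the test above simply logs whatever super.recoverInline inserted. The sketch below shows roughly what a hand-rolled single-token insertion would look like; the token type, text, and positions are assumptions.

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.misc.Pair;
// import of the generated KalangLexer omitted; it supplies the SEMI constant
// referenced in the commented-out code above.

final class MissingTokenSketch {
    // Builds a synthetic ';' token anchored at the current position.
    static Token synthesizeSemicolon(final Parser recognizer) {
        final Token cur = recognizer.getCurrentToken();
        return recognizer.getTokenFactory().create(
                new Pair<TokenSource, CharStream>(cur.getTokenSource(), cur.getTokenSource().getInputStream()),
                KalangLexer.SEMI, ";", Token.DEFAULT_CHANNEL,
                -1, -1,                                   // no backing characters in the input
                cur.getLine(), cur.getCharPositionInLine());
    }
}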