Example usage of org.antlr.v4.runtime.misc.ParseCancellationException in the checkstyle project (by checkstyle), from the method parseJavadocAsDetailNode of class JavadocDetailNodeParser.
/**
 * Converts a Javadoc block comment into a {@link DetailNode} tree.
 *
 * @param javadocCommentAst DetailAST node holding the Javadoc comment
 * @return parse status carrying either the resulting detail-node tree
 *         or, on a syntax error, a {@code ParseErrorMessage}
 */
public ParseStatus parseJavadocAsDetailNode(DetailAST javadocCommentAst) {
    blockCommentLineNumber = javadocCommentAst.getLineNo();
    final String commentContent = JavadocUtils.getJavadocCommentContent(javadocCommentAst);
    // A fresh listener per invocation keeps one check instance reusable
    // across multiple files without carrying state between them.
    errorListener = new DescriptiveErrorListener();
    // Report line numbers relative to the file rather than the comment:
    // offset by the line on which the Javadoc block begins.
    errorListener.setOffset(javadocCommentAst.getLineNo() - 1);
    final ParseStatus result = new ParseStatus();
    try {
        final ParseTree parseTree = parseJavadocAsParseTree(commentContent);
        final DetailNode detailTree = convertParseTreeToDetailNode(parseTree);
        // Shift the first line so it aligns with the indent of "/**".
        final int javadocIndent = javadocCommentAst.getColumnNo() + JAVADOC_START.length();
        adjustFirstLineToJavadocIndent(detailTree, javadocIndent);
        result.setTree(detailTree);
    }
    catch (ParseCancellationException | IllegalArgumentException ex) {
        // On a syntax error the listener has usually already logged a message and
        // the parser bails out with a runtime exception; just stop processing
        // this Javadoc comment and record the error.
        ParseErrorMessage errorMessage = errorListener.getErrorMessage();
        // Some syntax errors are not routed through the ANTLR error listener,
        // so synthesize a generic message in that case.
        if (errorMessage == null) {
            errorMessage = new ParseErrorMessage(
                javadocCommentAst.getLineNo(),
                MSG_KEY_UNRECOGNIZED_ANTLR_ERROR,
                javadocCommentAst.getColumnNo(),
                ex.getMessage());
        }
        result.setParseErrorMessage(errorMessage);
    }
    return result;
}
Example usage of org.antlr.v4.runtime.misc.ParseCancellationException in the antlr4 project (by antlr), from the method getParserFactory of class TestPerformance.
/**
 * Reflectively loads the generated lexer, parser, and listener classes named by the
 * arguments from {@code tmpdir} and returns a {@link ParserFactory} whose
 * {@code parseFile} runs the full lex-and-parse pipeline over one input, honoring
 * the benchmark configuration flags (REUSE_*, TWO_STAGE_PARSING, COMPUTE_*, etc.).
 *
 * @param lexerName    fully qualified name of the generated lexer class
 * @param parserName   fully qualified name of the generated parser class
 * @param listenerName fully qualified name of the generated parse-tree listener class
 * @param entryPoint   name of the parser rule (method) to invoke as the start rule
 * @return a factory producing per-file parse results for the performance run
 */
protected ParserFactory getParserFactory(String lexerName, String parserName, String listenerName, final String entryPoint) {
try {
// Class loader rooted at the temp output dir where the generated code was compiled.
ClassLoader loader = new URLClassLoader(new URL[] { new File(tmpdir).toURI().toURL() }, ClassLoader.getSystemClassLoader());
final Class<? extends Lexer> lexerClass = loader.loadClass(lexerName).asSubclass(Lexer.class);
final Class<? extends Parser> parserClass = loader.loadClass(parserName).asSubclass(Parser.class);
final Class<? extends ParseTreeListener> listenerClass = loader.loadClass(listenerName).asSubclass(ParseTreeListener.class);
final Constructor<? extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
final Constructor<? extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
// construct initial instances of the lexer and parser to deserialize their ATNs
TokenSource tokenSource = lexerCtor.newInstance(new ANTLRInputStream(""));
parserCtor.newInstance(new CommonTokenStream(tokenSource));
return new ParserFactory() {
@Override
public FileParseResult parseFile(CharStream input, int currentPass, int thread) {
// Accumulates a hash over tokens/trees when COMPUTE_CHECKSUM is enabled.
final MurmurHashChecksum checksum = new MurmurHashChecksum();
final long startTime = System.nanoTime();
assert thread >= 0 && thread < NUMBER_OF_THREADS;
try {
// Lazily create and cache one listener instance per worker thread.
ParseTreeListener listener = sharedListeners[thread];
if (listener == null) {
listener = listenerClass.newInstance();
sharedListeners[thread] = listener;
}
// Reuse the thread's lexer when configured; otherwise build a fresh one,
// optionally sharing (or discarding) the previous lexer's DFA cache.
Lexer lexer = sharedLexers[thread];
if (REUSE_LEXER && lexer != null) {
lexer.setInputStream(input);
} else {
Lexer previousLexer = lexer;
lexer = lexerCtor.newInstance(input);
DFA[] decisionToDFA = (FILE_GRANULARITY || previousLexer == null ? lexer : previousLexer).getInterpreter().decisionToDFA;
if (!REUSE_LEXER_DFA || (!FILE_GRANULARITY && previousLexer == null)) {
// Start from an empty DFA cache instead of carrying one over.
decisionToDFA = new DFA[decisionToDFA.length];
}
if (COMPUTE_TRANSITION_STATS) {
lexer.setInterpreter(new StatisticsLexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
} else if (!REUSE_LEXER_DFA) {
lexer.setInterpreter(new LexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
}
sharedLexers[thread] = lexer;
}
lexer.removeErrorListeners();
lexer.addErrorListener(DescriptiveErrorListener.INSTANCE);
// Ensure the lexer's DFA array holds a DFA instance per decision before lexing.
if (lexer.getInterpreter().decisionToDFA[0] == null) {
ATN atn = lexer.getATN();
for (int i = 0; i < lexer.getInterpreter().decisionToDFA.length; i++) {
lexer.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
}
}
// Tokenize the whole input up front and record the token count for this pass.
CommonTokenStream tokens = new CommonTokenStream(lexer);
tokens.fill();
tokenCount.addAndGet(currentPass, tokens.size());
if (COMPUTE_CHECKSUM) {
for (Token token : tokens.getTokens()) {
updateChecksum(checksum, token);
}
}
// Lexer-only mode: report results without running the parser.
if (!RUN_PARSER) {
return new FileParseResult(input.getSourceName(), (int) checksum.getValue(), null, tokens.size(), startTime, lexer, null);
}
final long parseStartTime = System.nanoTime();
// Parser reuse mirrors the lexer logic above (shared instance + DFA cache policy).
Parser parser = sharedParsers[thread];
if (REUSE_PARSER && parser != null) {
parser.setInputStream(tokens);
} else {
Parser previousParser = parser;
if (USE_PARSER_INTERPRETER) {
// Drive the grammar through ParserInterpreter instead of the generated parser;
// a throwaway generated parser is built only to obtain the grammar metadata.
Parser referenceParser = parserCtor.newInstance(tokens);
parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
} else {
parser = parserCtor.newInstance(tokens);
}
DFA[] decisionToDFA = (FILE_GRANULARITY || previousParser == null ? parser : previousParser).getInterpreter().decisionToDFA;
if (!REUSE_PARSER_DFA || (!FILE_GRANULARITY && previousParser == null)) {
decisionToDFA = new DFA[decisionToDFA.length];
}
if (COMPUTE_TRANSITION_STATS) {
parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
} else if (!REUSE_PARSER_DFA) {
parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
}
sharedParsers[thread] = parser;
}
parser.removeParseListeners();
parser.removeErrorListeners();
// In two-stage parsing the first (SLL) pass stays silent; error listeners
// are attached only on the LL retry below.
if (!TWO_STAGE_PARSING) {
parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
parser.addErrorListener(new SummarizingDiagnosticErrorListener());
}
if (parser.getInterpreter().decisionToDFA[0] == null) {
ATN atn = parser.getATN();
for (int i = 0; i < parser.getInterpreter().decisionToDFA.length; i++) {
parser.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
}
}
// Stage one uses SLL prediction when two-stage parsing is on; otherwise the configured mode.
parser.getInterpreter().setPredictionMode(TWO_STAGE_PARSING ? PredictionMode.SLL : PREDICTION_MODE);
parser.setBuildParseTree(BUILD_PARSE_TREES);
if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
parser.addParseListener(listener);
}
if (BAIL_ON_ERROR || TWO_STAGE_PARSING) {
parser.setErrorHandler(new BailErrorStrategy());
}
// The start rule is invoked reflectively by name.
Method parseMethod = parserClass.getMethod(entryPoint);
Object parseResult;
try {
if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
parser.addParseListener(new ChecksumParseTreeListener(checksum));
}
if (USE_PARSER_INTERPRETER) {
ParserInterpreter parserInterpreter = (ParserInterpreter) parser;
parseResult = parserInterpreter.parse(Collections.lastIndexOfSubList(Arrays.asList(parser.getRuleNames()), Collections.singletonList(entryPoint)));
} else {
parseResult = parseMethod.invoke(parser);
}
} catch (InvocationTargetException ex) {
// Stage two: the bailing SLL pass failed; retry the file with full LL prediction.
if (!TWO_STAGE_PARSING) {
throw ex;
}
String sourceName = tokens.getSourceName();
sourceName = sourceName != null && !sourceName.isEmpty() ? sourceName + ": " : "";
if (REPORT_SECOND_STAGE_RETRY) {
System.err.println(sourceName + "Forced to retry with full context.");
}
// Only a bail-out (ParseCancellationException) triggers the retry; anything else propagates.
if (!(ex.getCause() instanceof ParseCancellationException)) {
throw ex;
}
// Rewind the token stream and rebuild/reconfigure the parser for the LL pass.
tokens.seek(0);
if (REUSE_PARSER && parser != null) {
parser.setInputStream(tokens);
} else {
Parser previousParser = parser;
if (USE_PARSER_INTERPRETER) {
Parser referenceParser = parserCtor.newInstance(tokens);
parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
} else {
parser = parserCtor.newInstance(tokens);
}
DFA[] decisionToDFA = previousParser.getInterpreter().decisionToDFA;
if (COMPUTE_TRANSITION_STATS) {
parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
} else if (!REUSE_PARSER_DFA) {
parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
}
sharedParsers[thread] = parser;
}
parser.removeParseListeners();
parser.removeErrorListeners();
// Unlike stage one, the retry reports errors through the listeners.
parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
parser.addErrorListener(new SummarizingDiagnosticErrorListener());
parser.getInterpreter().setPredictionMode(PredictionMode.LL);
parser.setBuildParseTree(BUILD_PARSE_TREES);
if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
parser.addParseListener(new ChecksumParseTreeListener(checksum));
}
if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
parser.addParseListener(listener);
}
if (BAIL_ON_ERROR) {
parser.setErrorHandler(new BailErrorStrategy());
}
parseResult = parseMethod.invoke(parser);
}
assertThat(parseResult, instanceOf(ParseTree.class));
if (COMPUTE_CHECKSUM && BUILD_PARSE_TREES) {
ParseTreeWalker.DEFAULT.walk(new ChecksumParseTreeListener(checksum), (ParseTree) parseResult);
}
if (BUILD_PARSE_TREES && BLANK_LISTENER) {
ParseTreeWalker.DEFAULT.walk(listener, (ParseTree) parseResult);
}
return new FileParseResult(input.getSourceName(), (int) checksum.getValue(), (ParseTree) parseResult, tokens.size(), TIME_PARSE_ONLY ? parseStartTime : startTime, lexer, parser);
} catch (Exception e) {
// When syntax errors are not reported, a cancellation is a non-fatal parse abort.
if (!REPORT_SYNTAX_ERRORS && e instanceof ParseCancellationException) {
return new FileParseResult("unknown", (int) checksum.getValue(), null, 0, startTime, null, null);
}
e.printStackTrace(System.out);
throw new IllegalStateException(e);
}
}
};
} catch (Exception e) {
e.printStackTrace(System.out);
Assert.fail(e.getMessage());
// Assert.fail never returns; this throw only satisfies definite-return analysis.
throw new IllegalStateException(e);
}
}
Example usage of org.antlr.v4.runtime.misc.ParseCancellationException in the presto project (by prestodb), from the method parseTypeCalculation of class TypeCalculation.
/**
 * Parses a type-calculation expression into an ANTLR parse tree, first with
 * the faster SLL prediction mode and, if that attempt is cancelled, again
 * with full LL prediction.
 *
 * @param calculation the type-calculation expression to parse
 * @return the root {@code typeCalculation} rule context
 */
private static ParserRuleContext parseTypeCalculation(String calculation) {
    // Wire up lexer -> token stream -> parser, routing all diagnostics
    // through the shared ERROR_LISTENER.
    TypeCalculationLexer lexer =
            new TypeCalculationLexer(new CaseInsensitiveStream(new ANTLRInputStream(calculation)));
    lexer.removeErrorListeners();
    lexer.addErrorListener(ERROR_LISTENER);

    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    TypeCalculationParser parser = new TypeCalculationParser(tokenStream);
    parser.removeErrorListeners();
    parser.addErrorListener(ERROR_LISTENER);

    try {
        // First attempt: SLL prediction, which is usually faster.
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        return parser.typeCalculation();
    }
    catch (ParseCancellationException ignored) {
        // SLL bailed out: rewind the input and retry with full LL prediction.
        tokenStream.reset();
        parser.reset();
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        return parser.typeCalculation();
    }
}
Example usage of org.antlr.v4.runtime.misc.ParseCancellationException in the L42 project (by ElvisResearchGroup), from the method main of class L42.
/**
 * Entry point: runs the L42 program named by the last command-line argument,
 * which may be either a project directory (containing This.L42) or a single
 * source file. Prints the final error code, parse failures, or a formatted
 * error message, and optionally the profiler report.
 */
public static void main(String[] arg) throws IOException, InstantiationException, IllegalAccessException, ClassNotFoundException, IllegalArgumentException, InvocationTargetException, NoSuchMethodException, SecurityException {
    //assert false;
    setClassPath(Paths.get("Plugins"));
    L42.programArgs = arg;
    try {
        Configuration.loadAll();
        final Path target = Paths.get(arg[arg.length - 1]);
        final String code;
        if (Files.isDirectory(target)) {
            // Directory mode: the entry point is the This.L42 file inside it.
            L42.setRootPath(target);
            code = L42.pathToString(target.resolve("This.L42"));
        }
        else {
            // Single-file mode: the containing directory becomes the root.
            L42.setRootPath(target.getParent());
            code = L42.pathToString(target);
        }
        final FinalResult res = L42.runSlow(target.toString(), code);
        System.out.println("------------------------------");
        System.out.println("END (zero for success): " + res.getErrCode());
    }
    catch (ParseCancellationException parser) {
        // Parse failures are reported as plain messages, not stack traces.
        System.out.println(parser.getMessage());
        //parser.printStackTrace(System.out);
    }
    catch (ErrorMessage msg) {
        ErrorFormatter.topFormatErrorMessage(msg);
    }
    finally {
        if (L42.profilerPrintOn) {
            System.out.print(Timer.report());
        }
    }
}
Example usage of org.antlr.v4.runtime.misc.ParseCancellationException in the Alpha project (by alpha-asp), from the method parseVisit of class Main.
/**
 * Parses an ASP-Core-2 program from the given input stream and converts the
 * resulting parse tree into the internal program representation.
 *
 * @param is the program source
 * @return the internal representation of the parsed program
 * @throws IOException if reading the input fails
 */
public static ParsedProgram parseVisit(ANTLRInputStream is) throws IOException {
    /*
    // In order to require less memory: use unbuffered streams and avoid constructing a full parse tree.
    ASPCore2Lexer lexer = new ASPCore2Lexer(new UnbufferedCharStream(is));
    lexer.setTokenFactory(new CommonTokenFactory(true));
    final ASPCore2Parser parser = new ASPCore2Parser(new UnbufferedTokenStream<>(lexer));
    parser.setBuildParseTree(false);
    */
    final CommonTokenStream tokens = new CommonTokenStream(new ASPCore2Lexer(is));
    final ASPCore2Parser parser = new ASPCore2Parser(tokens);
    // Stage one: SLL prediction with a bailing error strategy — fast, but it
    // may abort on inputs that full LL prediction would accept.
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.removeErrorListeners();
    parser.setErrorHandler(new BailErrorStrategy());
    final CustomErrorListener errorListener = new CustomErrorListener(is.getSourceName());
    ASPCore2Parser.ProgramContext programContext;
    try {
        programContext = parser.program();
    }
    catch (ParseCancellationException e) {
        // Only retry when the bail-out wraps a genuine recognition error;
        // any other cancellation is unexpected and propagates.
        if (!(e.getCause() instanceof RecognitionException)) {
            throw e;
        }
        // Stage two: rewind and reparse with full LL prediction and the default
        // (recovering) error strategy, reporting errors through the listener.
        tokens.reset();
        parser.addErrorListener(errorListener);
        parser.setErrorHandler(new DefaultErrorStrategy());
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        programContext = parser.program();
    }
    // NOTE(review): only the first recognition error captured by the listener
    // is surfaced to the user here.
    if (errorListener.getRecognitionException() != null) {
        throw errorListener.getRecognitionException();
    }
    // Construct internal program representation.
    final ParsedTreeVisitor visitor = new ParsedTreeVisitor();
    return (ParsedProgram) visitor.visitProgram(programContext);
}
Aggregations