Use of org.antlr.v4.runtime.misc.ParseCancellationException in project checkstyle, by checkstyle.
The class JavadocDetailNodeParser, method parseJavadocAsDetailNode:
/**
 * Parses Javadoc comment as DetailNode tree.
 *
 * @param javadocCommentAst
 *        DetailAST of Javadoc comment
 * @return DetailNode tree of Javadoc comment
 */
public ParseStatus parseJavadocAsDetailNode(DetailAST javadocCommentAst) {
    blockCommentLineNumber = javadocCommentAst.getLineNo();
    final String javadocComment = JavadocUtil.getJavadocCommentContent(javadocCommentAst);
    // Use a new error listener each time so that a single check instance
    // can process multiple files without side effects.
    final DescriptiveErrorListener errorListener = new DescriptiveErrorListener();
    // Log messages should have line numbers in the scope of the file,
    // not in the scope of the Javadoc comment.
    // The offset is the line number of the beginning of the Javadoc comment.
    errorListener.setOffset(javadocCommentAst.getLineNo() - 1);
    final ParseStatus result = new ParseStatus();
    try {
        final JavadocParser javadocParser = createJavadocParser(javadocComment, errorListener);
        final ParseTree javadocParseTree = javadocParser.javadoc();
        final DetailNode tree = convertParseTreeToDetailNode(javadocParseTree);
        // adjust the first line to the indent of /**
        adjustFirstLineToJavadocIndent(tree, javadocCommentAst.getColumnNo() + JAVADOC_START.length());
        result.setTree(tree);
        result.firstNonTightHtmlTag = getFirstNonTightHtmlTag(javadocParser, errorListener.offset);
    } catch (ParseCancellationException | IllegalArgumentException ex) {
        ParseErrorMessage parseErrorMessage = null;
        if (ex.getCause() instanceof FailedPredicateException
                || ex.getCause() instanceof NoViableAltException) {
            final RecognitionException recognitionEx = (RecognitionException) ex.getCause();
            if (recognitionEx.getCtx() instanceof JavadocParser.HtmlTagContext) {
                final Token htmlTagNameStart = getMissedHtmlTag(recognitionEx);
                parseErrorMessage = new ParseErrorMessage(errorListener.offset + htmlTagNameStart.getLine(),
                        MSG_JAVADOC_MISSED_HTML_CLOSE, htmlTagNameStart.getCharPositionInLine(),
                        htmlTagNameStart.getText());
            }
        }
        if (parseErrorMessage == null) {
            // If a syntax error occurs, the message is printed by the error listener
            // and the parser throws this runtime exception to stop parsing.
            // Just stop processing the current Javadoc comment.
            parseErrorMessage = errorListener.getErrorMessage();
        }
        result.setParseErrorMessage(parseErrorMessage);
    }
    return result;
}
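In ANTLR, BailErrorStrategy wraps the original RecognitionException as the cause of the ParseCancellationException it throws, which is why code like the above inspects ex.getCause(). The following is a minimal standalone sketch of that general pattern, not checkstyle's actual setup; MyLexer, MyParser, and the compilationUnit entry rule are hypothetical ANTLR-generated names.

import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.misc.ParseCancellationException;

public final class BailParseExample {

    public static void parse(String source) {
        // Hypothetical ANTLR-generated lexer and parser.
        MyLexer lexer = new MyLexer(CharStreams.fromString(source));
        MyParser parser = new MyParser(new CommonTokenStream(lexer));
        // BailErrorStrategy aborts at the first syntax error by throwing
        // ParseCancellationException instead of attempting recovery.
        parser.setErrorHandler(new BailErrorStrategy());
        try {
            parser.compilationUnit();
        } catch (ParseCancellationException ex) {
            // The original RecognitionException is carried as the cause,
            // so details such as the offending token remain available.
            if (ex.getCause() instanceof RecognitionException) {
                RecognitionException cause = (RecognitionException) ex.getCause();
                System.err.println("Syntax error near: " + cause.getOffendingToken().getText());
            }
        }
    }
}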
Use of org.antlr.v4.runtime.misc.ParseCancellationException in project antlr4, by antlr.
The class TestPerformance, method getParserFactory:
protected ParserFactory getParserFactory(String lexerName, String parserName, String listenerName, final String entryPoint) {
    try {
        ClassLoader loader = new URLClassLoader(new URL[] { getTempTestDir().toURI().toURL() }, ClassLoader.getSystemClassLoader());
        final Class<? extends Lexer> lexerClass = loader.loadClass(lexerName).asSubclass(Lexer.class);
        final Class<? extends Parser> parserClass = loader.loadClass(parserName).asSubclass(Parser.class);
        final Class<? extends ParseTreeListener> listenerClass = loader.loadClass(listenerName).asSubclass(ParseTreeListener.class);
        final Constructor<? extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
        final Constructor<? extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
        // construct initial instances of the lexer and parser to deserialize their ATNs
        TokenSource tokenSource = lexerCtor.newInstance(new ANTLRInputStream(""));
        parserCtor.newInstance(new CommonTokenStream(tokenSource));
        return new ParserFactory() {
            @Override
            public FileParseResult parseFile(CharStream input, int currentPass, int thread) {
                final MurmurHashChecksum checksum = new MurmurHashChecksum();
                final long startTime = System.nanoTime();
                assert thread >= 0 && thread < NUMBER_OF_THREADS;
                try {
                    ParseTreeListener listener = sharedListeners[thread];
                    if (listener == null) {
                        listener = listenerClass.newInstance();
                        sharedListeners[thread] = listener;
                    }
                    Lexer lexer = sharedLexers[thread];
                    if (REUSE_LEXER && lexer != null) {
                        lexer.setInputStream(input);
                    } else {
                        Lexer previousLexer = lexer;
                        lexer = lexerCtor.newInstance(input);
                        DFA[] decisionToDFA = (FILE_GRANULARITY || previousLexer == null ? lexer : previousLexer).getInterpreter().decisionToDFA;
                        if (!REUSE_LEXER_DFA || (!FILE_GRANULARITY && previousLexer == null)) {
                            decisionToDFA = new DFA[decisionToDFA.length];
                        }
                        if (COMPUTE_TRANSITION_STATS) {
                            lexer.setInterpreter(new StatisticsLexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
                        } else if (!REUSE_LEXER_DFA) {
                            lexer.setInterpreter(new LexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
                        }
                        sharedLexers[thread] = lexer;
                    }
                    lexer.removeErrorListeners();
                    lexer.addErrorListener(DescriptiveErrorListener.INSTANCE);
                    if (lexer.getInterpreter().decisionToDFA[0] == null) {
                        ATN atn = lexer.getATN();
                        for (int i = 0; i < lexer.getInterpreter().decisionToDFA.length; i++) {
                            lexer.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
                        }
                    }
                    CommonTokenStream tokens = new CommonTokenStream(lexer);
                    tokens.fill();
                    tokenCount.addAndGet(currentPass, tokens.size());
                    if (COMPUTE_CHECKSUM) {
                        for (Token token : tokens.getTokens()) {
                            updateChecksum(checksum, token);
                        }
                    }
                    if (!RUN_PARSER) {
                        return new FileParseResult(input.getSourceName(), (int) checksum.getValue(), null, tokens.size(), startTime, lexer, null);
                    }
                    final long parseStartTime = System.nanoTime();
                    Parser parser = sharedParsers[thread];
                    if (REUSE_PARSER && parser != null) {
                        parser.setInputStream(tokens);
                    } else {
                        Parser previousParser = parser;
                        if (USE_PARSER_INTERPRETER) {
                            Parser referenceParser = parserCtor.newInstance(tokens);
                            parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                        } else {
                            parser = parserCtor.newInstance(tokens);
                        }
                        DFA[] decisionToDFA = (FILE_GRANULARITY || previousParser == null ? parser : previousParser).getInterpreter().decisionToDFA;
                        if (!REUSE_PARSER_DFA || (!FILE_GRANULARITY && previousParser == null)) {
                            decisionToDFA = new DFA[decisionToDFA.length];
                        }
                        if (COMPUTE_TRANSITION_STATS) {
                            parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                        } else if (!REUSE_PARSER_DFA) {
                            parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                        }
                        sharedParsers[thread] = parser;
                    }
                    parser.removeParseListeners();
                    parser.removeErrorListeners();
                    if (!TWO_STAGE_PARSING) {
                        parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.addErrorListener(new SummarizingDiagnosticErrorListener());
                    }
                    if (parser.getInterpreter().decisionToDFA[0] == null) {
                        ATN atn = parser.getATN();
                        for (int i = 0; i < parser.getInterpreter().decisionToDFA.length; i++) {
                            parser.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
                        }
                    }
                    parser.getInterpreter().setPredictionMode(TWO_STAGE_PARSING ? PredictionMode.SLL : PREDICTION_MODE);
                    parser.setBuildParseTree(BUILD_PARSE_TREES);
                    if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                        parser.addParseListener(listener);
                    }
                    if (BAIL_ON_ERROR || TWO_STAGE_PARSING) {
                        parser.setErrorHandler(new BailErrorStrategy());
                    }
                    Method parseMethod = parserClass.getMethod(entryPoint);
                    Object parseResult;
                    try {
                        if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                            parser.addParseListener(new ChecksumParseTreeListener(checksum));
                        }
                        if (USE_PARSER_INTERPRETER) {
                            ParserInterpreter parserInterpreter = (ParserInterpreter) parser;
                            parseResult = parserInterpreter.parse(Collections.lastIndexOfSubList(Arrays.asList(parser.getRuleNames()), Collections.singletonList(entryPoint)));
                        } else {
                            parseResult = parseMethod.invoke(parser);
                        }
                    } catch (InvocationTargetException ex) {
                        // The entry rule is invoked reflectively, so exceptions thrown by the
                        // bail strategy arrive wrapped in InvocationTargetException.
                        if (!TWO_STAGE_PARSING) {
                            throw ex;
                        }
                        String sourceName = tokens.getSourceName();
                        sourceName = sourceName != null && !sourceName.isEmpty() ? sourceName + ": " : "";
                        if (REPORT_SECOND_STAGE_RETRY) {
                            System.err.println(sourceName + "Forced to retry with full context.");
                        }
                        if (!(ex.getCause() instanceof ParseCancellationException)) {
                            throw ex;
                        }
                        // Second stage: rewind the token stream and reparse with full LL prediction.
                        tokens.seek(0);
                        if (REUSE_PARSER && parser != null) {
                            parser.setInputStream(tokens);
                        } else {
                            Parser previousParser = parser;
                            if (USE_PARSER_INTERPRETER) {
                                Parser referenceParser = parserCtor.newInstance(tokens);
                                parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                            } else {
                                parser = parserCtor.newInstance(tokens);
                            }
                            DFA[] decisionToDFA = previousParser.getInterpreter().decisionToDFA;
                            if (COMPUTE_TRANSITION_STATS) {
                                parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                            } else if (!REUSE_PARSER_DFA) {
                                parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                            }
                            sharedParsers[thread] = parser;
                        }
                        parser.removeParseListeners();
                        parser.removeErrorListeners();
                        parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.addErrorListener(new SummarizingDiagnosticErrorListener());
                        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                        parser.setBuildParseTree(BUILD_PARSE_TREES);
                        if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                            parser.addParseListener(new ChecksumParseTreeListener(checksum));
                        }
                        if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                            parser.addParseListener(listener);
                        }
                        if (BAIL_ON_ERROR) {
                            parser.setErrorHandler(new BailErrorStrategy());
                        }
                        parseResult = parseMethod.invoke(parser);
                    }
                    assertThat(parseResult, instanceOf(ParseTree.class));
                    if (COMPUTE_CHECKSUM && BUILD_PARSE_TREES) {
                        ParseTreeWalker.DEFAULT.walk(new ChecksumParseTreeListener(checksum), (ParseTree) parseResult);
                    }
                    if (BUILD_PARSE_TREES && BLANK_LISTENER) {
                        ParseTreeWalker.DEFAULT.walk(listener, (ParseTree) parseResult);
                    }
                    return new FileParseResult(input.getSourceName(), (int) checksum.getValue(), (ParseTree) parseResult, tokens.size(), TIME_PARSE_ONLY ? parseStartTime : startTime, lexer, parser);
                } catch (Exception e) {
                    if (!REPORT_SYNTAX_ERRORS && e instanceof ParseCancellationException) {
                        return new FileParseResult("unknown", (int) checksum.getValue(), null, 0, startTime, null, null);
                    }
                    e.printStackTrace(System.out);
                    throw new IllegalStateException(e);
                }
            }
        };
    } catch (Exception e) {
        e.printStackTrace(System.out);
        Assert.fail(e.getMessage());
        throw new IllegalStateException(e);
    }
}
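A detail worth noting in the two-stage parse above is that the entry rule is invoked through reflection, so a ParseCancellationException thrown by the BailErrorStrategy of the SLL stage surfaces as the cause of an InvocationTargetException rather than directly. Below is a minimal sketch of that unwrapping step, assuming an already-configured Parser and a no-argument entry rule name; the helper class and method names are hypothetical.

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.misc.ParseCancellationException;

final class ReflectiveEntryPoint {

    /** Invokes the named entry rule reflectively and rethrows a bail signal unwrapped. */
    static Object invokeEntryRule(Parser parser, String entryPoint) throws Exception {
        Method parseMethod = parser.getClass().getMethod(entryPoint);
        try {
            return parseMethod.invoke(parser);
        } catch (InvocationTargetException ex) {
            // Reflection wraps everything the rule method throws, so the bail
            // signal must be unwrapped before deciding to retry with LL mode.
            if (ex.getCause() instanceof ParseCancellationException) {
                throw (ParseCancellationException) ex.getCause();
            }
            throw ex;
        }
    }
}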
Use of org.antlr.v4.runtime.misc.ParseCancellationException in project claw-compiler, by C2SM-RCM.
The class ClawPragma, method analyze:
/**
 * Analyze a raw string input and match it with the CLAW language definition.
 *
 * @param rawPragma A raw pragma statement to be analyzed against the CLAW
 *                  language.
 * @param lineno    Line number of the pragma statement.
 * @return A ClawPragma object with the corresponding extracted information.
 * @throws IllegalDirectiveException If the directive does not follow the CLAW
 *                                   language specification.
 */
private static ClawPragma analyze(String rawPragma, int lineno) throws IllegalDirectiveException {
    // Remove additional claw keyword
    rawPragma = nakenize(rawPragma);
    // Discard the ignored code after the claw ignore directive
    if (rawPragma.toLowerCase().contains(IGNORE)) {
        rawPragma = rawPragma.substring(0, rawPragma.toLowerCase().indexOf(IGNORE) + IGNORE.length());
    }
    // Instantiate the lexer with the raw string input
    ClawLexer lexer = new ClawLexer(CharStreams.fromString(rawPragma));
    // Get a list of matched tokens
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    // Pass the tokens to the parser
    ClawParser parser = new ClawParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    parser.removeErrorListeners();
    try {
        // Start the parser analysis from the "analyze" entry point
        ClawParser.AnalyzeContext ctx = parser.analyze();
        // Get the ClawPragma object returned by the parser after analysis.
        return ctx.l;
    } catch (ParseCancellationException pcex) {
        if (pcex.getCause() instanceof InputMismatchException) {
            InputMismatchException imex = (InputMismatchException) pcex.getCause();
            throw new IllegalDirectiveException(getTokens(imex.getExpectedTokens(), parser), lineno,
                    imex.getOffendingToken().getCharPositionInLine());
        } else if (pcex.getCause() instanceof NoViableAltException) {
            NoViableAltException nvex = (NoViableAltException) pcex.getCause();
            throw new IllegalDirectiveException(nvex.getOffendingToken().getText(),
                    getTokens(nvex.getExpectedTokens(), parser), lineno,
                    nvex.getOffendingToken().getCharPositionInLine());
        }
        throw new IllegalDirectiveException(rawPragma, "Unsupported construct", lineno, 0);
    }
}
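The getTokens helper used above is not shown here. A plausible minimal implementation of such a helper, which turns the expected-token set carried by the underlying RecognitionException into readable names via the parser's vocabulary, might look like the following; this is an illustrative assumption, not the project's actual code, and the class and method names are hypothetical.

import java.util.ArrayList;
import java.util.List;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.misc.IntervalSet;

final class ExpectedTokens {

    /** Translates an expected-token set into display names using the parser's vocabulary. */
    static List<String> describe(IntervalSet expectedTokens, Parser parser) {
        List<String> names = new ArrayList<>();
        for (int tokenType : expectedTokens.toList()) {
            names.add(parser.getVocabulary().getDisplayName(tokenType));
        }
        return names;
    }
}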
Use of org.antlr.v4.runtime.misc.ParseCancellationException in project crate, by crate.
The class PgArrayParser, method invokeParser:
private Object invokeParser(InputStream inputStream,
                            Function<io.crate.protocols.postgres.antlr.v4.PgArrayParser, ParserRuleContext> parseFunction,
                            Function<byte[], Object> convert) {
    try {
        var lexer = new PgArrayLexer(CharStreams.fromStream(inputStream, StandardCharsets.UTF_8));
        var tokenStream = new CommonTokenStream(lexer);
        var parser = new io.crate.protocols.postgres.antlr.v4.PgArrayParser(tokenStream);
        lexer.removeErrorListeners();
        lexer.addErrorListener(ERROR_LISTENER);
        parser.removeErrorListeners();
        parser.addErrorListener(ERROR_LISTENER);
        ParserRuleContext tree;
        try {
            // first, try parsing with the potentially faster SLL mode
            parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            tree = parseFunction.apply(parser);
        } catch (ParseCancellationException ex) {
            // if that fails, reparse with LL mode;
            // rewind the input stream first
            tokenStream.seek(0);
            parser.reset();
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            tree = parseFunction.apply(parser);
        }
        return tree.accept(new PgArrayASTVisitor(convert));
    } catch (StackOverflowError e) {
        throw new PgArrayParsingException("stack overflow while parsing: " + e.getLocalizedMessage());
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    }
}
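This is the two-stage parsing strategy described in the documentation of PredictionMode.SLL: try the faster SLL prediction first and fall back to full LL only when the first attempt fails. A standalone sketch of the same idea follows, using BailErrorStrategy for the first stage; MyLexer, MyParser, and the root() entry rule are hypothetical generated names, not part of crate.

import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.ParseCancellationException;

public final class TwoStageParse {

    public static ParserRuleContext parse(String input) {
        MyLexer lexer = new MyLexer(CharStreams.fromString(input));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        MyParser parser = new MyParser(tokens);
        // Stage 1: fast SLL prediction, bailing out on the first problem.
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        parser.setErrorHandler(new BailErrorStrategy());
        try {
            return parser.root();
        } catch (ParseCancellationException ex) {
            // Stage 2: rewind and reparse with full LL prediction and normal error reporting.
            tokens.seek(0);
            parser.reset();
            parser.setErrorHandler(new DefaultErrorStrategy());
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            return parser.root();
        }
    }
}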
Use of org.antlr.v4.runtime.misc.ParseCancellationException in project jwt, by emweb.
The class CssParser, method parse:
private StyleSheet parse(CharStream stream) throws RecognitionException, ParseCancellationException {
    errorListener_.reset();
    Listener listener = new Listener();
    Css22Lexer lex = new Css22Lexer(stream);
    lex.removeErrorListener(ConsoleErrorListener.INSTANCE);
    lex.addErrorListener(errorListener_);
    CommonTokenStream tokens = new CommonTokenStream(lex);
    Css22Parser parser = new Css22Parser(tokens);
    parser.addParseListener(listener);
    parser.removeErrorListener(ConsoleErrorListener.INSTANCE);
    parser.addErrorListener(errorListener_);
    parser.styleSheet();
    if (!getLastError().isEmpty()) {
        return null;
    } else {
        return listener.getCurrentStylesheet();
    }
}
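Unlike the previous examples, this method neither catches ParseCancellationException nor installs a BailErrorStrategy; it only declares the exception and reports syntax problems through its own error listener, returning null when getLastError() is non-empty. A hypothetical companion method inside the same class (purely illustrative, not part of jwt) shows how a caller could fold both failure modes into a single null result:

// Hypothetical helper, assumed to live in the same class as parse() above.
public StyleSheet parseOrNull(CharStream stream) {
    try {
        return parse(stream);
    } catch (ParseCancellationException ex) {
        // parse() declares this unchecked exception rather than handling it,
        // so any recovery policy is left to the caller.
        return null;
    }
}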