Example usage of org.antlr.v4.runtime.BailErrorStrategy in the ballerina project (ballerina-lang): class SiddhiCompiler, method parse.
/**
 * Parses a Siddhi source string into a {@link SiddhiApp}.
 * <p>
 * Default console error listeners are replaced on both the lexer and the
 * parser with {@code SiddhiErrorListener.INSTANCE} so syntax errors are
 * reported through the Siddhi-specific listener.
 *
 * @param source the SiddhiQL source text
 * @return the compiled {@link SiddhiApp} produced by the visitor
 */
public static SiddhiApp parse(String source) {
    // NOTE(review): ANTLRInputStream is deprecated since ANTLR 4.7; prefer
    // CharStreams.fromString(source) when this project upgrades its runtime.
    ANTLRInputStream input = new ANTLRInputStream(source);
    SiddhiQLLexer lexer = new SiddhiQLLexer(input);
    lexer.removeErrorListeners();
    lexer.addErrorListener(SiddhiErrorListener.INSTANCE);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    SiddhiQLParser parser = new SiddhiQLParser(tokens);
    parser.removeErrorListeners();
    parser.addErrorListener(SiddhiErrorListener.INSTANCE);
    ParseTree tree = parser.parse();
    // Translate the ANTLR parse tree into the Siddhi object model.
    SiddhiQLVisitor eval = new SiddhiQLBaseVisitorImpl();
    return (SiddhiApp) eval.visit(tree);
}
Example usage of org.antlr.v4.runtime.BailErrorStrategy in the graphql-java project: class Parser, method parseDocument.
/**
 * Parses a GraphQL query/document string into a {@link Document}.
 * <p>
 * Uses SLL prediction with a {@link BailErrorStrategy}, so any syntax error
 * surfaces as a {@link ParseCancellationException}. After a successful parse,
 * the token stream is inspected for tokens left over past the end of the
 * document rule (on the same channel), which is also treated as invalid input.
 *
 * @param input the GraphQL source text
 * @return the parsed {@link Document}
 * @throws ParseCancellationException on a syntax error or trailing tokens
 */
public Document parseDocument(String input) {
    GraphqlLexer lexer = new GraphqlLexer(CharStreams.fromString(input));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    GraphqlParser parser = new GraphqlParser(tokenStream);
    parser.removeErrorListeners();
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.setErrorHandler(new BailErrorStrategy());
    GraphqlParser.DocumentContext documentContext = parser.document();
    GraphqlAntlrToLanguage toLanguage = new GraphqlAntlrToLanguage(tokenStream);
    toLanguage.visitDocument(documentContext);
    // Reject input that still has unconsumed tokens after the document rule:
    // a non-EOF last token, positioned past the document's stop token, on the
    // same channel, means part of the input was never parsed.
    Token stopToken = documentContext.getStop();
    List<Token> streamTokens = tokenStream.getTokens();
    if (stopToken != null && streamTokens != null && !streamTokens.isEmpty()) {
        Token trailing = streamTokens.get(streamTokens.size() - 1);
        boolean isEof = trailing.getType() == Token.EOF;
        boolean beyondDocument = trailing.getTokenIndex() > stopToken.getTokenIndex();
        boolean onSameChannel = trailing.getChannel() == stopToken.getChannel();
        if (!isEof && beyondDocument && onSameChannel) {
            throw new ParseCancellationException("There are more tokens in the query that have not been consumed");
        }
    }
    return toLanguage.getResult();
}
Example usage of org.antlr.v4.runtime.BailErrorStrategy in the antlr4 project (tunnelvisionlabs fork): class ParseTreePatternMatcher, method compile.
/**
 * For repeated use of a tree pattern, compile it to a
 * {@link ParseTreePattern} using this method.
 *
 * @param pattern the tree pattern in concrete syntax
 * @param patternRuleIndex the index of the parser rule the pattern must match
 * @return the compiled, reusable pattern
 */
public ParseTreePattern compile(String pattern, int patternRuleIndex) {
// Tokenize the pattern text and feed the resulting token list to a
// ParserInterpreter built from the wrapped parser's grammar (with bypass
// alternatives so tags like <expr> can match).
List<? extends Token> tokenList = tokenize(pattern);
ListTokenSource tokenSrc = new ListTokenSource(tokenList);
CommonTokenStream tokens = new CommonTokenStream(tokenSrc);
ParserInterpreter parserInterp = new ParserInterpreter(parser.getGrammarFileName(), parser.getVocabulary(), Arrays.asList(parser.getRuleNames()), parser.getATNWithBypassAlts(), tokens);
ParseTree tree = null;
try {
// Bail on the first syntax error so a malformed pattern fails fast.
parserInterp.setErrorHandler(new BailErrorStrategy());
tree = parserInterp.parse(patternRuleIndex);
// System.out.println("pattern tree = "+tree.toStringTree(parserInterp));
} catch (ParseCancellationException e) {
// Unwrap the bail wrapper and rethrow the underlying syntax error.
// NOTE(review): the cast assumes the cancellation cause is always a
// RecognitionException — TODO confirm BailErrorStrategy guarantees this.
throw (RecognitionException) e.getCause();
} catch (RecognitionException re) {
// Rethrow as-is; this catch exists so recognition errors are NOT
// wrapped into CannotInvokeStartRule by the catch below.
throw re;
} catch (Exception e) {
// Anything else (e.g. reflection/setup failure) means the start rule
// could not be invoked at all.
throw new CannotInvokeStartRule(e);
}
// Make sure tree pattern compilation checks for a complete parse
if (tokens.LA(1) != Token.EOF) {
throw new StartRuleDoesNotConsumeFullPattern();
}
return new ParseTreePattern(this, pattern, patternRuleIndex, tree);
}
Example usage of org.antlr.v4.runtime.BailErrorStrategy in the antlr4 project (tunnelvisionlabs fork): class TestPerformance, method getParserFactory.
/**
 * Builds a {@link ParserFactory} whose {@code parseFile} reflectively drives the
 * generated lexer/parser/listener classes (loaded from {@code tmpdir}) over an
 * input stream, honoring the benchmark configuration flags (REUSE_*, ENABLE_*,
 * TWO_STAGE_PARSING, BAIL_ON_ERROR, ...).
 *
 * @param lexerName fully qualified name of the generated lexer class
 * @param parserName fully qualified name of the generated parser class
 * @param listenerName fully qualified name of the generated parse-tree listener class
 * @param entryPoint name of the parser rule method used to start the parse
 * @return a factory producing one {@code FileParseResult} per parsed file
 */
protected ParserFactory getParserFactory(String lexerName, String parserName, String listenerName, final String entryPoint) {
    try {
        ClassLoader loader = new URLClassLoader(new URL[] { new File(tmpdir).toURI().toURL() }, ClassLoader.getSystemClassLoader());
        final Class<? extends Lexer> lexerClass = loader.loadClass(lexerName).asSubclass(Lexer.class);
        final Class<? extends Parser> parserClass = loader.loadClass(parserName).asSubclass(Parser.class);
        final Class<? extends ParseTreeListener> listenerClass = (Class<? extends ParseTreeListener>) loader.loadClass(listenerName).asSubclass(ParseTreeListener.class);
        final Constructor<? extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
        final Constructor<? extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
        // construct initial instances of the lexer and parser to deserialize their ATNs
        TokenSource tokenSource = lexerCtor.newInstance(CharStreams.fromString(""));
        parserCtor.newInstance(new CommonTokenStream(tokenSource));
        // When per-thread (non-reused) DFAs are requested, deserialize a private
        // ATN copy per thread from the generated class's serialized form.
        if (!REUSE_LEXER_DFA) {
            Field lexerSerializedATNField = lexerClass.getField("_serializedATN");
            String lexerSerializedATN = (String) lexerSerializedATNField.get(null);
            for (int i = 0; i < NUMBER_OF_THREADS; i++) {
                sharedLexerATNs[i] = new ATNDeserializer().deserialize(lexerSerializedATN.toCharArray());
            }
        }
        if (RUN_PARSER && !REUSE_PARSER_DFA) {
            Field parserSerializedATNField = parserClass.getField("_serializedATN");
            String parserSerializedATN = (String) parserSerializedATNField.get(null);
            for (int i = 0; i < NUMBER_OF_THREADS; i++) {
                sharedParserATNs[i] = new ATNDeserializer().deserialize(parserSerializedATN.toCharArray());
            }
        }
        return new ParserFactory() {
            @SuppressWarnings("unused")
            @Override
            public FileParseResult parseFile(CharStream input, int currentPass, int thread) {
                final MurmurHashChecksum checksum = new MurmurHashChecksum();
                final long startTime = System.nanoTime();
                assert thread >= 0 && thread < NUMBER_OF_THREADS;
                try {
                    // Lazily create one listener instance per worker thread.
                    ParseTreeListener listener = sharedListeners[thread];
                    if (listener == null) {
                        listener = listenerClass.newInstance();
                        sharedListeners[thread] = listener;
                    }
                    // --- Lexing phase ---
                    Lexer lexer = sharedLexers[thread];
                    if (REUSE_LEXER && lexer != null) {
                        lexer.setInputStream(input);
                    } else {
                        Lexer previousLexer = lexer;
                        lexer = lexerCtor.newInstance(input);
                        sharedLexers[thread] = lexer;
                        ATN atn = (FILE_GRANULARITY || previousLexer == null ? lexer : previousLexer).getATN();
                        if (!REUSE_LEXER_DFA || (!FILE_GRANULARITY && previousLexer == null)) {
                            atn = sharedLexerATNs[thread];
                        }
                        if (!ENABLE_LEXER_DFA) {
                            lexer.setInterpreter(new NonCachingLexerATNSimulator(lexer, atn));
                        } else if (!REUSE_LEXER_DFA || COMPUTE_TRANSITION_STATS) {
                            lexer.setInterpreter(new StatisticsLexerATNSimulator(lexer, atn));
                        }
                    }
                    lexer.removeErrorListeners();
                    lexer.addErrorListener(DescriptiveLexerErrorListener.INSTANCE);
                    lexer.getInterpreter().optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
                    if (ENABLE_LEXER_DFA && !REUSE_LEXER_DFA) {
                        lexer.getInterpreter().atn.clearDFA();
                    }
                    CommonTokenStream tokens = new CommonTokenStream(lexer);
                    tokens.fill();
                    tokenCount.addAndGet(currentPass, tokens.size());
                    if (COMPUTE_CHECKSUM) {
                        for (Token token : tokens.getTokens()) {
                            updateChecksum(checksum, token);
                        }
                    }
                    if (!RUN_PARSER) {
                        return new FileParseResult(input.getSourceName(), checksum.getValue(), null, tokens.size(), startTime, lexer, null);
                    }
                    // --- Parsing phase (first stage: SLL when TWO_STAGE_PARSING) ---
                    final long parseStartTime = System.nanoTime();
                    Parser parser = sharedParsers[thread];
                    if (REUSE_PARSER && parser != null) {
                        parser.setInputStream(tokens);
                    } else {
                        Parser previousParser = parser;
                        if (USE_PARSER_INTERPRETER) {
                            Parser referenceParser = parserCtor.newInstance(tokens);
                            parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                        } else {
                            parser = parserCtor.newInstance(tokens);
                        }
                        ATN atn = (FILE_GRANULARITY || previousParser == null ? parser : previousParser).getATN();
                        if (!REUSE_PARSER_DFA || (!FILE_GRANULARITY && previousParser == null)) {
                            // FIX: previously read sharedLexerATNs[thread], which would
                            // install a LEXER ATN into the parser's interpreter; the
                            // parser must use its own deserialized ATN (cf. the lexer
                            // branch above and the sharedParserATNs setup in the outer
                            // method).
                            atn = sharedParserATNs[thread];
                        }
                        if (!ENABLE_PARSER_DFA) {
                            parser.setInterpreter(new NonCachingParserATNSimulator(parser, atn));
                        } else if (!REUSE_PARSER_DFA || COMPUTE_TRANSITION_STATS) {
                            parser.setInterpreter(new StatisticsParserATNSimulator(parser, atn));
                        }
                        sharedParsers[thread] = parser;
                    }
                    parser.removeParseListeners();
                    parser.removeErrorListeners();
                    if (!TWO_STAGE_PARSING) {
                        parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.addErrorListener(new SummarizingDiagnosticErrorListener());
                    }
                    if (ENABLE_PARSER_DFA && !REUSE_PARSER_DFA) {
                        parser.getInterpreter().atn.clearDFA();
                    }
                    parser.getInterpreter().setPredictionMode(TWO_STAGE_PARSING ? PredictionMode.SLL : PREDICTION_MODE);
                    parser.getInterpreter().force_global_context = FORCE_GLOBAL_CONTEXT && !TWO_STAGE_PARSING;
                    parser.getInterpreter().always_try_local_context = TRY_LOCAL_CONTEXT_FIRST || TWO_STAGE_PARSING;
                    parser.getInterpreter().enable_global_context_dfa = ENABLE_PARSER_FULL_CONTEXT_DFA;
                    parser.getInterpreter().optimize_ll1 = OPTIMIZE_LL1;
                    parser.getInterpreter().optimize_unique_closure = OPTIMIZE_UNIQUE_CLOSURE;
                    parser.getInterpreter().optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
                    parser.getInterpreter().tail_call_preserves_sll = TAIL_CALL_PRESERVES_SLL;
                    parser.getInterpreter().treat_sllk1_conflict_as_ambiguity = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
                    parser.setBuildParseTree(BUILD_PARSE_TREES);
                    if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                        parser.addParseListener(listener);
                    }
                    if (BAIL_ON_ERROR || TWO_STAGE_PARSING) {
                        parser.setErrorHandler(new BailErrorStrategy());
                    }
                    Method parseMethod = parserClass.getMethod(entryPoint);
                    Object parseResult;
                    try {
                        if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                            parser.addParseListener(new ChecksumParseTreeListener(checksum));
                        }
                        if (USE_PARSER_INTERPRETER) {
                            ParserInterpreter parserInterpreter = (ParserInterpreter) parser;
                            parseResult = parserInterpreter.parse(Collections.lastIndexOfSubList(Arrays.asList(parser.getRuleNames()), Collections.singletonList(entryPoint)));
                        } else {
                            parseResult = parseMethod.invoke(parser);
                        }
                    } catch (InvocationTargetException ex) {
                        // Second stage of two-stage parsing: the SLL pass bailed, so
                        // rewind and retry with full-context prediction.
                        if (!TWO_STAGE_PARSING) {
                            throw ex;
                        }
                        String sourceName = tokens.getSourceName();
                        sourceName = sourceName != null && !sourceName.isEmpty() ? sourceName + ": " : "";
                        if (REPORT_SECOND_STAGE_RETRY) {
                            System.err.println(sourceName + "Forced to retry with full context.");
                        }
                        if (!(ex.getCause() instanceof ParseCancellationException)) {
                            throw ex;
                        }
                        tokens.seek(0);
                        if (REUSE_PARSER && sharedParsers[thread] != null) {
                            parser.setInputStream(tokens);
                        } else {
                            if (USE_PARSER_INTERPRETER) {
                                Parser referenceParser = parserCtor.newInstance(tokens);
                                parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                            } else {
                                parser = parserCtor.newInstance(tokens);
                            }
                            sharedParsers[thread] = parser;
                        }
                        parser.removeParseListeners();
                        parser.removeErrorListeners();
                        parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.addErrorListener(new SummarizingDiagnosticErrorListener());
                        if (!ENABLE_PARSER_DFA) {
                            parser.setInterpreter(new NonCachingParserATNSimulator(parser, parser.getATN()));
                        } else if (!REUSE_PARSER_DFA) {
                            parser.setInterpreter(new StatisticsParserATNSimulator(parser, sharedParserATNs[thread]));
                        } else if (COMPUTE_TRANSITION_STATS) {
                            parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN()));
                        }
                        parser.getInterpreter().setPredictionMode(PREDICTION_MODE);
                        parser.getInterpreter().force_global_context = FORCE_GLOBAL_CONTEXT;
                        parser.getInterpreter().always_try_local_context = TRY_LOCAL_CONTEXT_FIRST;
                        parser.getInterpreter().enable_global_context_dfa = ENABLE_PARSER_FULL_CONTEXT_DFA;
                        parser.getInterpreter().optimize_ll1 = OPTIMIZE_LL1;
                        parser.getInterpreter().optimize_unique_closure = OPTIMIZE_UNIQUE_CLOSURE;
                        parser.getInterpreter().optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
                        parser.getInterpreter().tail_call_preserves_sll = TAIL_CALL_PRESERVES_SLL;
                        parser.getInterpreter().treat_sllk1_conflict_as_ambiguity = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
                        parser.setBuildParseTree(BUILD_PARSE_TREES);
                        if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                            parser.addParseListener(new ChecksumParseTreeListener(checksum));
                        }
                        if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                            parser.addParseListener(listener);
                        }
                        if (BAIL_ON_ERROR) {
                            parser.setErrorHandler(new BailErrorStrategy());
                        }
                        parseResult = parseMethod.invoke(parser);
                    }
                    assertThat(parseResult, instanceOf(ParseTree.class));
                    if (COMPUTE_CHECKSUM && BUILD_PARSE_TREES) {
                        ParseTreeWalker.DEFAULT.walk(new ChecksumParseTreeListener(checksum), (ParseTree) parseResult);
                    }
                    if (BUILD_PARSE_TREES && BLANK_LISTENER) {
                        ParseTreeWalker.DEFAULT.walk(listener, (ParserRuleContext) parseResult);
                    }
                    return new FileParseResult(input.getSourceName(), checksum.getValue(), (ParseTree) parseResult, tokens.size(), TIME_PARSE_ONLY ? parseStartTime : startTime, lexer, parser);
                } catch (Exception e) {
                    if (!REPORT_SYNTAX_ERRORS && e instanceof ParseCancellationException) {
                        return new FileParseResult("unknown", checksum.getValue(), null, 0, startTime, null, null);
                    }
                    e.printStackTrace(System.out);
                    throw new IllegalStateException(e);
                }
            }
        };
    } catch (Exception e) {
        e.printStackTrace(System.out);
        Assert.fail(e.getMessage());
        throw new IllegalStateException(e);
    }
}
Example usage of org.antlr.v4.runtime.BailErrorStrategy in the Alpha project (alpha-asp): class ProgramParserImpl, method parse.
/**
 * Parses an ASP-Core-2 program from the given character stream.
 * First attempts fast SLL prediction with a BailErrorStrategy; if that is
 * cancelled by a RecognitionException, the token stream is rewound and the
 * parse is retried with full LL prediction and default error recovery.
 *
 * @param stream the character stream containing the program text
 * @param externals program-specific external predicate interpretations,
 *                  merged over this parser's preloaded externals (may be
 *                  null or empty)
 * @return the internal program representation built by the tree visitor
 */
public ASPCore2Program parse(CharStream stream, Map<String, PredicateInterpretation> externals) {
// @formatter:off
/*
 * // In order to require less memory: use unbuffered streams and avoid constructing a full parse tree.
 * ASPCore2Lexer lexer = new ASPCore2Lexer(new UnbufferedCharStream(is));
 * lexer.setTokenFactory(new CommonTokenFactory(true));
 * final ASPCore2Parser parser = new ASPCore2Parser(new UnbufferedTokenStream<>(lexer));
 * parser.setBuildParseTree(false);
 */
// @formatter:on
CommonTokenStream tokens = new CommonTokenStream(new ASPCore2Lexer(stream));
final ASPCore2Parser parser = new ASPCore2Parser(tokens);
// Try SLL parsing mode (faster but may terminate incorrectly).
parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
parser.removeErrorListeners();
parser.setErrorHandler(new BailErrorStrategy());
final CustomErrorListener errorListener = new CustomErrorListener(stream.getSourceName());
ASPCore2Parser.ProgramContext programContext;
try {
// Parse program
programContext = parser.program();
} catch (ParseCancellationException e) {
// retry with LL parser and DefaultErrorStrategy printing errors to console.
if (e.getCause() instanceof RecognitionException) {
tokens.seek(0);
parser.addErrorListener(errorListener);
parser.setErrorHandler(new DefaultErrorStrategy());
parser.getInterpreter().setPredictionMode(PredictionMode.LL);
// Re-run parse.
programContext = parser.program();
} else {
// Cancellation not caused by a syntax error — propagate unchanged.
throw e;
}
}
// If the error listener recorded a RecognitionException, rethrow it (no recovery
// is attempted) and the user will only see the first error encountered.
if (errorListener.getRecognitionException() != null) {
throw errorListener.getRecognitionException();
}
// Abort parsing if there were some (recoverable) syntax errors.
if (parser.getNumberOfSyntaxErrors() != 0) {
throw new ParseCancellationException();
}
// The union of this parser's preloaded externals and the (program-specific) externals passed to the parse method
Map<String, PredicateInterpretation> knownExternals;
if (externals != null && !externals.isEmpty()) {
knownExternals = new HashMap<>(preloadedExternals);
knownExternals.putAll(externals);
} else {
knownExternals = preloadedExternals;
}
// Construct internal program representation.
ParseTreeVisitor visitor = new ParseTreeVisitor(knownExternals);
return visitor.translate(programContext);
}
Aggregations