use of org.antlr.v4.runtime.BailErrorStrategy in project systemml by apache.
the class DMLParserWrapper method doParse.
/**
 * This function is supposed to be called directly only from DmlSyntacticValidator when it encounters 'import'.
 * @param fileName script file name
 * @param dmlScript script file contents
 * @param sourceNamespace namespace from source statement
 * @param argVals script arguments
 * @return dml program, or null if at least one error occurred
 */
public DMLProgram doParse(String fileName, String dmlScript, String sourceNamespace, Map<String, String> argVals) {
    DMLProgram dmlPgm = null;
    ANTLRInputStream in;
    try {
        if (dmlScript == null) {
            dmlScript = readDMLScript(fileName, LOG);
        }
        InputStream stream = new ByteArrayInputStream(dmlScript.getBytes());
        in = new ANTLRInputStream(stream);
    } catch (FileNotFoundException e) {
        throw new ParseException("Cannot find file/resource: " + fileName, e);
    } catch (IOException e) {
        throw new ParseException("Cannot open file: " + fileName, e);
    } catch (LanguageException e) {
        throw new ParseException(e.getMessage(), e);
    }
    ProgramrootContext ast = null;
    CustomErrorListener errorListener = new CustomErrorListener();
    try {
        DmlLexer lexer = new DmlLexer(in);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        DmlParser antlr4Parser = new DmlParser(tokens);
        // For now, no optimization, since it is not able to parse integer values.
        boolean tryOptimizedParsing = false;
        if (tryOptimizedParsing) {
            // Try the faster and simpler SLL prediction mode first
            antlr4Parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
            antlr4Parser.removeErrorListeners();
            antlr4Parser.setErrorHandler(new BailErrorStrategy());
            try {
                ast = antlr4Parser.programroot();
                // If successful, no need to try full LL(*) ... SLL was enough
            } catch (ParseCancellationException ex) {
                // An error occurred, so now retry with full LL(*) for better error messages
                tokens.reset();
                antlr4Parser.reset();
                if (fileName != null) {
                    errorListener.setCurrentFileName(fileName);
                } else {
                    errorListener.setCurrentFileName("MAIN_SCRIPT");
                }
                // Set our custom error listener
                antlr4Parser.addErrorListener(errorListener);
                antlr4Parser.setErrorHandler(new DefaultErrorStrategy());
                antlr4Parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                ast = antlr4Parser.programroot();
            }
        } else {
            // Set our custom error listener
            antlr4Parser.removeErrorListeners();
            antlr4Parser.addErrorListener(errorListener);
            errorListener.setCurrentFileName(fileName);
            // Now do the parsing
            ast = antlr4Parser.programroot();
        }
    } catch (Exception e) {
        throw new ParseException("ERROR: Cannot parse the program: " + fileName, e);
    }
    // Now convert the parse tree into a DMLProgram, performing syntactic validation along the way
    ParseTree tree = ast;
    ParseTreeWalker walker = new ParseTreeWalker();
    // Get the list of function definitions, which take precedence over built-in functions of the same name
    DmlPreprocessor prep = new DmlPreprocessor(errorListener);
    walker.walk(prep, tree);
    // Syntactic validation
    DmlSyntacticValidator validator = new DmlSyntacticValidator(errorListener, argVals, sourceNamespace, prep.getFunctionDefs());
    walker.walk(validator, tree);
    errorListener.unsetCurrentFileName();
    this.parseIssues = errorListener.getParseIssues();
    this.atLeastOneWarning = errorListener.isAtLeastOneWarning();
    this.atLeastOneError = errorListener.isAtLeastOneError();
    if (atLeastOneError) {
        throw new ParseException(parseIssues, dmlScript);
    }
    if (atLeastOneWarning) {
        LOG.warn(CustomErrorListener.generateParseIssuesMessage(dmlScript, parseIssues));
    }
    dmlPgm = createDMLProgram(ast, sourceNamespace);
    return dmlPgm;
}
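The (currently disabled) two-stage pattern in the method above is the canonical use of BailErrorStrategy: parse once in fast SLL mode with bailing enabled, and only if that throws ParseCancellationException reparse in full LL mode with normal error reporting. Below is a minimal sketch of that pattern in isolation; MyLexer, MyParser, and the program entry rule are hypothetical placeholders for any ANTLR-generated lexer/parser pair, not part of SystemML.

import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.ParseCancellationException;
import org.antlr.v4.runtime.tree.ParseTree;

public final class TwoStageParse {
    // MyLexer, MyParser and the 'program' rule are hypothetical placeholders.
    public static ParseTree parse(CharStream input) {
        MyLexer lexer = new MyLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        MyParser parser = new MyParser(tokens);
        // Stage 1: fast SLL prediction; bail out on the first syntax error.
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
        parser.removeErrorListeners();
        parser.setErrorHandler(new BailErrorStrategy());
        try {
            return parser.program();
        } catch (ParseCancellationException ex) {
            // Stage 2: rewind and reparse with full LL(*) and default error recovery,
            // either to get complete diagnostics or to confirm the input is really invalid.
            tokens.seek(0);
            parser.reset();
            parser.addErrorListener(ConsoleErrorListener.INSTANCE);
            parser.setErrorHandler(new DefaultErrorStrategy());
            parser.getInterpreter().setPredictionMode(PredictionMode.LL);
            return parser.program();
        }
    }
}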
use of org.antlr.v4.runtime.BailErrorStrategy in project antlr4 by tunnelvisionlabs.
the class GrammarParserInterpreter method deriveTempParserInterpreter.
/**
* Derive a new parser from an old one that has knowledge of the grammar.
* The Grammar object is used to correctly compute outer alternative
* numbers for parse tree nodes. A parser of the same type is created
* for subclasses of {@link ParserInterpreter}.
*/
public static ParserInterpreter deriveTempParserInterpreter(Grammar g, Parser originalParser, TokenStream tokens) {
    ParserInterpreter parser;
    if (originalParser instanceof ParserInterpreter) {
        Class<? extends ParserInterpreter> c = originalParser.getClass().asSubclass(ParserInterpreter.class);
        try {
            Constructor<? extends ParserInterpreter> ctor = c.getConstructor(Grammar.class, ATN.class, TokenStream.class);
            parser = ctor.newInstance(g, originalParser.getATN(), originalParser.getInputStream());
        } catch (Exception e) {
            throw new IllegalArgumentException("can't create parser to match incoming " + originalParser.getClass().getSimpleName(), e);
        }
    } else {
        // must've been a generated parser
        char[] serializedAtn = ATNSerializer.getSerializedAsChars(originalParser.getATN(), Arrays.asList(originalParser.getRuleNames()));
        ATN deserialized = new ATNDeserializer().deserialize(serializedAtn);
        parser = new ParserInterpreter(originalParser.getGrammarFileName(), originalParser.getVocabulary(), Arrays.asList(originalParser.getRuleNames()), deserialized, tokens);
    }
    parser.setInputStream(tokens);
    // Make sure that we don't get any error messages from using this temporary parser
    parser.setErrorHandler(new BailErrorStrategy());
    parser.removeErrorListeners();
    parser.removeParseListeners();
    parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
    return parser;
}
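Here BailErrorStrategy plus LL_EXACT_AMBIG_DETECTION makes the derived interpreter suitable for silent, speculative reparses: it either succeeds or throws ParseCancellationException without printing anything. A hedged sketch of how such a derived parser might be invoked; g, originalParser, and tokens are assumed to come from an earlier parse, and the rule name "expr" is a hypothetical example.

// Assumes 'g' is an org.antlr.v4.tool.Grammar and 'originalParser'/'tokens' come from a previous parse.
ParserInterpreter probe = GrammarParserInterpreter.deriveTempParserInterpreter(g, originalParser, tokens);
try {
    tokens.seek(0);
    ParserRuleContext tree = probe.parse(g.getRule("expr").index);
    // Reparse succeeded; 'tree' can now be inspected, e.g. for exact ambiguities.
} catch (ParseCancellationException pce) {
    // BailErrorStrategy aborted at the first syntax error; no error messages were emitted.
}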
use of org.antlr.v4.runtime.BailErrorStrategy in project Alpha by alpha-asp.
the class AnswerSetsParser method parse.
public static Set<AnswerSet> parse(CharStream stream) {
    final ASPCore2Parser parser = new ASPCore2Parser(new CommonTokenStream(new ASPCore2Lexer(stream)));
    // Try SLL parsing mode (faster but may terminate incorrectly).
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.removeErrorListeners();
    parser.setErrorHandler(new BailErrorStrategy());
    return VISITOR.translate(parser.answer_sets());
}
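Because this variant uses SLL with BailErrorStrategy and no second stage, an input that SLL cannot handle surfaces as an uncaught ParseCancellationException. A sketch of what a full-LL fallback could look like for the same parser, reusing the ASPCore2Lexer/ASPCore2Parser and VISITOR from the snippet above; the method name parseWithFallback is hypothetical and not part of Alpha.

public static Set<AnswerSet> parseWithFallback(CharStream stream) {
    CommonTokenStream tokens = new CommonTokenStream(new ASPCore2Lexer(stream));
    ASPCore2Parser parser = new ASPCore2Parser(tokens);
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.removeErrorListeners();
    parser.setErrorHandler(new BailErrorStrategy());
    try {
        return VISITOR.translate(parser.answer_sets());
    } catch (ParseCancellationException ex) {
        // SLL bailed out; retry once with full LL(*) and default error reporting.
        tokens.seek(0);
        parser.reset();
        parser.addErrorListener(ConsoleErrorListener.INSTANCE);
        parser.setErrorHandler(new DefaultErrorStrategy());
        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
        return VISITOR.translate(parser.answer_sets());
    }
}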
use of org.antlr.v4.runtime.BailErrorStrategy in project antlr4 by antlr.
the class TestPerformance method getParserFactory.
protected ParserFactory getParserFactory(String lexerName, String parserName, String listenerName, final String entryPoint) {
    try {
        ClassLoader loader = new URLClassLoader(new URL[] { getTempTestDir().toURI().toURL() }, ClassLoader.getSystemClassLoader());
        final Class<? extends Lexer> lexerClass = loader.loadClass(lexerName).asSubclass(Lexer.class);
        final Class<? extends Parser> parserClass = loader.loadClass(parserName).asSubclass(Parser.class);
        final Class<? extends ParseTreeListener> listenerClass = loader.loadClass(listenerName).asSubclass(ParseTreeListener.class);
        final Constructor<? extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
        final Constructor<? extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
        // construct initial instances of the lexer and parser to deserialize their ATNs
        TokenSource tokenSource = lexerCtor.newInstance(new ANTLRInputStream(""));
        parserCtor.newInstance(new CommonTokenStream(tokenSource));
        return new ParserFactory() {
            @Override
            public FileParseResult parseFile(CharStream input, int currentPass, int thread) {
                final MurmurHashChecksum checksum = new MurmurHashChecksum();
                final long startTime = System.nanoTime();
                assert thread >= 0 && thread < NUMBER_OF_THREADS;
                try {
                    ParseTreeListener listener = sharedListeners[thread];
                    if (listener == null) {
                        listener = listenerClass.newInstance();
                        sharedListeners[thread] = listener;
                    }
                    Lexer lexer = sharedLexers[thread];
                    if (REUSE_LEXER && lexer != null) {
                        lexer.setInputStream(input);
                    } else {
                        Lexer previousLexer = lexer;
                        lexer = lexerCtor.newInstance(input);
                        DFA[] decisionToDFA = (FILE_GRANULARITY || previousLexer == null ? lexer : previousLexer).getInterpreter().decisionToDFA;
                        if (!REUSE_LEXER_DFA || (!FILE_GRANULARITY && previousLexer == null)) {
                            decisionToDFA = new DFA[decisionToDFA.length];
                        }
                        if (COMPUTE_TRANSITION_STATS) {
                            lexer.setInterpreter(new StatisticsLexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
                        } else if (!REUSE_LEXER_DFA) {
                            lexer.setInterpreter(new LexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
                        }
                        sharedLexers[thread] = lexer;
                    }
                    lexer.removeErrorListeners();
                    lexer.addErrorListener(DescriptiveErrorListener.INSTANCE);
                    if (lexer.getInterpreter().decisionToDFA[0] == null) {
                        ATN atn = lexer.getATN();
                        for (int i = 0; i < lexer.getInterpreter().decisionToDFA.length; i++) {
                            lexer.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
                        }
                    }
                    CommonTokenStream tokens = new CommonTokenStream(lexer);
                    tokens.fill();
                    tokenCount.addAndGet(currentPass, tokens.size());
                    if (COMPUTE_CHECKSUM) {
                        for (Token token : tokens.getTokens()) {
                            updateChecksum(checksum, token);
                        }
                    }
                    if (!RUN_PARSER) {
                        return new FileParseResult(input.getSourceName(), (int) checksum.getValue(), null, tokens.size(), startTime, lexer, null);
                    }
                    final long parseStartTime = System.nanoTime();
                    Parser parser = sharedParsers[thread];
                    if (REUSE_PARSER && parser != null) {
                        parser.setInputStream(tokens);
                    } else {
                        Parser previousParser = parser;
                        if (USE_PARSER_INTERPRETER) {
                            Parser referenceParser = parserCtor.newInstance(tokens);
                            parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                        } else {
                            parser = parserCtor.newInstance(tokens);
                        }
                        DFA[] decisionToDFA = (FILE_GRANULARITY || previousParser == null ? parser : previousParser).getInterpreter().decisionToDFA;
                        if (!REUSE_PARSER_DFA || (!FILE_GRANULARITY && previousParser == null)) {
                            decisionToDFA = new DFA[decisionToDFA.length];
                        }
                        if (COMPUTE_TRANSITION_STATS) {
                            parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                        } else if (!REUSE_PARSER_DFA) {
                            parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                        }
                        sharedParsers[thread] = parser;
                    }
                    parser.removeParseListeners();
                    parser.removeErrorListeners();
                    if (!TWO_STAGE_PARSING) {
                        parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.addErrorListener(new SummarizingDiagnosticErrorListener());
                    }
                    if (parser.getInterpreter().decisionToDFA[0] == null) {
                        ATN atn = parser.getATN();
                        for (int i = 0; i < parser.getInterpreter().decisionToDFA.length; i++) {
                            parser.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
                        }
                    }
                    parser.getInterpreter().setPredictionMode(TWO_STAGE_PARSING ? PredictionMode.SLL : PREDICTION_MODE);
                    parser.setBuildParseTree(BUILD_PARSE_TREES);
                    if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                        parser.addParseListener(listener);
                    }
                    if (BAIL_ON_ERROR || TWO_STAGE_PARSING) {
                        parser.setErrorHandler(new BailErrorStrategy());
                    }
                    Method parseMethod = parserClass.getMethod(entryPoint);
                    Object parseResult;
                    try {
                        if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                            parser.addParseListener(new ChecksumParseTreeListener(checksum));
                        }
                        if (USE_PARSER_INTERPRETER) {
                            ParserInterpreter parserInterpreter = (ParserInterpreter) parser;
                            parseResult = parserInterpreter.parse(Collections.lastIndexOfSubList(Arrays.asList(parser.getRuleNames()), Collections.singletonList(entryPoint)));
                        } else {
                            parseResult = parseMethod.invoke(parser);
                        }
                    } catch (InvocationTargetException ex) {
                        if (!TWO_STAGE_PARSING) {
                            throw ex;
                        }
                        String sourceName = tokens.getSourceName();
                        sourceName = sourceName != null && !sourceName.isEmpty() ? sourceName + ": " : "";
                        if (REPORT_SECOND_STAGE_RETRY) {
                            System.err.println(sourceName + "Forced to retry with full context.");
                        }
                        if (!(ex.getCause() instanceof ParseCancellationException)) {
                            throw ex;
                        }
                        tokens.seek(0);
                        if (REUSE_PARSER && parser != null) {
                            parser.setInputStream(tokens);
                        } else {
                            Parser previousParser = parser;
                            if (USE_PARSER_INTERPRETER) {
                                Parser referenceParser = parserCtor.newInstance(tokens);
                                parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                            } else {
                                parser = parserCtor.newInstance(tokens);
                            }
                            DFA[] decisionToDFA = previousParser.getInterpreter().decisionToDFA;
                            if (COMPUTE_TRANSITION_STATS) {
                                parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                            } else if (!REUSE_PARSER_DFA) {
                                parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                            }
                            sharedParsers[thread] = parser;
                        }
                        parser.removeParseListeners();
                        parser.removeErrorListeners();
                        parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.addErrorListener(new SummarizingDiagnosticErrorListener());
                        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                        parser.setBuildParseTree(BUILD_PARSE_TREES);
                        if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                            parser.addParseListener(new ChecksumParseTreeListener(checksum));
                        }
                        if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                            parser.addParseListener(listener);
                        }
                        if (BAIL_ON_ERROR) {
                            parser.setErrorHandler(new BailErrorStrategy());
                        }
                        parseResult = parseMethod.invoke(parser);
                    }
                    assertThat(parseResult, instanceOf(ParseTree.class));
                    if (COMPUTE_CHECKSUM && BUILD_PARSE_TREES) {
                        ParseTreeWalker.DEFAULT.walk(new ChecksumParseTreeListener(checksum), (ParseTree) parseResult);
                    }
                    if (BUILD_PARSE_TREES && BLANK_LISTENER) {
                        ParseTreeWalker.DEFAULT.walk(listener, (ParseTree) parseResult);
                    }
                    return new FileParseResult(input.getSourceName(), (int) checksum.getValue(), (ParseTree) parseResult, tokens.size(), TIME_PARSE_ONLY ? parseStartTime : startTime, lexer, parser);
                } catch (Exception e) {
                    if (!REPORT_SYNTAX_ERRORS && e instanceof ParseCancellationException) {
                        return new FileParseResult("unknown", (int) checksum.getValue(), null, 0, startTime, null, null);
                    }
                    e.printStackTrace(System.out);
                    throw new IllegalStateException(e);
                }
            }
        };
    } catch (Exception e) {
        e.printStackTrace(System.out);
        Assert.fail(e.getMessage());
        throw new IllegalStateException(e);
    }
}
use of org.antlr.v4.runtime.BailErrorStrategy in project claw-compiler by C2SM-RCM.
the class ClawPragma method analyze.
/**
 * Analyze a raw string input and match it with the CLAW language definition.
 *
 * @param rawPragma A raw pragma statement to be analyzed against the CLAW
 *                  language.
 * @param lineno    Line number of the pragma statement.
 * @return A ClawPragma object with the corresponding extracted information.
 * @throws IllegalDirectiveException If the directive does not follow the CLAW
 *                                   language specification.
 */
private static ClawPragma analyze(String rawPragma, int lineno) throws IllegalDirectiveException {
    // Remove the additional claw keyword
    rawPragma = nakenize(rawPragma);
    // Discard the ignored code after the claw ignore directive
    if (rawPragma.toLowerCase().contains(IGNORE)) {
        rawPragma = rawPragma.substring(0, rawPragma.toLowerCase().indexOf(IGNORE) + IGNORE.length());
    }
    // Instantiate the lexer with the raw string input
    ClawLexer lexer = new ClawLexer(CharStreams.fromString(rawPragma));
    // Get a list of matched tokens
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    // Pass the tokens to the parser
    ClawParser parser = new ClawParser(tokens);
    parser.setErrorHandler(new BailErrorStrategy());
    parser.removeErrorListeners();
    try {
        // Start the parser analysis from the "analyze" entry point
        ClawParser.AnalyzeContext ctx = parser.analyze();
        // Get the ClawPragma object returned by the parser after analysis.
        return ctx.l;
    } catch (ParseCancellationException pcex) {
        if (pcex.getCause() instanceof InputMismatchException) {
            InputMismatchException imex = (InputMismatchException) pcex.getCause();
            throw new IllegalDirectiveException(getTokens(imex.getExpectedTokens(), parser), lineno, imex.getOffendingToken().getCharPositionInLine());
        } else if (pcex.getCause() instanceof NoViableAltException) {
            NoViableAltException nvex = (NoViableAltException) pcex.getCause();
            throw new IllegalDirectiveException(nvex.getOffendingToken().getText(), getTokens(nvex.getExpectedTokens(), parser), lineno, nvex.getOffendingToken().getCharPositionInLine());
        }
        throw new IllegalDirectiveException(rawPragma, "Unsupported construct", lineno, 0);
    }
}
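This works because BailErrorStrategy wraps the underlying RecognitionException as the cause of the ParseCancellationException it throws, so the caller can inspect the offending token and expected token set instead of relying on console error output. A minimal, self-contained sketch of the same reporting idea; the helper class, method name, and message format are hypothetical and not part of claw-compiler.

import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.misc.ParseCancellationException;

// Hypothetical helper: turns a bail-out into a one-line diagnostic instead of recovering.
final class BailOutDescriber {
    static String describe(ParseCancellationException pcex, Parser parser) {
        if (pcex.getCause() instanceof RecognitionException) {
            RecognitionException re = (RecognitionException) pcex.getCause();
            Token offending = re.getOffendingToken();
            return "syntax error at '" + offending.getText() + "' (line " + offending.getLine()
                    + ":" + offending.getCharPositionInLine() + "), expected one of "
                    + re.getExpectedTokens().toString(parser.getVocabulary());
        }
        return "syntax error: " + pcex.getMessage();
    }
}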