Use of org.antlr.v4.runtime.RecognitionException in project antlr4 by antlr.
Class RuleFunction, method getElementFrequenciesForAlt:
/** Given list of X and r refs in alt, compute how many of each there are */
protected Pair<FrequencySet<String>, FrequencySet<String>> getElementFrequenciesForAlt(AltAST ast) {
    try {
        ElementFrequenciesVisitor visitor = new ElementFrequenciesVisitor(new CommonTreeNodeStream(new GrammarASTAdaptor(), ast));
        visitor.outerAlternative();
        if (visitor.frequencies.size() != 1) {
            factory.getGrammar().tool.errMgr.toolError(ErrorType.INTERNAL_ERROR);
            return new Pair<>(new FrequencySet<String>(), new FrequencySet<String>());
        }
        return new Pair<>(visitor.getMinFrequencies(), visitor.frequencies.peek());
    } catch (RecognitionException ex) {
        factory.getGrammar().tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, ex);
        return new Pair<>(new FrequencySet<String>(), new FrequencySet<String>());
    }
}
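The min/max frequency sets returned here are what RuleFunction uses to decide whether a token or rule reference in an alternative gets a single field or a list-valued accessor in the generated context class. Below is a minimal, self-contained sketch of the counting idea; it uses a plain HashMap as a stand-in for ANTLR's FrequencySet, and the class and method names are illustrative, not part of the tool's API.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative stand-in for the frequency-counting idea (not ANTLR's FrequencySet).
public class ElementFrequenciesSketch {
    // Count how often each element label occurs in a single alternative.
    static Map<String, Integer> countRefs(List<String> elementNamesInAlt) {
        Map<String, Integer> freq = new HashMap<>();
        for (String name : elementNamesInAlt) {
            freq.merge(name, 1, Integer::sum); // increment this element's count
        }
        return freq;
    }

    public static void main(String[] args) {
        // "ID" occurs twice, so a generated context would need a list-valued accessor for it;
        // prints something like {expr=1, ID=2} (map order is not guaranteed).
        System.out.println(countRefs(Arrays.asList("ID", "expr", "ID")));
    }
}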
Use of org.antlr.v4.runtime.RecognitionException in project antlr4 by antlr.
Class TestVisitors, method testVisitErrorNode:
/**
 * This test verifies the basic behavior of visitors, with an emphasis on
 * {@link AbstractParseTreeVisitor#visitErrorNode}.
 */
@Test
public void testVisitErrorNode() {
    String input = "";
    VisitorBasicLexer lexer = new VisitorBasicLexer(new ANTLRInputStream(input));
    VisitorBasicParser parser = new VisitorBasicParser(new CommonTokenStream(lexer));
    final List<String> errors = new ArrayList<>();
    parser.removeErrorListeners();
    parser.addErrorListener(new BaseErrorListener() {
        @Override
        public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) {
            errors.add("line " + line + ":" + charPositionInLine + " " + msg);
        }
    });

    VisitorBasicParser.SContext context = parser.s();
    Assert.assertEquals("(s <missing 'A'> <EOF>)", context.toStringTree(parser));
    Assert.assertEquals(1, errors.size());
    Assert.assertEquals("line 1:0 missing 'A' at '<EOF>'", errors.get(0));

    VisitorBasicVisitor<String> listener = new VisitorBasicBaseVisitor<String>() {
        @Override
        public String visitErrorNode(ErrorNode node) {
            return "Error encountered: " + node.getSymbol();
        }

        @Override
        protected String defaultResult() {
            return "";
        }

        @Override
        protected String aggregateResult(String aggregate, String nextResult) {
            return aggregate + nextResult;
        }
    };

    String result = listener.visit(context);
    String expected = "Error encountered: [@-1,-1:-1='<missing 'A'>',<1>,1:0]";
    Assert.assertEquals(expected, result);
}
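The anonymous BaseErrorListener above is a common pattern for capturing syntax errors in tests instead of letting them print to the console. A reusable sketch of the same idea follows; the class name is my own, not part of the ANTLR distribution, but the overridden syntaxError signature is the standard one from org.antlr.v4.runtime.BaseErrorListener.

import java.util.ArrayList;
import java.util.List;

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;

// Hypothetical helper: collects syntax errors as strings instead of printing them.
public class CollectingErrorListener extends BaseErrorListener {
    private final List<String> errors = new ArrayList<>();

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                            int line, int charPositionInLine, String msg,
                            RecognitionException e) {
        errors.add("line " + line + ":" + charPositionInLine + " " + msg);
    }

    public List<String> getErrors() {
        return errors;
    }
}

// Usage sketch:
//   CollectingErrorListener errorListener = new CollectingErrorListener();
//   parser.removeErrorListeners();
//   parser.addErrorListener(errorListener);
//   ... parse ...
//   then assert on errorListener.getErrors()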
Use of org.antlr.v4.runtime.RecognitionException in project antlr4 by antlr.
Class ParserATNFactory, method _createATN:
protected void _createATN(Collection<Rule> rules) {
    createRuleStartAndStopATNStates();
    GrammarASTAdaptor adaptor = new GrammarASTAdaptor();
    for (Rule r : rules) {
        // find rule's block
        GrammarAST blk = (GrammarAST) r.ast.getFirstChildWithType(ANTLRParser.BLOCK);
        CommonTreeNodeStream nodes = new CommonTreeNodeStream(adaptor, blk);
        ATNBuilder b = new ATNBuilder(nodes, this);
        try {
            setCurrentRuleName(r.name);
            Handle h = b.ruleBlock(null);
            rule(r.ast, r.name, h);
        } catch (RecognitionException re) {
            ErrorManager.fatalInternalError("bad grammar AST structure", re);
        }
    }
}
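_createATN is normally reached indirectly: Grammar builds its ATN lazily through ParserATNFactory.createATN(). The sketch below shows that entry point, assuming the Grammar(String) convenience constructor used in the ANTLR tool's own test suite; note that it declares the v3 org.antlr.runtime.RecognitionException, because the tool front end still parses grammars with the ANTLR v3 runtime.

import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.tool.Grammar;

public class AtnDemo {
    public static void main(String[] args) throws org.antlr.runtime.RecognitionException {
        // Grammar(String) is a testing convenience; real builds go through the Tool.
        Grammar g = new Grammar("grammar T;\n" + "a : 'x' 'y' ;\n");
        ATN atn = g.getATN(); // builds the ATN via ParserATNFactory on first use
        System.out.println(atn.states.size() + " ATN states");
    }
}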
Use of org.antlr.v4.runtime.RecognitionException in project antlr4 by antlr.
Class Tool, method parse:
public GrammarRootAST parse(String fileName, CharStream in) {
    try {
        GrammarASTAdaptor adaptor = new GrammarASTAdaptor(in);
        ToolANTLRLexer lexer = new ToolANTLRLexer(in, this);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        lexer.tokens = tokens;
        ToolANTLRParser p = new ToolANTLRParser(tokens, this);
        p.setTreeAdaptor(adaptor);
        try {
            ParserRuleReturnScope r = p.grammarSpec();
            GrammarAST root = (GrammarAST) r.getTree();
            if (root instanceof GrammarRootAST) {
                ((GrammarRootAST) root).hasErrors = lexer.getNumberOfSyntaxErrors() > 0 || p.getNumberOfSyntaxErrors() > 0;
                assert ((GrammarRootAST) root).tokenStream == tokens;
                if (grammarOptions != null) {
                    ((GrammarRootAST) root).cmdLineOptions = grammarOptions;
                }
                return ((GrammarRootAST) root);
            }
        } catch (v3TreeGrammarException e) {
            errMgr.grammarError(ErrorType.V3_TREE_GRAMMAR, fileName, e.location);
        }
        return null;
    } catch (RecognitionException re) {
        // TODO: do we gen errors now?
        ErrorManager.internalError("can't generate this message at moment; antlr recovers");
    }
    return null;
}
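A sketch of calling parse directly follows. The CharStream parameter here is the v3 org.antlr.runtime.CharStream (the tool's grammar parser is v3-based), so the input comes from an ANTLRStringStream; the grammar text and printed output are illustrative.

import org.antlr.runtime.ANTLRStringStream;
import org.antlr.v4.Tool;
import org.antlr.v4.tool.ast.GrammarRootAST;

public class ParseDemo {
    public static void main(String[] args) {
        Tool tool = new Tool();
        String grammar = "grammar T;\n" + "a : 'x' ;\n";
        // fileName is only used for error reporting; the content comes from the stream.
        GrammarRootAST root = tool.parse("T.g4", new ANTLRStringStream(grammar));
        if (root != null && !root.hasErrors) {
            System.out.println(root.toStringTree());
        }
    }
}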
Use of org.antlr.v4.runtime.RecognitionException in project aic-praise by aic-sri-international.
Class HOGMQueryRunner, method query:
public List<HOGMQueryResult> query() {
    List<HOGMQueryResult> result = new ArrayList<>();
    Expression queryExpr = null;
    //
    ParsedHOGModel parsedModel = null;
    for (String query : queries) {
        long startQuery = System.currentTimeMillis();
        List<HOGMQueryError> errors = new ArrayList<>();
        try {
            if (model == null || model.trim().equals("")) {
                errors.add(new HOGMQueryError(HOGMQueryError.Context.MODEL, "Model not specified", 0, 0, 0));
            }
            if (query == null || query.trim().equals("")) {
                errors.add(new HOGMQueryError(HOGMQueryError.Context.QUERY, "Query not specified", 0, 0, 0));
            }
            if (errors.size() == 0) {
                HOGMParserWrapper parser = new HOGMParserWrapper();
                if (parsedModel == null) {
                    parsedModel = parser.parseModel(model, new QueryErrorListener(HOGMQueryError.Context.MODEL, errors));
                }
                queryExpr = parser.parseTerm(query, new QueryErrorListener(HOGMQueryError.Context.QUERY, errors));
                if (errors.size() == 0) {
                    FactorsAndTypes factorsAndTypes = new ExpressionFactorsAndTypes(parsedModel);
                    if (!canceled) {
                        inferencer = new InferenceForFactorGraphAndEvidence(factorsAndTypes, false, null, true, getOptionalTheory());
                        startQuery = System.currentTimeMillis();
                        Expression marginal = inferencer.solve(queryExpr);
                        result.add(new HOGMQueryResult(query, queryExpr, parsedModel, marginal, System.currentTimeMillis() - startQuery));
                    }
                }
            }
        } catch (RecognitionException re) {
            errors.add(new HOGMQueryError(HOGMQueryError.Context.MODEL, re.getMessage(), re.getOffendingToken().getLine(), re.getOffendingToken().getStartIndex(), re.getOffendingToken().getStopIndex()));
        } catch (UnableToParseAllTheInputError utpai) {
            errors.add(new HOGMQueryError(utpai));
        } catch (HOGModelException me) {
            me.getErrors().forEach(modelError -> {
                String inStatement = modelError.getInStatementInfo().statement.toString();
                String inSource = modelError.getInStatementInfo().sourceText;
                String inSubStatement = modelError.getMessage();
                String inInfo = "";
                if (inSubStatement.equals("") || inSubStatement.equals(inSource)) {
                    inInfo = " in '" + inStatement + "'";
                } else {
                    inInfo = " ('" + inSubStatement + "') in '" + inStatement + "'";
                }
                if (!inSource.replaceAll(" ", "").replaceAll(";", "").equals(inStatement.replaceAll(" ", ""))) {
                    inInfo = inInfo + " derived from '" + inSource + "'";
                }
                errors.add(new HOGMQueryError(HOGMQueryError.Context.MODEL, modelError.getErrorType().formattedMessage() + inInfo, modelError.getInStatementInfo().line, modelError.getInStatementInfo().startIndex, modelError.getInStatementInfo().endIndex));
            });
        } catch (Throwable t) {
            // Unexpected
            errors.add(new HOGMQueryError(t));
        }
        if (errors.size() > 0) {
            result.add(new HOGMQueryResult(query, queryExpr, parsedModel, errors, System.currentTimeMillis() - startQuery));
        }
    }
    return result;
}
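The QueryErrorListener passed to the parser wrapper above is not shown in this listing. Below is a plausible sketch of what such a listener could look like, assuming it is an ANTLR BaseErrorListener that converts syntax errors into the same (context, message, line, startIndex, stopIndex) HOGMQueryError shape used in query(); the class body is hypothetical and the actual aic-praise implementation may differ.

import java.util.List;

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import org.antlr.v4.runtime.Token;

// Hypothetical sketch of an error listener like the one used in query() above.
public class QueryErrorListener extends BaseErrorListener {
    private final HOGMQueryError.Context context;
    private final List<HOGMQueryError> errors;

    public QueryErrorListener(HOGMQueryError.Context context, List<HOGMQueryError> errors) {
        this.context = context;
        this.errors = errors;
    }

    @Override
    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
                            int line, int charPositionInLine, String msg,
                            RecognitionException e) {
        int start = 0;
        int stop = 0;
        if (offendingSymbol instanceof Token) {
            start = ((Token) offendingSymbol).getStartIndex();
            stop = ((Token) offendingSymbol).getStopIndex();
        }
        errors.add(new HOGMQueryError(context, msg, line, start, stop));
    }
}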