Example 1: use of org.antlr.v4.runtime.ListTokenSource in the antlr4 project by tunnelvisionlabs — class ParseTreePatternMatcher, method compile.
/**
 * For repeated use of a tree pattern, compile it to a
 * {@link ParseTreePattern} using this method.
 *
 * @param pattern the tree pattern text (may contain rule/token tags)
 * @param patternRuleIndex the parser rule at which parsing of the pattern starts
 * @return the compiled pattern, ready for repeated matching
 * @throws RecognitionException if the pattern does not parse
 * @throws CannotInvokeStartRule if the parser interpreter cannot be invoked
 * @throws StartRuleDoesNotConsumeFullPattern if tokens remain after the start rule finishes
 */
public ParseTreePattern compile(String pattern, int patternRuleIndex) {
	List<? extends Token> tokenList = tokenize(pattern);
	ListTokenSource tokenSrc = new ListTokenSource(tokenList);
	CommonTokenStream tokens = new CommonTokenStream(tokenSrc);

	// Build an interpreter over the bypass-alts ATN so that rule/token
	// tags in the pattern can match as single bypass tokens.
	ParserInterpreter parserInterp = new ParserInterpreter(parser.getGrammarFileName(), parser.getVocabulary(), Arrays.asList(parser.getRuleNames()), parser.getATNWithBypassAlts(), tokens);

	ParseTree tree = null;
	try {
		// Bail on the first syntax error; BailErrorStrategy surfaces it
		// as a ParseCancellationException wrapping the original error.
		parserInterp.setErrorHandler(new BailErrorStrategy());
		tree = parserInterp.parse(patternRuleIndex);
		// System.out.println("pattern tree = "+tree.toStringTree(parserInterp));
	} catch (ParseCancellationException e) {
		// Guard the cast: a null or unexpected cause would otherwise raise a
		// misleading NullPointerException/ClassCastException at this point.
		Throwable cause = e.getCause();
		if (cause instanceof RecognitionException) {
			throw (RecognitionException) cause;
		}
		throw e;
	} catch (RecognitionException re) {
		// Thrown directly (not wrapped) by some recognizer paths; propagate as-is.
		throw re;
	} catch (Exception e) {
		throw new CannotInvokeStartRule(e);
	}

	// Make sure tree pattern compilation checks for a complete parse
	if (tokens.LA(1) != Token.EOF) {
		throw new StartRuleDoesNotConsumeFullPattern();
	}

	return new ParseTreePattern(this, pattern, patternRuleIndex, tree);
}
Example 2: use of org.antlr.v4.runtime.ListTokenSource in the antlr4 project by antlr — class ParseTreePatternMatcher, method compile.
/**
 * For repeated use of a tree pattern, compile it to a
 * {@link ParseTreePattern} using this method.
 *
 * @param pattern the tree pattern text (may contain rule/token tags)
 * @param patternRuleIndex the parser rule at which parsing of the pattern starts
 * @return the compiled pattern, ready for repeated matching
 * @throws RecognitionException if the pattern does not parse
 * @throws CannotInvokeStartRule if the parser interpreter cannot be invoked
 * @throws StartRuleDoesNotConsumeFullPattern if tokens remain after the start rule finishes
 */
public ParseTreePattern compile(String pattern, int patternRuleIndex) {
	List<? extends Token> tokenList = tokenize(pattern);
	ListTokenSource tokenSrc = new ListTokenSource(tokenList);
	CommonTokenStream tokens = new CommonTokenStream(tokenSrc);

	// Build an interpreter over the bypass-alts ATN so that rule/token
	// tags in the pattern can match as single bypass tokens.
	ParserInterpreter parserInterp = new ParserInterpreter(parser.getGrammarFileName(), parser.getVocabulary(), Arrays.asList(parser.getRuleNames()), parser.getATNWithBypassAlts(), tokens);

	ParseTree tree = null;
	try {
		// Bail on the first syntax error; BailErrorStrategy surfaces it
		// as a ParseCancellationException wrapping the original error.
		parserInterp.setErrorHandler(new BailErrorStrategy());
		tree = parserInterp.parse(patternRuleIndex);
		// System.out.println("pattern tree = "+tree.toStringTree(parserInterp));
	} catch (ParseCancellationException e) {
		// Guard the cast: a null or unexpected cause would otherwise raise a
		// misleading NullPointerException/ClassCastException at this point.
		Throwable cause = e.getCause();
		if (cause instanceof RecognitionException) {
			throw (RecognitionException) cause;
		}
		throw e;
	} catch (RecognitionException re) {
		// Thrown directly (not wrapped) by some recognizer paths; propagate as-is.
		throw re;
	} catch (Exception e) {
		throw new CannotInvokeStartRule(e);
	}

	// Make sure tree pattern compilation checks for a complete parse
	if (tokens.LA(1) != Token.EOF) {
		throw new StartRuleDoesNotConsumeFullPattern();
	}

	return new ParseTreePattern(this, pattern, patternRuleIndex, tree);
}
Example 3: use of org.antlr.v4.runtime.ListTokenSource in the ANNIS project by korpling — class AnnisParserAntlr, method parse.
/**
 * Parses an AQL query string into a {@link QueryData} representation.
 *
 * The query is first run through a raw pre-parser, its logic-clause tree is
 * normalized to disjunctive normal form (DNF), and the DNF token sequence is
 * re-parsed before three listener passes extract node IDs, query nodes with
 * metadata, and joins.
 *
 * @param aql the AQL query text to parse
 * @param corpusList IDs of the corpora the query should be evaluated against
 * @return the populated query data, after any configured post-processors ran
 * @throws AnnisQLSyntaxException if either parse pass reports syntax errors
 * @throws AnnisQLSemanticsException if a listener pass fails (NPE or illegal argument)
 */
public QueryData parse(String aql, List<Long> corpusList) {
// Shared sink for errors reported by the lexer and both parser passes.
final List<AqlParseError> errors = new LinkedList<>();
AqlLexer lexerNonDNF = new AqlLexer(new ANTLRInputStream(aql));
// Replace the default console listeners so errors are collected instead of printed.
lexerNonDNF.removeErrorListeners();
lexerNonDNF.addErrorListener(new AqlLexerErrorListener(errors));
// bring first into DNF
RawAqlPreParser rawParser = new RawAqlPreParser(new CommonTokenStream(lexerNonDNF));
rawParser.removeErrorListeners();
rawParser.addErrorListener(new AqlParseErrorListener(errors));
RawAqlPreParser.StartContext treeRaw = rawParser.start();
// Abort early if the raw parse already produced syntax errors.
if (!errors.isEmpty()) {
throw new AnnisQLSyntaxException(Joiner.on("\n").join(errors), errors);
}
// treeRaw.inspect(rawParser);
// Walk the raw tree into a logic-clause tree and normalize it to DNF in place.
ParseTreeWalker walkerRaw = new ParseTreeWalker();
RawAqlListener listenerRaw = new RawAqlListener();
walkerRaw.walk(listenerRaw, treeRaw);
LogicClause topNode = listenerRaw.getRoot();
DNFTransformer.toDNF(topNode);
// use the DNF form and parse it again
// The DNF clause tree's covered tokens are replayed as a fresh token source.
TokenSource source = new ListTokenSource(topNode.getCoveredToken());
AqlParser parserDNF = new AqlParser(new CommonTokenStream(source));
parserDNF.removeErrorListeners();
parserDNF.addErrorListener(new AqlParseErrorListener(errors));
AqlParser.StartContext treeDNF = parserDNF.start();
if (!errors.isEmpty()) {
throw new AnnisQLSyntaxException(Joiner.on("\n").join(errors), errors);
}
// Pass 1: map query-node token intervals to IDs.
ParseTreeWalker walker = new ParseTreeWalker();
NodeIDListener idListener = new NodeIDListener();
walker.walk(idListener, treeDNF);
QueryNodeListener nodeListener = new QueryNodeListener(idListener.getNodeIntervalToID());
try {
// Pass 2: collect the query nodes plus their meta annotations.
walker.walk(nodeListener, treeDNF);
QueryData data = nodeListener.getQueryData();
data.setCorpusList(corpusList);
data.addMetaAnnotations(nodeListener.getMetaData());
// Pass 3: resolve joins between the collected nodes.
JoinListener joinListener = new JoinListener(data, precedenceBound, nodeListener.getTokenPositions());
walker.walk(joinListener, treeDNF);
// Apply optional post-processing transformers in configuration order.
if (postProcessors != null) {
for (QueryDataTransformer transformer : postProcessors) {
data = transformer.transform(data);
}
}
return data;
} catch (NullPointerException ex) {
// NOTE(review): NPEs from the listener passes are deliberately treated as
// semantic errors of the query; the cause is logged here but not propagated.
log.warn("Null pointer exception occured during parsing", ex);
throw new AnnisQLSemanticsException(ex.getMessage());
} catch (IllegalArgumentException ex) {
throw new AnnisQLSemanticsException(ex.getMessage());
}
}
Aggregations