Use of org.antlr.v4.runtime.Token in project grakn by graknlabs —
class QueryParserImpl, method parseList.
/**
 * Lazily parses a sequence of queries from the given reader.
 *
 * @param reader a reader representing several queries
 * @return a lazy stream of the parsed queries
 */
@Override
public <T extends Query<?>> Stream<T> parseList(Reader reader) {
UnbufferedCharStream chars = new UnbufferedCharStream(reader);
GraqlErrorListener errors = GraqlErrorListener.withoutQueryString();
GraqlLexer lexer = createLexer(chars, errors);
// Copy the matched text into every token as it is created. `Token#getText` normally
// consults the backing `TokenStream` and calls `TokenStream#size` for a bounds check,
// but `UnbufferedTokenStream#size` is unsupported (it would force reading the whole
// input). With the copying factory, `getText` simply reads the token's own text field.
lexer.setTokenFactory(new CommonTokenFactory(true));
// An unbuffered token stream keeps memory usage constant even for enormous inputs
UnbufferedTokenStream tokens = new UnbufferedTokenStream(ChannelTokenSource.of(lexer));
GraqlParser parser = createParser(tokens, errors);
/*
The "bail" error strategy stops the parser from scanning past the end of a query, e.g.
```
match $x isa person; insert $x has name "Bob"; match $x isa movie; get;
                                               ^
```
When ANTLR reaches the indicated `match`, it considers two possibilities:
1. this is the end of the query
2. the user made a mistake — maybe they accidentally pasted the `match` here.
Because of case 2, the default strategy would read beyond the `match` to build a more
helpful error message, which can exhaust memory on very large inputs. The simpler
"bail" strategy halts immediately when it hits `match` instead.
*/
parser.setErrorHandler(new BailErrorStrategy());
// A lazy iterator that consumes exactly one query per step, never parsing ahead —
// so arbitrarily long streams of queries are handled in constant memory.
Iterable<T> queries = () -> new AbstractIterator<T>() {

@Nullable
@Override
protected T computeNext() {
if (tokens.LA(1) == Token.EOF) {
endOfData();
return null;
}
// The parser resumes exactly where the previous query left off in the stream
return (T) QUERY.parse(parser, errors);
}
};
return StreamSupport.stream(queries.spliterator(), false);
}
Use of org.antlr.v4.runtime.Token in project grakn by graknlabs —
class Autocomplete, method getTokens.
/**
 * Runs the Graql lexer over a query string and collects every token it produces.
 *
 * @param query a graql query
 * @return the tokens obtained by lexing the query
 */
private static List<? extends Token> getTokens(String query) {
GraqlLexer lexer = new GraqlLexer(new ANTLRInputStream(query));
// Replace the default console listener with a no-op one so syntax errors are ignored
lexer.removeErrorListeners();
lexer.addErrorListener(new BaseErrorListener());
return lexer.getAllTokens();
}
Use of org.antlr.v4.runtime.Token in project drools by kiegroup —
class ASTBuilderVisitor, method visitNameDefinition.
/**
 * Builds a name-definition AST node from the raw text of every token
 * underneath this parse-tree context.
 */
@Override
public BaseNode visitNameDefinition(FEEL_1_1Parser.NameDefinitionContext ctx) {
// Visit each child first so nested constructs are processed for their side effects
for (int child = 0; child < ctx.getChildCount(); child++) {
visit(ctx.getChild(child));
}
// Gather every token under this context, then extract its text in order
List<Token> tokens = new ArrayList<>();
ParserHelper.getAllTokens(ctx, tokens);
List<String> parts = new ArrayList<>(tokens.size());
for (Token token : tokens) {
parts.add(token.getText());
}
return ASTBuilderFactory.newNameDefNode(ctx, parts);
}
Use of org.antlr.v4.runtime.Token in project hive by apache —
class Select, method subselect.
/**
 * Reconstructs the SQL text of a subselect (a single SELECT block) by stitching
 * together the evaluated/original text of each clause in syntactic order, then
 * pushes the result onto the execution stack.
 *
 * @param ctx the parsed subselect statement
 * @return 0 always (success)
 */
public Integer subselect(HplsqlParser.Subselect_stmtContext ctx) {
StringBuilder sql = new StringBuilder();
// Begin with the first token of the statement (the SELECT keyword itself)
sql.append(ctx.start.getText());
exec.append(sql, evalPop(ctx.select_list()).toString(), ctx.start, ctx.select_list().getStart());
// `last` threads the stop token of the most recently emitted clause through each
// exec.append call — presumably so the original inter-clause text/whitespace can be
// preserved when appending (TODO confirm against exec.append's implementation)
Token last = ctx.select_list().stop;
if (ctx.into_clause() != null) {
// NOTE(review): the INTO clause's text is never appended to `sql` here — only its
// stop token is consumed; presumably INTO is handled elsewhere. Verify intended.
last = ctx.into_clause().stop;
}
if (ctx.from_clause() != null) {
exec.append(sql, evalPop(ctx.from_clause()).toString(), last, ctx.from_clause().getStart());
last = ctx.from_clause().stop;
} else if (conf.dualTable != null) {
// No FROM clause: fall back to the configured dual table, if any
sql.append(" FROM " + conf.dualTable);
}
if (ctx.where_clause() != null) {
exec.append(sql, evalPop(ctx.where_clause()).toString(), last, ctx.where_clause().getStart());
last = ctx.where_clause().stop;
}
// GROUP BY / HAVING / QUALIFY / ORDER BY are copied verbatim via getText rather
// than evaluated via evalPop
if (ctx.group_by_clause() != null) {
exec.append(sql, getText(ctx.group_by_clause()), last, ctx.group_by_clause().getStart());
last = ctx.group_by_clause().stop;
}
if (ctx.having_clause() != null) {
exec.append(sql, getText(ctx.having_clause()), last, ctx.having_clause().getStart());
last = ctx.having_clause().stop;
}
if (ctx.qualify_clause() != null) {
exec.append(sql, getText(ctx.qualify_clause()), last, ctx.qualify_clause().getStart());
last = ctx.qualify_clause().stop;
}
if (ctx.order_by_clause() != null) {
exec.append(sql, getText(ctx.order_by_clause()), last, ctx.order_by_clause().getStart());
last = ctx.order_by_clause().stop;
}
if (ctx.select_options() != null) {
// Select options are appended only when they evaluate to a non-null value
Var opt = evalPop(ctx.select_options());
if (!opt.isNull()) {
sql.append(" " + opt.toString());
}
}
if (ctx.select_list().select_list_limit() != null) {
sql.append(" LIMIT " + evalPop(ctx.select_list().select_list_limit().expr()));
}
// Make the assembled SQL available to the caller via the execution stack
exec.stackPush(sql);
return 0;
}
Use of org.antlr.v4.runtime.Token in project elasticsearch by elastic —
class ParserErrorStrategy, method recover.
/**
 * Instead of attempting ANTLR's default recovery, converts the recognition
 * failure into an IllegalArgumentException carrying the script location.
 *
 * @param recognizer the parser that encountered the error
 * @param re the recognition exception describing the failure
 */
@Override
public void recover(final Parser recognizer, final RecognitionException re) {
    final Token offending = re.getOffendingToken();
    final String message;
    if (offending == null) {
        message = "no parse token found.";
    } else if (re instanceof InputMismatchException) {
        // Report the token we saw alongside the full set the parser would have accepted
        message = "unexpected token [" + getTokenErrorDisplay(offending) + "]" + " was expecting one of [" + re.getExpectedTokens().toString(recognizer.getVocabulary()) + "].";
    } else if (re instanceof NoViableAltException) {
        message = offending.getType() == PainlessParser.EOF
                ? "unexpected end of script."
                : "invalid sequence of tokens near [" + getTokenErrorDisplay(offending) + "].";
    } else {
        message = "unexpected token near [" + getTokenErrorDisplay(offending) + "].";
    }
    // -1 marks an unknown offset when there is no offending token to point at
    final int offset = offending == null ? -1 : offending.getStartIndex();
    throw new Location(sourceName, offset).createError(new IllegalArgumentException(message, re));
}
Aggregations