Use of org.antlr.v4.runtime.CommonTokenStream in project graphql-java (by graphql-java):
the class Parser, method parseDocument.
/**
 * Parses a GraphQL query string into a language {@link Document}.
 *
 * @param input the raw GraphQL source text
 * @return the parsed document AST
 * @throws ParseCancellationException if the input is syntactically invalid
 *         or contains trailing tokens beyond the parsed document
 */
public Document parseDocument(String input) {
    // Wire up the generated ANTLR lexer/parser pipeline.
    GraphqlLexer lexer = new GraphqlLexer(CharStreams.fromString(input));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    GraphqlParser parser = new GraphqlParser(tokenStream);
    parser.removeErrorListeners();
    // SLL prediction is the fast path; together with the bail strategy the
    // parser aborts on the first syntax error instead of trying to recover.
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    parser.setErrorHandler(new BailErrorStrategy());

    GraphqlParser.DocumentContext documentContext = parser.document();
    GraphqlAntlrToLanguage toLanguage = new GraphqlAntlrToLanguage(tokenStream);
    toLanguage.visitDocument(documentContext);

    Token stopToken = documentContext.getStop();
    List<Token> bufferedTokens = tokenStream.getTokens();
    if (stopToken != null && bufferedTokens != null && !bufferedTokens.isEmpty()) {
        Token trailing = bufferedTokens.get(bufferedTokens.size() - 1);
        // If a non-EOF token on the same channel sits past the last token the
        // parse consumed, the input had extra content and is invalid.
        boolean extraTokensRemain = trailing.getType() != Token.EOF
                && trailing.getTokenIndex() > stopToken.getTokenIndex()
                && trailing.getChannel() == stopToken.getChannel();
        if (extraTokensRemain) {
            throw new ParseCancellationException("There are more tokens in the query that have not been consumed");
        }
    }
    return toLanguage.getResult();
}
Use of org.antlr.v4.runtime.CommonTokenStream in project kalang (by kasonyang):
the class KalangCompilerTest, method test.
@Test
public void test() {
    // Compile a minimal source and verify the retained token stream.
    KalangCompiler compiler = new KalangCompiler();
    compiler.addSource("Test", "class{ }", "Test.kl");
    compiler.compile();
    CompilationUnit unit = compiler.getCompilationUnit("Test");
    assert unit != null;
    CommonTokenStream tokenStream = unit.getTokenStream();
    // The buffered token list includes tokens from every channel,
    // hidden ones (e.g. whitespace) as well as the default channel.
    List<Token> bufferedTokens = tokenStream.getTokens();
    assertEquals(5, tokenStream.size());
    Token[] tokenArray = bufferedTokens.toArray(new Token[0]);
    testTokenNavigator(tokenArray, unit.getAstBuilder().getParseTree());
}
Use of org.antlr.v4.runtime.CommonTokenStream in project kalang (by kasonyang):
the class TokenUtilTest, method test.
// Exercises CommonTokenStream's lazy buffering: tokens are fetched from the
// lexer only on demand (consume/LT/LA), so size() grows as the stream is read.
@Test
public void test() {
CommonTokenStream ts = TokenStreamFactory.createTokenStream("class{ }");
// Nothing has been consumed or looked at yet, so no tokens are buffered.
int tokenSize = ts.size();
assertEquals(0, tokenSize);
List<Token> tokens = ts.getTokens();
assertEquals(0, tokens.size());
// Consume the first two default-channel tokens: "class" and "{".
ts.consume();
ts.consume();
assertEquals("}", ts.LT(1).getText());
assertEquals("{", ts.LT(-1).getText());
assertEquals("class", ts.LT(-2).getText());
// 4 because LT(1) forced fetching through "}": "class", "{", the hidden
// whitespace token between "{" and "}", and "}" are now buffered.
// EOF has not been fetched yet — presumably that accounts for 5 below; verify.
assertEquals(4, ts.size());
int consumeSize = 2;
while (ts.LA(1) != IntStream.EOF) {
ts.consume();
consumeSize++;
}
tokens = ts.getTokens();
// Full buffer: class, {, hidden whitespace, }, EOF.
assertEquals(5, tokens.size());
// Only "}" remained before EOF, so exactly one more consume happened.
assertEquals(3, consumeSize);
}
Use of org.antlr.v4.runtime.CommonTokenStream in project titan.EclipsePlug-ins (by eclipse):
the class IntervalDetector, method popInterval.
/**
 * Pops the actual interval off of the stack, making its parent the actual
 * interval. The ending offset of the popped-off interval is set here.
 * <p>
 * If the actual interval is the root interval, then it is not popped off the
 * stack. This situation can only happen in case of a syntactically invalid
 * file.
 * <p>
 * The last non-hidden token will be the end of the interval.
 *
 * @param aTokenStream token stream to get the list of tokens for searching hidden tokens
 */
public final void popInterval(final CommonTokenStream aTokenStream) {
    // Walk backwards from the token before the current position to skip
    // hidden tokens (whitespace/comments); that token ends the interval.
    final List<Token> tokenList = aTokenStream.getTokens();
    final int lastVisibleIndex = getNonHiddenTokensBefore(aTokenStream.index() - 1, tokenList);
    final Token endToken = aTokenStream.get(lastVisibleIndex);
    popInterval(endToken);
}
Use of org.antlr.v4.runtime.CommonTokenStream in project beetl2.0 (by javamonkey):
the class DefaultTemplateEngine, method createProgram.
/**
 * Parses a template and assembles a {@link Program}: the ANTLR-built
 * metadata plus the template's static text segments, encoded as bytes or
 * chars depending on the group's output configuration.
 */
@Override
public Program createProgram(Resource resource, Reader reader, Map<Integer, String> textMap, String cr, GroupTemplate gt) {
    ANTLRInputStream input;
    try {
        input = new ANTLRInputStream(reader);
    } catch (IOException e) {
        // Reading from the supplied reader should not fail here.
        throw new RuntimeException(e);
    }
    BeetlLexer lexer = new BeetlLexer(input);
    lexer.removeErrorListeners();
    lexer.addErrorListener(syntaxError);
    BeetlParser parser = new BeetlParser(new CommonTokenStream(lexer));
    parser.setErrorHandler(antlrErrorStrategy);
    // Parsing begins at the "prog" rule.
    ProgContext parseTree = parser.prog();
    ProgramMetaData metaData = getAntlrBuilder(gt).build(parseTree);

    Program program = new Program();
    program.metaData = metaData;
    program.res = resource;
    program.rs = resource;
    program.gt = gt;
    program.metaData.staticTextArray = new Object[textMap.size()];
    program.metaData.lineSeparator = cr;

    Configuration conf = gt.getConf();
    String charset = conf.getCharset();
    boolean byteOut = conf.isDirectByteOutput();
    // Materialize each static text segment once, in the form the renderer
    // will emit it: raw bytes for direct byte output, chars otherwise.
    int slot = 0;
    for (Entry<Integer, String> entry : textMap.entrySet()) {
        String text = entry.getValue();
        if (byteOut) {
            try {
                program.metaData.staticTextArray[slot++] = text.getBytes(charset);
            } catch (UnsupportedEncodingException e) {
                throw new RuntimeException(e);
            }
        } else {
            program.metaData.staticTextArray[slot++] = text.toCharArray();
        }
    }
    return program;
}
Aggregations