Use of org.antlr.runtime.ANTLRInputStream in project ceylon by eclipse.
The class Generate, method visitor.
private static void visitor(File file) throws Exception {
    InputStream is = new FileInputStream(file);
    ANTLRInputStream input = new ANTLRInputStream(is);
    VisitorgenLexer lexer = new VisitorgenLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    VisitorgenParser parser = new VisitorgenParser(tokens);
    File out = new File(GENERATED_PACKAGE_DIR + "Visitor.java");
    out.createNewFile();
    Util.out = new PrintStream(out);
    parser.nodeList();
}
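The method above follows the standard ANTLR 3 runtime pipeline: an ANTLRInputStream wraps the raw InputStream, a generated lexer turns the characters into tokens, a CommonTokenStream buffers them, and a generated parser consumes the stream starting from a start rule (here nodeList). A minimal sketch of that pipeline follows; MyLexer, MyParser, and startRule are hypothetical stand-ins for whatever ANTLR generates from a grammar, while the org.antlr.runtime classes are real.

private static void parse(File file) throws Exception {
    InputStream is = new FileInputStream(file);
    ANTLRInputStream input = new ANTLRInputStream(is);  // CharStream over the raw bytes
    MyLexer lexer = new MyLexer(input);                 // hypothetical generated lexer
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    MyParser parser = new MyParser(tokens);             // hypothetical generated parser
    parser.startRule();                                 // hypothetical start rule of the grammar
    is.close();
}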
Use of org.antlr.runtime.ANTLRInputStream in project ceylon by eclipse.
The class Generate, method validator.
private static void validator(File file) throws Exception {
    InputStream is = new FileInputStream(file);
    ANTLRInputStream input = new ANTLRInputStream(is);
    ValidatorgenLexer lexer = new ValidatorgenLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ValidatorgenParser parser = new ValidatorgenParser(tokens);
    File out = new File(GENERATED_PACKAGE_DIR + "Validator.java");
    out.createNewFile();
    Util.out = new PrintStream(out);
    parser.nodeList();
}
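The validator method repeats the same pipeline with a different grammar and output file; both snippets leave the FileInputStream and PrintStream open. A hedged variation, assuming Java 7+ and the same generated classes as above (this is a sketch, not the project's actual code), lets try-with-resources close both streams even if parsing fails:

private static void generateValidator(File file) throws Exception {
    try (InputStream is = new FileInputStream(file);
         PrintStream output = new PrintStream(new File(GENERATED_PACKAGE_DIR + "Validator.java"))) {
        ValidatorgenLexer lexer = new ValidatorgenLexer(new ANTLRInputStream(is));
        ValidatorgenParser parser = new ValidatorgenParser(new CommonTokenStream(lexer));
        Util.out = output;   // redirect generator output to the file, as in the snippet above
        parser.nodeList();   // start rule emits the Validator class
    }
}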
Use of org.antlr.runtime.ANTLRInputStream in project smali by JesusFreke.
The class LexerTest, method runTest.
public void runTest(String test, boolean discardHiddenTokens, int apiLevel) {
    String smaliFile = String.format("LexerTest%s%s.smali", File.separatorChar, test);
    String tokensFile = String.format("LexerTest%s%s.tokens", File.separatorChar, test);
    org.jf.smali.expectedTokensTestGrammarLexer expectedTokensLexer = null;
    try {
        expectedTokensLexer = new org.jf.smali.expectedTokensTestGrammarLexer(
                new ANTLRInputStream(LexerTest.class.getClassLoader().getResourceAsStream(tokensFile)));
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
    CommonTokenStream expectedTokensStream = new CommonTokenStream(expectedTokensLexer);
    org.jf.smali.expectedTokensTestGrammarParser expectedTokensParser =
            new org.jf.smali.expectedTokensTestGrammarParser(expectedTokensStream);
    try {
        expectedTokensParser.top();
    } catch (RecognitionException ex) {
        throw new RuntimeException(ex);
    }
    List<ExpectedToken> expectedTokens = expectedTokensParser.getExpectedTokens();
    InputStream smaliStream = LexerTest.class.getClassLoader().getResourceAsStream(smaliFile);
    if (smaliStream == null) {
        Assert.fail("Could not load " + smaliFile);
    }
    smaliFlexLexer lexer = new smaliFlexLexer(new InputStreamReader(smaliStream), apiLevel);
    lexer.setSourceFile(new File(test + ".smali"));
    lexer.setSuppressErrors(true);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    tokenStream.fill();
    List tokens = tokenStream.getTokens();
    int expectedTokenIndex = 0;
    CommonToken token;
    for (int i = 0; i < tokens.size() - 1; i++) {
        token = (CommonToken) tokens.get(i);
        if (discardHiddenTokens && token.getChannel() == smaliParser.HIDDEN) {
            continue;
        }
        if (expectedTokenIndex >= expectedTokens.size()) {
            Assert.fail("Too many tokens");
        }
        if (token.getType() == smaliParser.INVALID_TOKEN) {
            Assert.assertTrue("Encountered an INVALID_TOKEN not on the error channel",
                    token.getChannel() == smaliParser.ERROR_CHANNEL);
        }
        ExpectedToken expectedToken = expectedTokens.get(expectedTokenIndex++);
        if (!tokenTypesByName.containsKey(expectedToken.tokenName)) {
            Assert.fail("Unknown token: " + expectedToken.tokenName);
        }
        int expectedTokenType = tokenTypesByName.get(expectedToken.tokenName);
        if (token.getType() != expectedTokenType) {
            Assert.fail(String.format("Invalid token at index %d. Expecting %s, got %s(%s)",
                    expectedTokenIndex - 1, expectedToken.tokenName, getTokenName(token.getType()), token.getText()));
        }
        if (expectedToken.tokenText != null) {
            if (!expectedToken.tokenText.equals(token.getText())) {
                Assert.fail(String.format("Invalid token text at index %d. Expecting text \"%s\", got \"%s\"",
                        expectedTokenIndex - 1, expectedToken.tokenText, token.getText()));
            }
        }
    }
    if (expectedTokenIndex < expectedTokens.size()) {
        Assert.fail(String.format("Not enough tokens. Expecting %d tokens, but got %d",
                expectedTokens.size(), expectedTokenIndex));
    }
}
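This test uses ANTLRInputStream only for the expected-tokens description (parsed by an ANTLR-generated lexer and parser), then tokenizes the .smali input with smaliFlexLexer and compares the two token lists after buffering everything with CommonTokenStream.fill(). The core buffering-and-iteration pattern is sketched below; MyLexer is a hypothetical ANTLR 3 generated lexer, while CommonTokenStream, CommonToken, and Token.HIDDEN_CHANNEL are real org.antlr.runtime types.

private static void dumpTokens(InputStream in) throws Exception {
    MyLexer lexer = new MyLexer(new ANTLRInputStream(in));   // hypothetical generated lexer
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    tokenStream.fill();                                      // buffer every token up to EOF
    for (Object o : tokenStream.getTokens()) {               // getTokens() returns a raw List in ANTLR 3
        CommonToken token = (CommonToken) o;
        if (token.getChannel() == Token.HIDDEN_CHANNEL) {
            continue;                                        // skip hidden-channel (whitespace/comment) tokens
        }
        System.out.printf("%d\t%s%n", token.getType(), token.getText());
    }
}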