Use of org.jf.smali.expectedTokensTestGrammarParser.ExpectedToken in the smali project by JesusFreke: the runTest method of the LexerTest class. The method lexes a .smali test resource with smaliFlexLexer and checks the resulting token stream against an expected-token listing parsed from a matching .tokens resource.
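The method body relies on two members that are not part of this excerpt: the ExpectedToken value holder (a nested class of expectedTokensTestGrammarParser, per the import above) and a tokenTypesByName lookup map. The following is a minimal sketch inferred only from how runTest uses them; the names, visibility, and map construction are assumptions, not the project's actual declarations, and it presumes java.util.Map/HashMap and the generated smaliParser class are available.

// Sketch only (assumptions inferred from runTest, not copied from the smali sources).
public static class ExpectedToken {
    public final String tokenName;   // symbolic token name, e.g. "ACCESS_SPEC" (hypothetical value)
    public final String tokenText;   // expected token text, or null to check the token type only

    public ExpectedToken(String tokenName, String tokenText) {
        this.tokenName = tokenName;
        this.tokenText = tokenText;
    }
}

// One plausible way to build the name -> type lookup: ANTLR 3 generated parsers expose a
// static tokenNames array whose index is the token type. The real LexerTest may build it differently.
private static final Map<String, Integer> tokenTypesByName = new HashMap<String, Integer>();
static {
    for (int i = 0; i < smaliParser.tokenNames.length; i++) {
        tokenTypesByName.put(smaliParser.tokenNames[i], i);
    }
}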
public void runTest(String test, boolean discardHiddenTokens) {
    // Resource paths for the input .smali file and the expected-token listing.
    String smaliFile = String.format("LexerTest%s%s.smali", File.separatorChar, test);
    String tokensFile = String.format("LexerTest%s%s.tokens", File.separatorChar, test);

    // Parse the .tokens resource into a list of ExpectedToken entries.
    org.jf.smali.expectedTokensTestGrammarLexer expectedTokensLexer = null;
    try {
        expectedTokensLexer = new org.jf.smali.expectedTokensTestGrammarLexer(
                new ANTLRInputStream(LexerTest.class.getClassLoader().getResourceAsStream(tokensFile)));
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }

    CommonTokenStream expectedTokensStream = new CommonTokenStream(expectedTokensLexer);
    org.jf.smali.expectedTokensTestGrammarParser expectedTokensParser =
            new org.jf.smali.expectedTokensTestGrammarParser(expectedTokensStream);
    try {
        expectedTokensParser.top();
    } catch (RecognitionException ex) {
        throw new RuntimeException(ex);
    }
    List<ExpectedToken> expectedTokens = expectedTokensParser.getExpectedTokens();

    // Lex the .smali resource with the lexer under test.
    InputStream smaliStream = LexerTest.class.getClassLoader().getResourceAsStream(smaliFile);
    if (smaliStream == null) {
        Assert.fail("Could not load " + smaliFile);
    }
    smaliFlexLexer lexer = new smaliFlexLexer(new InputStreamReader(smaliStream));
    lexer.setSourceFile(new File(test + ".smali"));
    lexer.setSuppressErrors(true);

    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    tokenStream.fill();
    List tokens = tokenStream.getTokens();

    // Compare the actual tokens against the expected list, skipping the trailing EOF token.
    int expectedTokenIndex = 0;
    CommonToken token;
    for (int i = 0; i < tokens.size() - 1; i++) {
        token = (CommonToken) tokens.get(i);

        if (discardHiddenTokens && token.getChannel() == smaliParser.HIDDEN) {
            continue;
        }

        if (expectedTokenIndex >= expectedTokens.size()) {
            Assert.fail("Too many tokens");
        }

        // Invalid tokens must be reported on the error channel.
        if (token.getType() == smaliParser.INVALID_TOKEN) {
            Assert.assertTrue("Encountered an INVALID_TOKEN not on the error channel",
                    token.getChannel() == smaliParser.ERROR_CHANNEL);
        }

        ExpectedToken expectedToken = expectedTokens.get(expectedTokenIndex++);
        if (!tokenTypesByName.containsKey(expectedToken.tokenName)) {
            Assert.fail("Unknown token: " + expectedToken.tokenName);
        }
        int expectedTokenType = tokenTypesByName.get(expectedToken.tokenName);

        if (token.getType() != expectedTokenType) {
            Assert.fail(String.format("Invalid token at index %d. Expecting %s, got %s(%s)",
                    expectedTokenIndex - 1, expectedToken.tokenName, getTokenName(token.getType()),
                    token.getText()));
        }

        // Token text is only checked when the expected entry specifies it.
        if (expectedToken.tokenText != null && !expectedToken.tokenText.equals(token.getText())) {
            Assert.fail(String.format("Invalid token text at index %d. Expecting text \"%s\", got \"%s\"",
                    expectedTokenIndex - 1, expectedToken.tokenText, token.getText()));
        }
    }

    if (expectedTokenIndex < expectedTokens.size()) {
        Assert.fail(String.format("Not enough tokens. Expecting %d tokens, but got %d",
                expectedTokens.size(), expectedTokenIndex));
    }
}
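For context, a caller pairs each test name with LexerTest/&lt;name&gt;.smali and LexerTest/&lt;name&gt;.tokens resources on the classpath. The JUnit method below is a hypothetical illustration only: the test name and resources are invented, and it assumes org.junit.Test is imported and that the method lives inside LexerTest.

// Hypothetical usage, not an actual test from the project.
@Test
public void byteLiteralTokens() {
    // true: drop whitespace/comment tokens that the lexer emits on the hidden channel
    runTest("ByteLiteralTest", true);
}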