LexerTest.class.getClassLoader().getResourceAsStream(tokensFile)));
} catch (IOException ex) {
throw new RuntimeException(ex);
}
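
// Parse the .tokens resource with the expectedTokensTestGrammar parser to build
// the list of tokens the smali lexer is expected to produce for this test.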
CommonTokenStream expectedTokensStream = new CommonTokenStream(expectedTokensLexer);
expectedTokensTestGrammarParser expectedTokensParser =
new expectedTokensTestGrammarParser(expectedTokensStream);
try {
expectedTokensParser.top();
} catch (RecognitionException ex) {
throw new RuntimeException(ex);
}
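// The expected tokens are collected as a side effect of parsing the top rule.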
List<ExpectedToken> expectedTokens = expectedTokensParser.getExpectedTokens();
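
// Load the .smali input under test from the classpath.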
InputStream smaliStream = LexerTest.class.getClassLoader().getResourceAsStream(smaliFile);
if (smaliStream == null) {
Assert.fail("Could not load " + smaliFile);
}
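
// Lex the smali input; error output is suppressed so the token comparison
// below, rather than printed errors, determines the test result.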
smaliFlexLexer lexer = new smaliFlexLexer(smaliStream);
lexer.setSourceFile(new File(test + ".smali"));
lexer.setSuppressErrors(true);
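
// Pull every token from the lexer into the token stream's buffer.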
CommonTokenStream tokenStream = new CommonTokenStream(lexer);
tokenStream.fill();
List tokens = tokenStream.getTokens();
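
// Walk the lexed tokens and compare each one against the expected list;
// size()-1 stops before the trailing EOF token.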
int expectedTokenIndex = 0;
CommonToken token;
for (int i=0; i<tokens.size()-1; i++) {
token = (CommonToken)tokens.get(i);