Use of net.sourceforge.pmd.cpd.Tokens in project sonarqube by SonarSource.
From the class TokenizerBridge, method chunk:
public List<TokensLine> chunk(File file) {
  SourceCode sourceCode = new SourceCode(new FileCodeLoaderWithoutCache(file, encoding));
  Tokens tokens = new Tokens();
  TokenEntry.clearImages();
  try {
    tokenizer.tokenize(sourceCode, tokens);
  } catch (IOException e) {
    throw Throwables.propagate(e);
  }
  TokenEntry.clearImages();
  return convert(tokens.getTokens());
}
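For context, a minimal sketch of how this chunk method might be driven from calling code. The PMD JavaTokenizer, the encoding and the block size of 10 are assumptions borrowed from the test below, not part of TokenizerBridge itself.

// Hypothetical caller (tokenizer, encoding and block size are illustrative):
List<TokensLine> tokenizeOneFile(File file) {
  TokenizerBridge bridge = new TokenizerBridge(new net.sourceforge.pmd.cpd.JavaTokenizer(), "UTF-8", 10);
  // chunk() reads the file with the given encoding and groups the CPD tokens by physical line
  return bridge.chunk(file);
}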
Use of net.sourceforge.pmd.cpd.Tokens in project sonarqube by SonarSource.
From the class TokenizerBridgeTest, method setUp:
@Before
public void setUp() {
  Tokenizer tokenizer = new Tokenizer() {
    public void tokenize(SourceCode tokens, Tokens tokenEntries) throws IOException {
      tokenEntries.add(new TokenEntry("t1", "src", 1));
      tokenEntries.add(new TokenEntry("t2", "src", 1));
      tokenEntries.add(new TokenEntry("t3", "src", 2));
      tokenEntries.add(new TokenEntry("t1", "src", 4));
      tokenEntries.add(new TokenEntry("t3", "src", 4));
      tokenEntries.add(new TokenEntry("t3", "src", 4));
      tokenEntries.add(TokenEntry.getEOF());
    }
  };
  bridge = new TokenizerBridge(tokenizer, "UTF-8", 10);
}
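A hedged sketch of a test method that could follow this setUp. It assumes that chunk groups token entries by physical line, so the six fake tokens on lines 1, 2 and 4 would collapse into three TokensLine objects; the temporary file and the assertion itself are illustrative, not taken from TokenizerBridgeTest.

@Test
public void shouldGroupFakeTokensByLine() throws Exception {
  // the fake tokenizer above never reads the file content, so any existing file will do
  File file = File.createTempFile("fake", ".src");
  List<TokensLine> lines = bridge.chunk(file);
  // expected: one TokensLine per source line (1, 2 and 4), assuming the EOF marker is skipped
  assertThat(lines).hasSize(3);
}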
Use of net.sourceforge.pmd.cpd.Tokens in project sonarqube by SonarSource.
From the class XooTokenizerTest, method testExecution:
@Test
public void testExecution() throws IOException {
  File source = new File(baseDir, "src/foo.xoo");
  FileUtils.write(source, "token1 token2 token3\ntoken4");
  DefaultInputFile inputFile = new TestInputFileBuilder("foo", "src/foo.xoo")
    .setLanguage("xoo")
    .setModuleBaseDir(baseDir.toPath())
    .build();
  fileSystem.add(inputFile);
  XooTokenizer tokenizer = new XooTokenizer(fileSystem);
  SourceCode sourceCode = mock(SourceCode.class);
  when(sourceCode.getFileName()).thenReturn(inputFile.absolutePath());
  Tokens cpdTokens = new Tokens();
  tokenizer.tokenize(sourceCode, cpdTokens);
  // 4 tokens + EOF
  assertThat(cpdTokens.getTokens()).hasSize(5);
  assertThat(cpdTokens.getTokens().get(3)).isEqualTo(new TokenEntry("token4", "src/foo.xoo", 2));
}
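As a small, hedged extension of this test, the same expectations could be spelled out through the TokenEntry accessors; getBeginLine() and the static getEOF() belong to the net.sourceforge.pmd.cpd API already used above, while the extra assertions themselves are illustrative.

// could be appended inside testExecution: make the EOF marker and the line number explicit
assertThat(cpdTokens.getTokens().get(4)).isEqualTo(TokenEntry.getEOF());
// "token4" is the only token on the second line of src/foo.xoo
assertThat(cpdTokens.getTokens().get(3).getBeginLine()).isEqualTo(2);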