Use of net.sourceforge.pmd.cpd.TokenEntry in the project sonarqube by SonarSource.
From the class XooTokenizer, method tokenize.
@Override
public final void tokenize(SourceCode source, Tokens cpdTokens) {
  String fileName = source.getFileName();
  // This tokenizer implements the deprecated CPD extension point; log so its use is visible.
  LOG.info("Using deprecated tokenizer extension point to tokenize {}", fileName);
  int currentLine = 1;
  try {
    // Read the file with the configured file-system encoding, then emit one CPD token
    // per space-separated word, tagged with the 1-based line it came from.
    for (String lineContent : FileUtils.readLines(new File(fileName), fs.encoding())) {
      for (String word : Splitter.on(" ").split(lineContent)) {
        cpdTokens.add(new TokenEntry(word, fileName, currentLine));
      }
      currentLine++;
    }
  } catch (IOException e) {
    throw new IllegalStateException("Unable to tokenize", e);
  }
  // CPD requires a terminating EOF token after all real tokens.
  cpdTokens.add(TokenEntry.getEOF());
}
Use of net.sourceforge.pmd.cpd.TokenEntry in the project sonarqube by SonarSource.
From the class XooTokenizerTest, method testExecution.
@Test
public void testExecution() throws IOException {
  File source = new File(baseDir, "src/foo.xoo");
  // Write with an explicit charset: the two-arg FileUtils.write(File, CharSequence)
  // overload is deprecated and uses the platform default encoding, which may disagree
  // with the fs.encoding() the tokenizer uses when reading the file back.
  FileUtils.write(source, "token1 token2 token3\ntoken4", java.nio.charset.StandardCharsets.UTF_8);
  DefaultInputFile inputFile = new TestInputFileBuilder("foo", "src/foo.xoo").setLanguage("xoo").setModuleBaseDir(baseDir.toPath()).build();
  fileSystem.add(inputFile);
  XooTokenizer tokenizer = new XooTokenizer(fileSystem);
  // The tokenizer only consults SourceCode for the file name, so a mock suffices.
  SourceCode sourceCode = mock(SourceCode.class);
  when(sourceCode.getFileName()).thenReturn(inputFile.absolutePath());
  Tokens cpdTokens = new Tokens();
  tokenizer.tokenize(sourceCode, cpdTokens);
  // 4 tokens + EOF
  assertThat(cpdTokens.getTokens()).hasSize(5);
  assertThat(cpdTokens.getTokens().get(3)).isEqualTo(new TokenEntry("token4", "src/foo.xoo", 2));
}
Aggregations