Usage of org.sonar.duplications.block.Block in the SonarSource project sonarqube.
Example from class IntegrateCrossProjectDuplicationsTest, method default_minimum_tokens_is_one_hundred:
@Test
public void default_minimum_tokens_is_one_hundred() {
  // Unsetting the property should make the engine fall back to the default
  // threshold; per the test name that default is 100 tokens, so a block
  // spanning units 0..100 is expected to be reported.
  settings.setProperty("sonar.cpd.xoo.minimumTokens", (Integer) null);

  Block originBlock = new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(30, 45)
    .setUnit(0, 100)
    .build();
  // Same hash as the origin block -> cross-project duplicate candidate.
  Block duplicatedBlock = new Block.Builder()
    .setResourceId(OTHER_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(40, 55)
    .build();

  underTest.computeCpd(ORIGIN_FILE, singletonList(originBlock), singletonList(duplicatedBlock));

  assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
    .containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
Usage of org.sonar.duplications.block.Block in the SonarSource project sonarqube.
Example from class IntegrateCrossProjectDuplicationsTest, method add_no_duplication_from_current_file:
@Test
public void add_no_duplication_from_current_file() {
  settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);

  // Both origin blocks share the same hash but belong to the SAME file:
  // an intra-file match must not be reported as a cross-project duplication.
  Block first = new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(30, 45)
    .setUnit(0, 10)
    .build();
  Block second = new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(46, 60)
    .setUnit(0, 10)
    .build();
  // The other file's block has a different hash, so it never matches.
  Block other = new Block.Builder()
    .setResourceId(OTHER_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ed"))
    .setIndexInFile(0)
    .setLines(40, 55)
    .build();

  underTest.computeCpd(ORIGIN_FILE, asList(first, second), singletonList(other));

  assertNoDuplicationAdded(ORIGIN_FILE);
}
Usage of org.sonar.duplications.block.Block in the SonarSource project sonarqube.
Example from class IntegrateCrossProjectDuplicationsTest, method add_no_duplication_when_not_enough_tokens:
@Test
public void add_no_duplication_when_not_enough_tokens() {
  settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);

  // Units 0..4 -> 5 tokens, which is below the configured minimum of 10,
  // so this block must not be considered a duplication.
  Block tooSmall = new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(30, 45)
    .setUnit(0, 4)
    .build();
  Block other = new Block.Builder()
    .setResourceId(OTHER_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(40, 55)
    .build();

  underTest.computeCpd(ORIGIN_FILE, singletonList(tooSmall), singletonList(other));

  assertNoDuplicationAdded(ORIGIN_FILE);
}
Usage of org.sonar.duplications.block.Block in the SonarSource project sonarqube.
Example from class JavaDuplicationsFunctionalTest, method detect:
/**
 * Chunks the given source lines into statements and blocks, indexes the
 * blocks in a fresh in-memory index, and runs clone detection over them.
 *
 * @param lines source code, one array element per line (joined with '\n')
 * @return the clone groups found by {@code detect(CloneIndex, List)}
 */
private List<CloneGroup> detect(String... lines) {
  // String.join is the idiomatic (and cheaper) equivalent of
  // Arrays.stream(lines).collect(joining("\n")).
  String sourceCode = String.join("\n", lines);
  MemoryCloneIndex index = new MemoryCloneIndex();
  List<Statement> statements = STATEMENT_CHUNKER.chunk(TOKEN_CHUNKER.chunk(sourceCode));
  List<Block> blocks = BLOCK_CHUNKER.chunk("resourceId", statements);
  blocks.forEach(index::insert);
  return detect(index, blocks);
}
Usage of org.sonar.duplications.block.Block in the SonarSource project sonarqube.
Example from class JavaDuplicationsFunctionalTest, method addToIndex:
/**
 * Chunks {@code sourceCode} into statements, then into blocks of size 2
 * labelled with {@code resourceId}, and inserts every block into the index.
 */
private static void addToIndex(CloneIndex index, String resourceId, String sourceCode) {
  List<Statement> stmts = STATEMENT_CHUNKER.chunk(TOKEN_CHUNKER.chunk(sourceCode));
  // Local chunker with block size 2 (the functional tests use tiny snippets).
  new BlockChunker(2).chunk(resourceId, stmts).forEach(index::insert);
}
Aggregations