Example use of org.sonar.duplications.block.Block in the SonarSource project sonarqube, taken from the class DefaultSensorStorage, method store:
@Override
public void store(NewCpdTokens cpdTokens) {
    // Narrow to the default implementations to reach internal accessors.
    DefaultCpdTokens tokens = (DefaultCpdTokens) cpdTokens;
    DefaultInputFile file = (DefaultInputFile) tokens.inputFile();
    // Mark the file for publication so its CPD data ends up in the report.
    file.setPublished(true);
    // Chunk the collected token lines into hash blocks sized for the file's language,
    // then feed them straight into the duplication index.
    PmdBlockChunker chunker = new PmdBlockChunker(getCpdBlockSize(file.language()));
    index.insert(file, chunker.chunk(file.key(), tokens.getTokenLines()));
}
Example use of org.sonar.duplications.block.Block in the SonarSource project sonarqube, taken from the class IntegrateCrossProjectDuplicationsTest, method add_no_duplication_from_current_file:
@Test
public void add_no_duplication_from_current_file() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
    // Both origin blocks share one hash: the duplication lives inside the origin file itself.
    Block firstOriginBlock = new Block.Builder().setResourceId(ORIGIN_FILE_KEY).setBlockHash(new ByteArray("a8998353e96320ec")).setIndexInFile(0).setLines(30, 45).setUnit(0, 10).build();
    Block secondOriginBlock = new Block.Builder().setResourceId(ORIGIN_FILE_KEY).setBlockHash(new ByteArray("a8998353e96320ec")).setIndexInFile(0).setLines(46, 60).setUnit(0, 10).build();
    // The cross-project block carries a different hash, so it can never match the origin blocks.
    Collection<Block> duplicatedBlocks = singletonList(new Block.Builder().setResourceId(OTHER_FILE_KEY).setBlockHash(new ByteArray("a8998353e96320ed")).setIndexInFile(0).setLines(40, 55).build());
    underTest.computeCpd(ORIGIN_FILE, asList(firstOriginBlock, secondOriginBlock), duplicatedBlocks);
    assertNoDuplicationAdded(ORIGIN_FILE);
}
Example use of org.sonar.duplications.block.Block in the SonarSource project sonarqube, taken from the class DefaultSensorStorage, method store (overload accepting DefaultCpdTokens directly):
@Override
public void store(DefaultCpdTokens defaultCpdTokens) {
    DefaultInputFile file = (DefaultInputFile) defaultCpdTokens.inputFile();
    // Flag the file so its CPD information is shipped with the report.
    file.setPublish(true);
    // Block size is looked up per language before chunking the token lines.
    PmdBlockChunker chunker = new PmdBlockChunker(getBlockSize(file.language()));
    List<Block> hashBlocks = chunker.chunk(file.key(), defaultCpdTokens.getTokenLines());
    index.insert(file, hashBlocks);
}
Example use of org.sonar.duplications.block.Block in the SonarSource project sonarqube, taken from the class IntegrateCrossProjectDuplicationsTest, method do_not_compute_more_than_one_hundred_duplications_when_too_many_duplications:
@Test
public void do_not_compute_more_than_one_hundred_duplications_when_too_many_duplications() {
    Collection<Block> originBlocks = new ArrayList<>();
    Collection<Block> duplicatedBlocks = new ArrayList<>();
    // One reusable builder; only resource id and hash change between iterations.
    Block.Builder template = new Block.Builder().setIndexInFile(0).setLines(30, 45).setUnit(0, 100);
    // Produce 110 matching pairs across distinct files — more than the 100-group cap.
    for (int i = 0; i < 110; i++) {
        String hash = padStart("hash" + i, 16, 'a');
        ByteArray hashBytes = new ByteArray(hash);
        originBlocks.add(template.setResourceId(ORIGIN_FILE_KEY).setBlockHash(hashBytes).build());
        duplicatedBlocks.add(template.setResourceId("resource" + i).setBlockHash(hashBytes).build());
    }
    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
    // Only the first 100 groups survive, and the truncation is logged as a warning.
    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE)).hasSize(100);
    assertThat(logTester.logs(LoggerLevel.WARN)).containsOnly("Too many duplication groups on file " + ORIGIN_FILE_KEY + ". Keeping only the first 100 groups.");
}
Example use of org.sonar.duplications.block.Block in the SonarSource project sonarqube, taken from the class IntegrateCrossProjectDuplicationsTest, method add_duplication_for_java_even_when_no_token:
@Test
public void add_duplication_for_java_even_when_no_token() {
    Component javaFile = builder(FILE, 1).setKey(ORIGIN_FILE_KEY).setFileAttributes(new FileAttributes(false, "java", 10)).build();
    // setUnit(0, 0): the origin block spans lines 30-45 but contains zero tokens.
    Block zeroTokenBlock = new Block.Builder().setResourceId(ORIGIN_FILE_KEY).setBlockHash(new ByteArray("a8998353e96320ec")).setIndexInFile(0).setLines(30, 45).setUnit(0, 0).build();
    // Same hash on the other file, so a cross-project match exists despite the empty token range.
    Collection<Block> duplicatedBlocks = singletonList(new Block.Builder().setResourceId(OTHER_FILE_KEY).setBlockHash(new ByteArray("a8998353e96320ec")).setIndexInFile(0).setLines(40, 55).build());
    underTest.computeCpd(javaFile, singletonList(zeroTokenBlock), duplicatedBlocks);
    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE)).containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
Aggregations