Use of org.sonar.scanner.protocol.output.ScannerReport.Duplication in project sonarqube by SonarSource: class CpdExecutorTest, method reportTooManyDuplicates.
@Test
public void reportTooManyDuplicates() throws Exception {
  // 1 origin part + 101 duplicates = 102
  List<ClonePart> parts = new ArrayList<>(CpdExecutor.MAX_CLONE_PART_PER_GROUP + 2);
  for (int i = 0; i < CpdExecutor.MAX_CLONE_PART_PER_GROUP + 2; i++) {
    parts.add(new ClonePart(batchComponent1.key(), i, i, i + 1));
  }
  List<CloneGroup> groups = Arrays.asList(CloneGroup.builder().setLength(0).setOrigin(parts.get(0)).setParts(parts).build());
  executor.saveDuplications(batchComponent1, groups);

  Duplication[] dups = readDuplications(1);
  assertThat(dups[0].getDuplicateList()).hasSize(CpdExecutor.MAX_CLONE_PART_PER_GROUP);
  assertThat(logTester.logs(LoggerLevel.WARN)).contains("Too many duplication references on file " + batchComponent1 + " for block at line 0. Keep only the first " + CpdExecutor.MAX_CLONE_PART_PER_GROUP + " references.");
}
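The readDuplications(int) helper called above is not part of this excerpt. A minimal sketch of what such a helper might look like, assuming the test holds a ScannerReportReader over the directory the executor writes to and that ScannerReportReader exposes readComponentDuplications(int); the reader and componentRef fixtures are assumptions, not names taken from the excerpt:

import java.util.ArrayList;
import java.util.List;
import org.sonar.core.util.CloseableIterator;
import org.sonar.scanner.protocol.output.ScannerReport.Duplication;
import static org.assertj.core.api.Assertions.assertThat;

// Illustrative sketch only; "reader" and "componentRef" are assumed test fixtures.
private Duplication[] readDuplications(int expected) {
  List<Duplication> dups = new ArrayList<>();
  // Read back the duplications the executor wrote for batchComponent1.
  try (CloseableIterator<Duplication> it = reader.readComponentDuplications(componentRef)) {
    while (it.hasNext()) {
      dups.add(it.next());
    }
  }
  assertThat(dups).hasSize(expected);
  return dups.toArray(new Duplication[0]);
}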
Use of org.sonar.scanner.protocol.output.ScannerReport.Duplication in project sonarqube by SonarSource: class CpdExecutorTest, method should_limit_number_of_references.
@Test
public void should_limit_number_of_references() {
  // 1 origin part + 101 duplicates = 102
  List<ClonePart> parts = new ArrayList<>(CpdExecutor.MAX_CLONE_PART_PER_GROUP + 2);
  for (int i = 0; i < CpdExecutor.MAX_CLONE_PART_PER_GROUP + 2; i++) {
    parts.add(new ClonePart(batchComponent1.key(), i, i, i + 1));
  }
  List<CloneGroup> groups = Collections.singletonList(CloneGroup.builder().setLength(0).setOrigin(parts.get(0)).setParts(parts).build());
  executor.saveDuplications(batchComponent1, groups);

  Duplication[] dups = readDuplications(1);
  assertThat(dups[0].getDuplicateList()).hasSize(CpdExecutor.MAX_CLONE_PART_PER_GROUP);
  assertThat(logTester.logs(LoggerLevel.WARN)).contains("Too many duplication references on file " + batchComponent1 + " for block at line 0. Keep only the first " + CpdExecutor.MAX_CLONE_PART_PER_GROUP + " references.");
}
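Both tests assert the same capping behavior: when a clone group carries more than CpdExecutor.MAX_CLONE_PART_PER_GROUP duplicate references, only the first MAX_CLONE_PART_PER_GROUP are written to the scanner report and a warning is logged. A minimal, self-contained sketch of that idea follows; it is not the actual CpdExecutor.saveDuplications code, and the value 100 is only inferred from the "1 origin part + 101 duplicates = 102" comment above:

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch only, not the real CpdExecutor implementation.
final class DuplicationCapSketch {

  // Assumed to match CpdExecutor.MAX_CLONE_PART_PER_GROUP (inferred from the test comment).
  static final int MAX_CLONE_PART_PER_GROUP = 100;

  // Keeps at most MAX_CLONE_PART_PER_GROUP duplicate references and warns when some are dropped.
  static <T> List<T> capDuplicates(String fileKey, int originLine, List<T> duplicates) {
    if (duplicates.size() <= MAX_CLONE_PART_PER_GROUP) {
      return duplicates;
    }
    System.out.println("WARN: Too many duplication references on file " + fileKey
      + " for block at line " + originLine + ". Keep only the first "
      + MAX_CLONE_PART_PER_GROUP + " references.");
    return new ArrayList<>(duplicates.subList(0, MAX_CLONE_PART_PER_GROUP));
  }

  public static void main(String[] args) {
    List<Integer> refs = new ArrayList<>();
    for (int i = 0; i < 101; i++) {
      refs.add(i);
    }
    // Prints 100: the 101 duplicate references are capped, mirroring the hasSize(...) assertions above.
    System.out.println(capDuplicates("src/Foo.java", 0, refs).size());
  }
}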