
Example 6 with Block

use of org.sonar.duplications.block.Block in project sonarqube by SonarSource.

From the class IntegrateCrossProjectDuplicationsTest, the method add_duplications_from_a_single_block:

@Test
public void add_duplications_from_a_single_block() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
    Collection<Block> originBlocks = singletonList(
      // This block contains 11 tokens -> a duplication will be created
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 10)
        .build());
    Collection<Block> duplicatedBlocks = singletonList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());
    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE)).containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
Also used : Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) Test(org.junit.Test)
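
The property key used here appears to follow the per-language pattern sonar.cpd.<languageKey>.minimumTokens, with xoo being the test language. A tiny illustrative sketch of how such a key could be assembled (hypothetical helper, not the actual SonarQube code):

static String minimumTokensPropertyKey(String languageKey) {
    // e.g. languageKey = "xoo" -> "sonar.cpd.xoo.minimumTokens"
    return "sonar.cpd." + languageKey + ".minimumTokens";
}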

Example 7 with Block

use of org.sonar.duplications.block.Block in project sonarqube by SonarSource.

From the class IntegrateCrossProjectDuplicationsTest, the method default_minimum_tokens_is_one_hundred:

@Test
public void default_minimum_tokens_is_one_hundred() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", (Integer) null);
    Collection<Block> originBlocks = singletonList(
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 100)
        .build());
    Collection<Block> duplicatedBlocks = singletonList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());
    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE)).containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
Also used : Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) Test(org.junit.Test)
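
The default of 100 tokens is asserted by the test name rather than shown directly. A minimal sketch of the fallback it implies, using a hypothetical helper rather than the actual SonarQube settings code:

static int resolveMinimumTokens(Integer configured) {
    // Hypothetical helper: fall back to 100 tokens when nothing is configured,
    // which is the default behaviour this test asserts.
    return configured != null ? configured : 100;
}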

Example 8 with Block

use of org.sonar.duplications.block.Block in project sonarqube by SonarSource.

From the class IntegrateCrossProjectDuplicationsTest, the method do_not_compute_more_than_one_hundred_duplications_when_too_many_duplications:

@Test
public void do_not_compute_more_than_one_hundred_duplications_when_too_many_duplications() throws Exception {
    Collection<Block> originBlocks = new ArrayList<>();
    Collection<Block> duplicatedBlocks = new ArrayList<>();
    Block.Builder blockBuilder = new Block.Builder().setIndexInFile(0).setLines(30, 45).setUnit(0, 100);
    // Generate more than 100 duplications on different files
    for (int i = 0; i < 110; i++) {
        String hash = padStart("hash" + i, 16, 'a');
        originBlocks.add(blockBuilder.setResourceId(ORIGIN_FILE_KEY).setBlockHash(new ByteArray(hash)).build());
        duplicatedBlocks.add(blockBuilder.setResourceId("resource" + i).setBlockHash(new ByteArray(hash)).build());
    }
    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
    assertThat(duplicationRepository.getDuplications(ORIGIN_FILE)).hasSize(100);
    assertThat(logTester.logs(LoggerLevel.WARN)).containsOnly("Too many duplication groups on file " + ORIGIN_FILE_KEY + ". Keeping only the first 100 groups.");
}
Also used : ArrayList(java.util.ArrayList) Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) Test(org.junit.Test)
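
The 100-group cap itself is not shown on this page; the following is a minimal sketch of a cap-and-warn pattern consistent with what the test asserts, with hypothetical names rather than the actual IntegrateCrossProjectDuplications code:

static <T> java.util.List<T> keepFirstGroups(java.util.List<T> groups, String fileKey) {
    int max = 100;
    if (groups.size() <= max) {
        return groups;
    }
    // Same wording as the WARN entry the test expects to find in logTester
    System.err.println("Too many duplication groups on file " + fileKey + ". Keeping only the first " + max + " groups.");
    return groups.subList(0, max);
}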

Example 9 with Block

use of org.sonar.duplications.block.Block in project sonarqube by SonarSource.

From the class IntegrateCrossProjectDuplicationsTest, the method add_no_duplication_when_not_enough_tokens:

@Test
public void add_no_duplication_when_not_enough_tokens() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
    Collection<Block> originBlocks = singletonList(
      // This block contains 5 tokens -> not enough to consider it as a duplication
      new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 4)
        .build());
    Collection<Block> duplicatedBlocks = singletonList(
      new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());
    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
    assertNoDuplicationAdded(ORIGIN_FILE);
}
Also used : Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) Test(org.junit.Test)
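
Examples 6 and 9 differ only in the token range passed to setUnit. A minimal sketch of the threshold arithmetic they exercise, assuming Block exposes its unit range via getStartUnit() and getEndUnit():

static boolean hasEnoughTokens(Block block, int minimumTokens) {
    // setUnit(0, 10) spans token indexes 0..10 -> 11 tokens (>= 10, duplication created);
    // setUnit(0, 4) spans token indexes 0..4 -> 5 tokens (< 10, no duplication).
    int tokenCount = block.getEndUnit() - block.getStartUnit() + 1;
    return tokenCount >= minimumTokens;
}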

Example 10 with Block

use of org.sonar.duplications.block.Block in project sonarqube by SonarSource.

From the class LoadCrossProjectDuplicationsRepositoryStepTest, the method call_compute_cpd_on_many_duplication:

@Test
public void call_compute_cpd_on_many_duplication() throws Exception {
    when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
    analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
    ComponentDto otherProject = createProject("OTHER_PROJECT_KEY");
    SnapshotDto otherProjectSnapshot = createProjectSnapshot(otherProject);
    ComponentDto otherFile = createFile("OTHER_FILE_KEY", otherProject);
    ScannerReport.CpdTextBlock originBlock1 = ScannerReport.CpdTextBlock.newBuilder()
      .setHash("a8998353e96320ec")
      .setStartLine(30)
      .setEndLine(45)
      .setStartTokenIndex(0)
      .setEndTokenIndex(10)
      .build();
    ScannerReport.CpdTextBlock originBlock2 = ScannerReport.CpdTextBlock.newBuilder()
      .setHash("b1234353e96320ff")
      .setStartLine(10)
      .setEndLine(25)
      .setStartTokenIndex(5)
      .setEndTokenIndex(15)
      .build();
    batchReportReader.putDuplicationBlocks(FILE_REF, asList(originBlock1, originBlock2));
    DuplicationUnitDto duplicate1 = new DuplicationUnitDto()
      .setHash(originBlock1.getHash())
      .setStartLine(40)
      .setEndLine(55)
      .setIndexInFile(0)
      .setAnalysisUuid(otherProjectSnapshot.getUuid())
      .setComponentUuid(otherFile.uuid());
    DuplicationUnitDto duplicate2 = new DuplicationUnitDto()
      .setHash(originBlock2.getHash())
      .setStartLine(20)
      .setEndLine(35)
      .setIndexInFile(1)
      .setAnalysisUuid(otherProjectSnapshot.getUuid())
      .setComponentUuid(otherFile.uuid());
    dbClient.duplicationDao().insert(dbSession, duplicate1);
    dbClient.duplicationDao().insert(dbSession, duplicate2);
    dbSession.commit();
    underTest.execute();
    Class<ArrayList<Block>> listClass = (Class<ArrayList<Block>>) (Class) ArrayList.class;
    ArgumentCaptor<ArrayList<Block>> originBlocks = ArgumentCaptor.forClass(listClass);
    ArgumentCaptor<ArrayList<Block>> duplicationBlocks = ArgumentCaptor.forClass(listClass);
    verify(integrateCrossProjectDuplications).computeCpd(eq(CURRENT_FILE), originBlocks.capture(), duplicationBlocks.capture());
    Map<Integer, Block> originBlocksByIndex = blocksByIndexInFile(originBlocks.getValue());
    assertThat(originBlocksByIndex.get(0)).isEqualTo(
      new Block.Builder()
        .setResourceId(CURRENT_FILE_KEY)
        .setBlockHash(new ByteArray(originBlock1.getHash()))
        .setIndexInFile(0)
        .setLines(originBlock1.getStartLine(), originBlock1.getEndLine())
        .setUnit(originBlock1.getStartTokenIndex(), originBlock1.getEndTokenIndex())
        .build());
    assertThat(originBlocksByIndex.get(1)).isEqualTo(
      new Block.Builder()
        .setResourceId(CURRENT_FILE_KEY)
        .setBlockHash(new ByteArray(originBlock2.getHash()))
        .setIndexInFile(1)
        .setLines(originBlock2.getStartLine(), originBlock2.getEndLine())
        .setUnit(originBlock2.getStartTokenIndex(), originBlock2.getEndTokenIndex())
        .build());
    Map<Integer, Block> duplicationBlocksByIndex = blocksByIndexInFile(duplicationBlocks.getValue());
    assertThat(duplicationBlocksByIndex.get(0)).isEqualTo(
      new Block.Builder()
        .setResourceId(otherFile.getKey())
        .setBlockHash(new ByteArray(originBlock1.getHash()))
        .setIndexInFile(duplicate1.getIndexInFile())
        .setLines(duplicate1.getStartLine(), duplicate1.getEndLine())
        .build());
    assertThat(duplicationBlocksByIndex.get(1)).isEqualTo(
      new Block.Builder()
        .setResourceId(otherFile.getKey())
        .setBlockHash(new ByteArray(originBlock2.getHash()))
        .setIndexInFile(duplicate2.getIndexInFile())
        .setLines(duplicate2.getStartLine(), duplicate2.getEndLine())
        .build());
}
Also used : DuplicationUnitDto(org.sonar.db.duplication.DuplicationUnitDto) SnapshotDto(org.sonar.db.component.SnapshotDto) ComponentDto(org.sonar.db.component.ComponentDto) ScannerReport(org.sonar.scanner.protocol.output.ScannerReport) ArrayList(java.util.ArrayList) Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) Test(org.junit.Test)
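
The test relies on a blocksByIndexInFile(...) utility that is not shown on this page. A plausible sketch of its shape, assuming it simply keys each captured block by Block#getIndexInFile():

private static Map<Integer, Block> blocksByIndexInFile(Collection<Block> blocks) {
    // Assumed helper shape; the real test utility may differ.
    Map<Integer, Block> blocksByIndex = new HashMap<>();
    for (Block block : blocks) {
        blocksByIndex.put(block.getIndexInFile(), block);
    }
    return blocksByIndex;
}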

Aggregations

Block (org.sonar.duplications.block.Block): 56
Test (org.junit.Test): 37
ByteArray (org.sonar.duplications.block.ByteArray): 29
CloneGroup (org.sonar.duplications.index.CloneGroup): 18
CloneIndex (org.sonar.duplications.index.CloneIndex): 16
CloneGroupMatcher.hasCloneGroup (org.sonar.duplications.detector.CloneGroupMatcher.hasCloneGroup): 15
ArrayList (java.util.ArrayList): 13
MemoryCloneIndex (org.sonar.duplications.index.MemoryCloneIndex): 12
DefaultInputFile (org.sonar.api.batch.fs.internal.DefaultInputFile): 8
ClonePart (org.sonar.duplications.index.ClonePart): 5
IOException (java.io.IOException): 4
List (java.util.List): 4
InputFile (org.sonar.api.batch.fs.InputFile): 4
Statement (org.sonar.duplications.statement.Statement): 3
File (java.io.File): 2
FileNotFoundException (java.io.FileNotFoundException): 2
Arrays (java.util.Arrays): 2
Collections (java.util.Collections): 2
HashMap (java.util.HashMap): 2
Assertions.assertThat (org.assertj.core.api.Assertions.assertThat): 2
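
Since Block and ByteArray dominate the counts above, here is a minimal, self-contained sketch of how a Block is built in all of the examples on this page; the resource id and hash below are placeholder values:

import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;

public class BlockBuilderExample {
    public static void main(String[] args) {
        Block block = new Block.Builder()
            // Placeholder component key and block hash
            .setResourceId("my_project:src/Foo.java")
            .setBlockHash(new ByteArray("a8998353e96320ec"))
            .setIndexInFile(0)
            .setLines(30, 45)
            .setUnit(0, 10)
            .build();
        // The built block can now be fed into a CloneIndex or a duplication computation
        System.out.println("Built block: " + block);
    }
}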