Search in sources:

Example 11 with ByteArray

Use of org.sonar.duplications.block.ByteArray in the project sonarqube by SonarSource.

The class SuffixTreeCloneDetectionAlgorithmTest, method huge.

/**
   * See SONAR-3060
   * <p>
   * When a file contains a lot of duplicated blocks, the suffix-tree works better than the original
   * algorithm, which runs for more than 5 minutes on this example.
   * </p><p>
   * However, it should be noted that the current suffix-tree implementation is also not optimal,
   * even though it handles this example in a couple of seconds,
   * because duplications should be filtered in order to remove fully-covered ones.
   * But such cases nearly never appear in the real world, so the current implementation is acceptable for the moment.
   * </p>
   */
@Test
public void huge() {
    // Single named constant instead of repeating the magic number 5000 twice.
    final int blockCount = 5000;
    CloneIndex index = createIndex();
    Block[] fileBlocks = new Block[blockCount];
    for (int i = 0; i < blockCount; i++) {
        // Every block carries the same hash ("01"); only the index differs.
        fileBlocks[i] = newBlock("x", new ByteArray("01"), i);
    }
    List<CloneGroup> result = detect(index, fileBlocks);
    // All identical blocks must collapse into exactly one clone group.
    assertEquals(1, result.size());
}
Also used : Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) CloneIndex(org.sonar.duplications.index.CloneIndex) CloneGroup(org.sonar.duplications.index.CloneGroup) CloneGroupMatcher.hasCloneGroup(org.sonar.duplications.detector.CloneGroupMatcher.hasCloneGroup) Test(org.junit.Test)

Example 12 with ByteArray

Use of org.sonar.duplications.block.ByteArray in the project sonarqube by SonarSource.

The class PackedMemoryCloneIndexTest, method attempt_to_find_hash_of_incorrect_size.

/**
   * Given: an index that accepts blocks with a 4-byte hash.
   * Expected: an exception when searching by an 8-byte hash.
   */
@Test(expected = IllegalArgumentException.class)
public void attempt_to_find_hash_of_incorrect_size() {
    CloneIndex fourByteIndex = new PackedMemoryCloneIndex(4, 1);
    // new ByteArray(1L) yields an 8-byte hash, which the 4-byte index must reject.
    fourByteIndex.getBySequenceHash(new ByteArray(1L));
}
Also used : ByteArray(org.sonar.duplications.block.ByteArray) Test(org.junit.Test)

Example 13 with ByteArray

Use of org.sonar.duplications.block.ByteArray in the project sonarqube by SonarSource.

The class DetectorTestCase, method only_one_query_of_index_for_each_unique_hash.

/**
   * Given: a file with repeated hashes.
   * Expected: only one query of the index for each unique hash.
   */
@Test
public void only_one_query_of_index_for_each_unique_hash() {
    CloneIndex spiedIndex = spy(createIndex());
    // "1 2 1 2" produces four blocks but only two distinct hashes: "01" and "02".
    detect(spiedIndex, newBlocks("a", "1 2 1 2"));
    for (String uniqueHash : new String[] { "01", "02" }) {
        verify(spiedIndex).getBySequenceHash(new ByteArray(uniqueHash));
    }
    verifyNoMoreInteractions(spiedIndex);
}
Also used : Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) MemoryCloneIndex(org.sonar.duplications.index.MemoryCloneIndex) CloneIndex(org.sonar.duplications.index.CloneIndex) Test(org.junit.Test)

Example 14 with ByteArray

Use of org.sonar.duplications.block.ByteArray in the project sonarqube by SonarSource.

The class CpdExecutorTest, method timeout.

/**
 * Indexes two files of 10000 blocks each, runs the executor with a 1-second budget,
 * and asserts that a timeout warning is logged for each file.
 */
@Test
public void timeout() {
    for (int fileNumber = 1; fileNumber <= 2; fileNumber++) {
        DefaultInputFile component = createComponent("src/Foo" + fileNumber + ".php", 100);
        List<Block> blocks = new ArrayList<>();
        for (int blockIndex = 1; blockIndex <= 10000; blockIndex++) {
            // Every block shares the same hash, so detection work is maximal.
            blocks.add(Block.builder()
                .setResourceId(component.key())
                .setIndexInFile(blockIndex)
                .setLines(blockIndex, blockIndex + 1)
                .setUnit(blockIndex, blockIndex + 1)
                .setBlockHash(new ByteArray("abcd1234".getBytes()))
                .build());
        }
        index.insert((InputFile) component, blocks);
    }
    executor.execute(1);
    readDuplications(0);
    // Log lines are matched as regexes: exactly one timeout warning per file.
    assertThat(logTester.logs(LoggerLevel.WARN))
        .usingElementComparator((l, r) -> l.matches(r) ? 0 : 1)
        .containsOnly(
            "Timeout during detection of duplications for .*Foo1.php",
            "Timeout during detection of duplications for .*Foo2.php");
}
Also used : InputFile(org.sonar.api.batch.fs.InputFile) Duplication(org.sonar.scanner.protocol.output.ScannerReport.Duplication) Arrays(java.util.Arrays) InputComponentStore(org.sonar.scanner.scan.filesystem.InputComponentStore) CloneGroup(org.sonar.duplications.index.CloneGroup) SonarCpdBlockIndex(org.sonar.scanner.cpd.index.SonarCpdBlockIndex) DefaultInputFile(org.sonar.api.batch.fs.internal.DefaultInputFile) Block(org.sonar.duplications.block.Block) ScannerReportWriter(org.sonar.scanner.protocol.output.ScannerReportWriter) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) ArrayList(java.util.ArrayList) Duplicate(org.sonar.scanner.protocol.output.ScannerReport.Duplicate) ClonePart(org.sonar.duplications.index.ClonePart) ScannerReportReader(org.sonar.scanner.protocol.output.ScannerReportReader) ExpectedException(org.junit.rules.ExpectedException) Before(org.junit.Before) ByteArray(org.sonar.duplications.block.ByteArray) PathResolver(org.sonar.api.scan.filesystem.PathResolver) CloseableIterator(org.sonar.core.util.CloseableIterator) IOException(java.io.IOException) Test(org.junit.Test) Mockito.when(org.mockito.Mockito.when) File(java.io.File) TestInputFileBuilder(org.sonar.api.batch.fs.internal.TestInputFileBuilder) List(java.util.List) ReportPublisher(org.sonar.scanner.report.ReportPublisher) Rule(org.junit.Rule) LogTester(org.sonar.api.utils.log.LogTester) Settings(org.sonar.api.config.Settings) MapSettings(org.sonar.api.config.MapSettings) Collections(java.util.Collections) TemporaryFolder(org.junit.rules.TemporaryFolder) LoggerLevel(org.sonar.api.utils.log.LoggerLevel) Mockito.mock(org.mockito.Mockito.mock) DefaultInputFile(org.sonar.api.batch.fs.internal.DefaultInputFile) ArrayList(java.util.ArrayList) Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) Test(org.junit.Test)

Example 15 with ByteArray

Use of org.sonar.duplications.block.ByteArray in the project sonarqube by SonarSource.

The class IntegrateCrossProjectDuplicationsTest, method add_no_duplication_from_current_file.

/**
 * The only hash match lies within the origin file itself (its two blocks share a hash),
 * while the other file's block differs in its last byte — so no cross-file duplication
 * may be reported for the origin file.
 */
@Test
public void add_no_duplication_from_current_file() {
    settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
    // Both origin blocks carry the identical hash: the duplication is on the same file.
    Block firstOriginBlock = new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(30, 45)
        .setUnit(0, 10)
        .build();
    Block secondOriginBlock = new Block.Builder()
        .setResourceId(ORIGIN_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ec"))
        .setIndexInFile(0)
        .setLines(46, 60)
        .setUnit(0, 10)
        .build();
    Collection<Block> originBlocks = asList(firstOriginBlock, secondOriginBlock);
    // The other file's hash ("…ed" vs "…ec") never matches the origin blocks.
    Collection<Block> duplicatedBlocks = singletonList(new Block.Builder()
        .setResourceId(OTHER_FILE_KEY)
        .setBlockHash(new ByteArray("a8998353e96320ed"))
        .setIndexInFile(0)
        .setLines(40, 55)
        .build());
    underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
    assertNoDuplicationAdded(ORIGIN_FILE);
}
Also used : Block(org.sonar.duplications.block.Block) ByteArray(org.sonar.duplications.block.ByteArray) Test(org.junit.Test)

Aggregations

ByteArray (org.sonar.duplications.block.ByteArray)23 Block (org.sonar.duplications.block.Block)20 Test (org.junit.Test)17 ArrayList (java.util.ArrayList)6 List (java.util.List)3 CloneGroup (org.sonar.duplications.index.CloneGroup)3 CloneIndex (org.sonar.duplications.index.CloneIndex)3 HashMap (java.util.HashMap)2 ComponentDto (org.sonar.db.component.ComponentDto)2 SnapshotDto (org.sonar.db.component.SnapshotDto)2 DuplicationUnitDto (org.sonar.db.duplication.DuplicationUnitDto)2 CloneGroupMatcher.hasCloneGroup (org.sonar.duplications.detector.CloneGroupMatcher.hasCloneGroup)2 ClonePart (org.sonar.duplications.index.ClonePart)2 MemoryCloneIndex (org.sonar.duplications.index.MemoryCloneIndex)2 ScannerReport (org.sonar.scanner.protocol.output.ScannerReport)2 File (java.io.File)1 IOException (java.io.IOException)1 Arrays (java.util.Arrays)1 Collections (java.util.Collections)1 HashSet (java.util.HashSet)1