use of org.sonar.duplications.block.ByteArray in project sonarqube by SonarSource.
the class SuffixTreeCloneDetectionAlgorithmTest method huge.
/**
 * See SONAR-3060
 * <p>
 * When a file contains many duplicated blocks, the suffix-tree algorithm performs far better than the original one,
 * which takes more than 5 minutes on this example.
 * </p><p>
 * Note, however, that the current suffix-tree implementation is still not optimal,
 * even though it handles this example in a couple of seconds,
 * because duplications should be filtered to remove fully-covered ones.
 * Such cases almost never appear in real-world code, so the current implementation is acceptable for the moment.
 * </p>
 */
@Test
public void huge() {
  CloneIndex index = createIndex();
  Block[] fileBlocks = new Block[5000];
  for (int i = 0; i < 5000; i++) {
    fileBlocks[i] = newBlock("x", new ByteArray("01"), i);
  }
  List<CloneGroup> result = detect(index, fileBlocks);
  assertEquals(1, result.size());
}
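Every one of the 5000 blocks carries the same hash ("01"), so the whole file collapses into a single CloneGroup. The newBlock helper comes from the shared DetectorTestCase fixture and is not shown on this page; the following is only a sketch of what an equivalent helper could look like, assembled from the Block.Builder calls that appear in the other snippets here, not taken from the actual fixture.

// Hypothetical stand-in for the newBlock(resourceId, hash, index) helper used above;
// the real fixture may differ. It simply wraps Block.Builder with one unit and one line span per block.
private static Block newBlock(String resourceId, ByteArray hash, int indexInFile) {
  return new Block.Builder()
    .setResourceId(resourceId)
    .setBlockHash(hash)
    .setIndexInFile(indexInFile)
    .setLines(indexInFile, indexInFile + 1)
    .setUnit(indexInFile, indexInFile + 1)
    .build();
}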
use of org.sonar.duplications.block.ByteArray in project sonarqube by SonarSource.
the class PackedMemoryCloneIndexTest method attempt_to_find_hash_of_incorrect_size.
/**
 * Given: an index that accepts blocks with a 4-byte hash.
 * Expected: an exception when searching with an 8-byte hash.
 */
@Test(expected = IllegalArgumentException.class)
public void attempt_to_find_hash_of_incorrect_size() {
  CloneIndex index = new PackedMemoryCloneIndex(4, 1);
  index.getBySequenceHash(new ByteArray(1L));
}
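The ByteArray(long) constructor packs the value into 8 bytes, while the index above was created for 4-byte hashes, hence the IllegalArgumentException. By contrast, here is a minimal sketch of a lookup that matches the configured hash size, assuming the hex-string constructor seen elsewhere on this page yields one byte per pair of hex digits; it is not part of the original test.

// Sketch only: an 8-character hex string yields a 4-byte hash, so the search
// is accepted (and simply returns no blocks for this empty index).
CloneIndex index = new PackedMemoryCloneIndex(4, 1);
index.getBySequenceHash(new ByteArray("00000001"));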
use of org.sonar.duplications.block.ByteArray in project sonarqube by SonarSource.
the class DetectorTestCase method only_one_query_of_index_for_each_unique_hash.
/**
 * Given: a file with repeated hashes.
 * Expected: only one index query for each unique hash.
 */
@Test
public void only_one_query_of_index_for_each_unique_hash() {
  CloneIndex index = spy(createIndex());
  Block[] fileBlocks = newBlocks("a", "1 2 1 2");
  detect(index, fileBlocks);
  verify(index).getBySequenceHash(new ByteArray("01"));
  verify(index).getBySequenceHash(new ByteArray("02"));
  verifyNoMoreInteractions(index);
}
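The block sequence for file "a" is 1 2 1 2, yet the Mockito spy records exactly one getBySequenceHash call per distinct hash. Below is a minimal sketch of the caching idea this test pins down, assuming ByteArray provides value-based equals/hashCode (its use as an index key throughout these snippets suggests it does) and that Block exposes its hash via getBlockHash(); it is not the detector's actual code.

// Sketch only: query the index once per unique hash and reuse the result for
// repeated blocks, instead of hitting the index for every block of the file.
Map<ByteArray, Collection<Block>> blocksByHash = new HashMap<>();
for (Block block : fileBlocks) {
  blocksByHash.computeIfAbsent(block.getBlockHash(), index::getBySequenceHash);
}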
use of org.sonar.duplications.block.ByteArray in project sonarqube by SonarSource.
the class CpdExecutorTest method timeout.
@Test
public void timeout() {
  for (int i = 1; i <= 2; i++) {
    DefaultInputFile component = createComponent("src/Foo" + i + ".php", 100);
    List<Block> blocks = new ArrayList<>();
    for (int j = 1; j <= 10000; j++) {
      blocks.add(Block.builder()
        .setResourceId(component.key())
        .setIndexInFile(j)
        .setLines(j, j + 1)
        .setUnit(j, j + 1)
        .setBlockHash(new ByteArray("abcd1234".getBytes()))
        .build());
    }
    index.insert((InputFile) component, blocks);
  }
  executor.execute(1);
  readDuplications(0);
  assertThat(logTester.logs(LoggerLevel.WARN))
    .usingElementComparator((l, r) -> l.matches(r) ? 0 : 1)
    .containsOnly(
      "Timeout during detection of duplications for .*Foo1.php",
      "Timeout during detection of duplications for .*Foo2.php");
}
use of org.sonar.duplications.block.ByteArray in project sonarqube by SonarSource.
the class IntegrateCrossProjectDuplicationsTest method add_no_duplication_from_current_file.
@Test
public void add_no_duplication_from_current_file() {
  settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
  Collection<Block> originBlocks = asList(
    new Block.Builder().setResourceId(ORIGIN_FILE_KEY).setBlockHash(new ByteArray("a8998353e96320ec")).setIndexInFile(0).setLines(30, 45).setUnit(0, 10).build(),
    // Duplication is on the same file
    new Block.Builder().setResourceId(ORIGIN_FILE_KEY).setBlockHash(new ByteArray("a8998353e96320ec")).setIndexInFile(0).setLines(46, 60).setUnit(0, 10).build());
  Collection<Block> duplicatedBlocks = singletonList(
    new Block.Builder().setResourceId(OTHER_FILE_KEY).setBlockHash(new ByteArray("a8998353e96320ed")).setIndexInFile(0).setLines(40, 55).build());
  underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
  assertNoDuplicationAdded(ORIGIN_FILE);
}
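No cross-project duplication is reported because the two origin blocks duplicate only each other within ORIGIN_FILE, while the block from OTHER_FILE_KEY carries a different hash (a8998353e96320ed versus a8998353e96320ec), so the files never share a block. A small sketch of that hash comparison, assuming ByteArray implements value equality as its use as an index key implies; it is not part of the original test.

// Sketch illustrating why nothing is reported: the two files never share a block hash.
ByteArray originHash = new ByteArray("a8998353e96320ec");
ByteArray otherHash = new ByteArray("a8998353e96320ed");
assertThat(originHash).isNotEqualTo(otherHash);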