Usage of org.sonar.duplications.block.ByteArray in the SonarSource project sonarqube: class DetectorTestCase, method only_one_query_of_index_for_each_unique_hash.
/**
 * Given: file with repeated hashes
 * Expected: only one query of index for each unique hash
 */
@Test
public void only_one_query_of_index_for_each_unique_hash() {
  // Wrap the index in a Mockito spy so every hash lookup can be counted.
  CloneIndex spiedIndex = spy(createIndex());
  // "1 2 1 2" yields each hash twice; the detector must de-duplicate lookups.
  Block[] blocksOfFile = newBlocks("a", "1 2 1 2");

  detect(spiedIndex, blocksOfFile);

  // Exactly one index query per distinct hash, and no other interaction.
  verify(spiedIndex).getBySequenceHash(new ByteArray("01"));
  verify(spiedIndex).getBySequenceHash(new ByteArray("02"));
  verifyNoMoreInteractions(spiedIndex);
}
Usage of org.sonar.duplications.block.ByteArray in the SonarSource project sonarqube: class CpdExecutorTest, method timeout.
@Test
public void timeout() {
  // Index two large files, each holding 10k blocks with an identical hash, so
  // that duplication detection cannot finish within the 1-second budget below.
  for (int fileNumber = 1; fileNumber <= 2; fileNumber++) {
    DefaultInputFile inputFile = createComponent("src/Foo" + fileNumber + ".php", 100);
    List<Block> fileBlocks = new ArrayList<>();
    for (int blockIndex = 1; blockIndex <= 10000; blockIndex++) {
      Block block = Block.builder()
        .setResourceId(inputFile.key())
        .setIndexInFile(blockIndex)
        .setLines(blockIndex, blockIndex + 1)
        .setUnit(blockIndex, blockIndex + 1)
        .setBlockHash(new ByteArray("abcd1234".getBytes()))
        .build();
      fileBlocks.add(block);
    }
    index.insert((InputFile) inputFile, fileBlocks);
  }

  executor.execute(1);
  readDuplications(0);

  // One timeout warning per file; messages are matched as regular expressions.
  assertThat(logTester.logs(LoggerLevel.WARN))
    .usingElementComparator((l, r) -> l.matches(r) ? 0 : 1)
    .containsOnly(
      "Timeout during detection of duplications for .*Foo1.php",
      "Timeout during detection of duplications for .*Foo2.php");
}
Usage of org.sonar.duplications.block.ByteArray in the SonarSource project sonarqube: class IntegrateCrossProjectDuplicationsTest, method add_duplication_for_java_even_when_no_token.
@Test
public void add_duplication_for_java_even_when_no_token() {
  // A file component whose language is "java" (non unit-test, 10 lines).
  Component javaFile = builder(FILE, 1)
    .setKey(ORIGIN_FILE_KEY)
    .setFileAttributes(new FileAttributes(false, "java", 10))
    .build();
  // This block contains 0 token
  Block originBlock = new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(30, 45)
    .setUnit(0, 0)
    .build();
  Block duplicateBlock = new Block.Builder()
    .setResourceId(OTHER_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(40, 55)
    .build();

  underTest.computeCpd(javaFile, singletonList(originBlock), singletonList(duplicateBlock));

  // The duplication is still reported even though the origin block has no token.
  assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
    .containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
Usage of org.sonar.duplications.block.ByteArray in the SonarSource project sonarqube: class IntegrateCrossProjectDuplicationsTest, method add_duplications_from_two_blocks.
@Test
public void add_duplications_from_two_blocks() {
  settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
  // Two overlapping origin blocks (lines 30-43 and 32-45) sharing hashes with
  // two matching blocks in the other file.
  Block firstOrigin = new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(30, 43)
    .setUnit(0, 5)
    .build();
  Block secondOrigin = new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("2b5747f0e4c59124"))
    .setIndexInFile(1)
    .setLines(32, 45)
    .setUnit(5, 20)
    .build();
  Block firstDuplicate = new Block.Builder()
    .setResourceId(OTHER_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(40, 53)
    .build();
  Block secondDuplicate = new Block.Builder()
    .setResourceId(OTHER_FILE_KEY)
    .setBlockHash(new ByteArray("2b5747f0e4c59124"))
    .setIndexInFile(1)
    .setLines(42, 55)
    .build();

  underTest.computeCpd(ORIGIN_FILE, asList(firstOrigin, secondOrigin), asList(firstDuplicate, secondDuplicate));

  // The two adjacent matches are reported as a single merged duplication (30-45 / 40-55).
  assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
    .containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
Usage of org.sonar.duplications.block.ByteArray in the SonarSource project sonarqube: class IntegrateCrossProjectDuplicationsTest, method add_duplications_from_a_single_block.
@Test
public void add_duplications_from_a_single_block() {
  settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
  // This block contains 11 tokens -> a duplication will be created
  Block originBlock = new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(30, 45)
    .setUnit(0, 10)
    .build();
  Block duplicateBlock = new Block.Builder()
    .setResourceId(OTHER_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(40, 55)
    .build();

  underTest.computeCpd(ORIGIN_FILE, singletonList(originBlock), singletonList(duplicateBlock));

  assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
    .containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
Aggregations