Example usage of org.sonar.duplications.index.ClonePart in the SonarQube project (SonarSource): class JavaDuplicationsFunctionalTest, method type3().
/**
 * Type 3 clones (duplicates with small edits) are not detected as a whole,
 * but the identical inner region shared by both fragments is still reported.
 */
@Test
public void type3() {
    String firstSnippet = source(
        "public int getSoLinger() throws SocketException {",
        " Object o = impl.getOption( SocketOptions.SO_LINGER);",
        " if (o instanceof Integer) {",
        " return((Integer) o).intValue();",
        " }",
        " else return -1;",
        "}");
    String secondSnippet = source(
        "public synchronized int getSoTimeout() throws SocketException {",
        " Object o = impl.getOption( SocketOptions.SO_TIMEOUT);",
        " if (o instanceof Integer) {",
        " return((Integer) o).intValue();",
        " }",
        " else return -0;",
        "}");
    // Run detection over the two near-identical fragments.
    List<CloneGroup> duplications = detect2(firstSnippet, secondSnippet);
    assertThat(duplications.size(), is(1));
    // Only the identical inner block (lines 3..6) is reported as a clone.
    ClonePart origin = duplications.get(0).getOriginPart();
    assertThat(origin.getStartLine(), is(3));
    assertThat(origin.getEndLine(), is(6));
}
Example usage of org.sonar.duplications.index.ClonePart in the SonarQube project (SonarSource): class DetectorTestCase, method same_lines_but_different_indexes().
/**
 * Given a file with two lines containing the following statements:
 * <pre>
 * 0: A,B,A,B
 * 1: A,B,A
 * </pre>
 * with block size 5, each block spans both lines and the hashes are:
 * <pre>
 * A,B,A,B,A=1
 * B,A,B,A,B=2
 * A,B,A,B,A=1
 * </pre>
 * Expected: one clone with two parts that cover exactly the same lines.
 */
@Test
public void same_lines_but_different_indexes() {
    CloneIndex index = createIndex();
    // All three blocks share the same resource and line span; only the
    // hash and the index within the file differ.
    Block.Builder builder = Block.builder().setResourceId("a").setLines(0, 1);
    Block first = builder.setBlockHash(new ByteArray("1".getBytes())).setIndexInFile(0).build();
    Block second = builder.setBlockHash(new ByteArray("2".getBytes())).setIndexInFile(1).build();
    Block third = builder.setBlockHash(new ByteArray("1".getBytes())).setIndexInFile(2).build();
    List<CloneGroup> clones = detect(index, new Block[] { first, second, third });
    print(clones);
    assertThat(clones.size(), is(1));
    CloneGroup clone = clones.iterator().next();
    assertThat(clone.getCloneUnitLength(), is(1));
    // Two parts over identical lines (0..1), distinguished only by unit index.
    assertThat(clone.getCloneParts().size(), is(2));
    assertThat(clone.getOriginPart(), is(new ClonePart("a", 0, 0, 1)));
    assertThat(clone.getCloneParts(), hasItem(new ClonePart("a", 0, 0, 1)));
    assertThat(clone.getCloneParts(), hasItem(new ClonePart("a", 2, 0, 1)));
}
Example usage of org.sonar.duplications.index.ClonePart in the SonarQube project (SonarSource): class CpdExecutorTest, method reportTooManyDuplications().
@Test
public void reportTooManyDuplications() throws Exception {
    // Build one clone group more than the per-file limit so that the
    // executor has to truncate and emit a warning.
    int groupCount = CpdExecutor.MAX_CLONE_GROUP_PER_FILE + 1;
    List<CloneGroup> groups = new ArrayList<>(groupCount);
    for (int i = 0; i < groupCount; i++) {
        ClonePart origin = new ClonePart(batchComponent1.key(), i, i, i + 1);
        ClonePart duplicate = new ClonePart(batchComponent1.key(), i + 1, i + 1, i + 2);
        groups.add(newCloneGroup(origin, duplicate));
    }
    executor.saveDuplications(batchComponent1, groups);
    // Only the first MAX_CLONE_GROUP_PER_FILE groups are persisted...
    assertThat(reader.readComponentDuplications(batchComponent1.batchId())).hasSize(CpdExecutor.MAX_CLONE_GROUP_PER_FILE);
    // ...and the truncation is reported at WARN level.
    assertThat(logTester.logs(LoggerLevel.WARN)).contains("Too many duplication groups on file " + batchComponent1 + ". Keep only the first " + CpdExecutor.MAX_CLONE_GROUP_PER_FILE + " groups.");
}
Aggregations