Example usage of org.sonar.duplications.block.ByteArray in the SonarQube project (SonarSource): class LoadCrossProjectDuplicationsRepositoryStepTest, method call_compute_cpd_on_one_duplication.
@Test
public void call_compute_cpd_on_one_duplication() throws Exception {
  when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
  analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);

  // Persist one duplication unit belonging to a file of another project,
  // sharing the same block hash as the current file.
  ComponentDto remoteProject = createProject("OTHER_PROJECT_KEY");
  SnapshotDto remoteSnapshot = createProjectSnapshot(remoteProject);
  ComponentDto remoteFile = createFile("OTHER_FILE_KEY", remoteProject);
  String sharedHash = "a8998353e96320ec";
  DuplicationUnitDto storedDuplicate = new DuplicationUnitDto()
    .setHash(sharedHash)
    .setStartLine(40)
    .setEndLine(55)
    .setIndexInFile(0)
    .setAnalysisUuid(remoteSnapshot.getUuid())
    .setComponentUuid(remoteFile.uuid());
  dbClient.duplicationDao().insert(dbSession, storedDuplicate);
  dbSession.commit();

  // The scanner report declares a CPD block with the same hash in the current file.
  ScannerReport.CpdTextBlock reportedBlock = ScannerReport.CpdTextBlock.newBuilder()
    .setHash(sharedHash)
    .setStartLine(30)
    .setEndLine(45)
    .setStartTokenIndex(0)
    .setEndTokenIndex(10)
    .build();
  batchReportReader.putDuplicationBlocks(FILE_REF, asList(reportedBlock));

  underTest.execute();

  // The step must feed the CPD computation with the origin block (from the
  // report) and the duplicated block (from the database).
  verify(integrateCrossProjectDuplications).computeCpd(CURRENT_FILE,
    Arrays.asList(
      new Block.Builder()
        .setResourceId(CURRENT_FILE_KEY)
        .setBlockHash(new ByteArray(sharedHash))
        .setIndexInFile(0)
        .setLines(reportedBlock.getStartLine(), reportedBlock.getEndLine())
        .setUnit(reportedBlock.getStartTokenIndex(), reportedBlock.getEndTokenIndex())
        .build()),
    Arrays.asList(
      new Block.Builder()
        .setResourceId(remoteFile.getKey())
        .setBlockHash(new ByteArray(sharedHash))
        .setIndexInFile(storedDuplicate.getIndexInFile())
        .setLines(storedDuplicate.getStartLine(), storedDuplicate.getEndLine())
        .build()));
}
Example usage of org.sonar.duplications.block.ByteArray in the SonarQube project (SonarSource): class PackedMemoryCloneIndex, method createBlock.
/**
 * Materializes the {@link Block} stored at the given slot of the packed int array.
 * <p>
 * Each record occupies {@code blockInts} ints: {@code hashInts} ints of hash
 * followed by index-in-file, first line, last line, start unit and end unit.
 *
 * @param index    zero-based record index into the packed storage
 * @param resourceId resource the block belongs to
 * @param byteHash pre-built hash to reuse, or {@code null} to decode it from storage
 */
private Block createBlock(int index, String resourceId, @Nullable ByteArray byteHash) {
  int base = index * blockInts;
  ByteArray hash;
  if (byteHash != null) {
    // Caller already holds the hash object (e.g. the query key) — reuse it so
    // all returned blocks share the same instance.
    hash = byteHash;
  } else {
    int[] words = new int[hashInts];
    for (int k = 0; k < hashInts; k++) {
      words[k] = blockData[base + k];
    }
    hash = new ByteArray(words);
  }
  int cursor = base + hashInts;
  int indexInFile = blockData[cursor];
  int firstLine = blockData[cursor + 1];
  int lastLine = blockData[cursor + 2];
  int startUnit = blockData[cursor + 3];
  int endUnit = blockData[cursor + 4];
  return blockBuilder
    .setResourceId(resourceId)
    .setBlockHash(hash)
    .setIndexInFile(indexInFile)
    .setLines(firstLine, lastLine)
    .setUnit(startUnit, endUnit)
    .build();
}
Example usage of org.sonar.duplications.block.ByteArray in the SonarQube project (SonarSource): class PmdBlockChunker, method chunk.
/**
 * Builds rolling-hash blocks of {@code blockSize} consecutive token lines.
 * Consecutive lines with an identical value are first collapsed to their
 * first and last occurrence.
 *
 * @return ArrayList as we need a serializable object
 */
public List<Block> chunk(String resourceId, List<TokensLine> fragments) {
  // Collapse every run of consecutive equal-valued lines, keeping only the
  // first and (when the run is longer than one) the last element of the run.
  List<TokensLine> collapsed = new ArrayList<>();
  int runStart = 0;
  while (runStart < fragments.size()) {
    TokensLine head = fragments.get(runStart);
    int runEnd = runStart + 1;
    while (runEnd < fragments.size() && fragments.get(runEnd).getValue().equals(head.getValue())) {
      runEnd++;
    }
    collapsed.add(head);
    if (runEnd - runStart > 1) {
      collapsed.add(fragments.get(runEnd - 1));
    }
    runStart = runEnd;
  }
  if (collapsed.size() < blockSize) {
    // Not enough material for a single block.
    return new ArrayList<>();
  }
  TokensLine[] lines = collapsed.toArray(new TokensLine[0]);
  List<Block> blocks = new ArrayList<>(lines.length - blockSize + 1);

  // Seed the rolling hash with the first (blockSize - 1) lines.
  long rollingHash = 0;
  int tail = 0;
  for (; tail < blockSize - 1; tail++) {
    rollingHash = rollingHash * PRIME_BASE + lines[tail].getHashCode();
  }

  Block.Builder builder = Block.builder().setResourceId(resourceId);
  for (int headIdx = 0; tail < lines.length; tail++, headIdx++) {
    TokensLine firstLine = lines[headIdx];
    TokensLine lastLine = lines[tail];
    // Slide the window: fold in the newest line ...
    rollingHash = rollingHash * PRIME_BASE + lastLine.getHashCode();
    blocks.add(builder
      .setBlockHash(new ByteArray(rollingHash))
      .setIndexInFile(headIdx)
      .setLines(firstLine.getStartLine(), lastLine.getEndLine())
      .setUnit(firstLine.getStartUnit(), lastLine.getEndUnit())
      .build());
    // ... then retire the oldest one (power == PRIME_BASE^(blockSize-1)).
    rollingHash -= power * firstLine.getHashCode();
  }
  return blocks;
}
Example usage of org.sonar.duplications.block.ByteArray in the SonarQube project (SonarSource): class PackedMemoryCloneIndexTest, method test.
@Test
public void test() {
  // Index blocks across 5 distinct resources; "a" and "e" hold several blocks.
  index.insert(newBlock("a", 1));
  index.insert(newBlock("a", 2));
  index.insert(newBlock("b", 1));
  index.insert(newBlock("c", 1));
  index.insert(newBlock("d", 1));
  index.insert(newBlock("e", 1));
  index.insert(newBlock("e", 2));
  index.insert(newBlock("e", 3));

  // Use the Hamcrest assertThat(value, is(expected)) form consistently;
  // the original mixed one AssertJ-style isEqualTo() call into an otherwise
  // Hamcrest-based test class.
  assertThat(index.noResources(), is(5));

  // Lookup by hash: hash 1 appears in all 5 resources, hash 4 in none.
  assertThat(index.getBySequenceHash(new ByteArray(1L)).size(), is(5));
  assertThat(index.getBySequenceHash(new ByteArray(2L)).size(), is(2));
  assertThat(index.getBySequenceHash(new ByteArray(3L)).size(), is(1));
  assertThat(index.getBySequenceHash(new ByteArray(4L)).size(), is(0));

  // Lookup by resource id, including a miss.
  assertThat(index.getByResourceId("a").size(), is(2));
  assertThat(index.getByResourceId("b").size(), is(1));
  assertThat(index.getByResourceId("e").size(), is(3));
  assertThat(index.getByResourceId("does not exist").size(), is(0));
}
Example usage of org.sonar.duplications.block.ByteArray in the SonarQube project (SonarSource): class PackedMemoryCloneIndexTest, method should_construct_blocks_with_normalized_hash.
/**
 * When: query by a hash value.
 * Expected: all blocks should have same hash, which presented in the form of the same object.
 */
@Test
public void should_construct_blocks_with_normalized_hash() {
  index.insert(newBlock("a", 1));
  index.insert(newBlock("b", 1));
  index.insert(newBlock("c", 1));

  ByteArray queryHash = new ByteArray(1L);
  Collection<Block> matches = index.getBySequenceHash(queryHash);
  assertThat(matches.size(), is(3));
  // Every returned block must carry the very ByteArray instance used for the
  // query, not merely an equal copy.
  for (Block match : matches) {
    assertThat(match.getBlockHash(), sameInstance(queryHash));
  }
}
Aggregations