Use of org.sonar.ce.task.step.TestComputationStepContext in project sonarqube by SonarSource.
From the class FileMoveDetectionStepTest, method execute_detects_no_move_if_content_of_file_is_not_similar_enough.
@Test
public void execute_detects_no_move_if_content_of_file_is_not_similar_enough() {
  analysisMetadataHolder.setBaseAnalysis(ANALYSIS);
  Component file1 = fileComponent(FILE_1_REF, null);
  Component file2 = fileComponent(FILE_2_REF, LESS_CONTENT1);
  insertFiles(file1.getDbKey());
  insertContentOfFileInDb(file1.getDbKey(), CONTENT1);
  setFilesInReport(file2);
  TestComputationStepContext context = new TestComputationStepContext();

  underTest.execute(context);

  assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
  assertThat(scoreMatrixDumper.scoreMatrix.getMaxScore()).isPositive().isLessThan(MIN_REQUIRED_SCORE);
  assertThat(addedFileRepository.getComponents()).contains(file2);
  verifyStatistics(context, 1, 1, 1, 0);
}
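The verifyStatistics helper is defined elsewhere in the test class. A minimal sketch, assuming TestComputationStepContext exposes its recorded step statistics through getStatistics().assertValue(key, expectedValue) and that the statistic keys below match what the step records (both are assumptions here):

  private static void verifyStatistics(TestComputationStepContext context, @Nullable Integer expectedReportFiles,
    @Nullable Integer expectedDbFiles, @Nullable Integer expectedAddedFiles, @Nullable Integer expectedMovedFiles) {
    // hypothetical statistic keys; a null expected value would assert the statistic was never recorded
    context.getStatistics().assertValue("reportFiles", expectedReportFiles);
    context.getStatistics().assertValue("dbFiles", expectedDbFiles);
    context.getStatistics().assertValue("addedFiles", expectedAddedFiles);
    context.getStatistics().assertValue("movedFiles", expectedMovedFiles);
  }

Under that reading, the null argument in the next example asserts that the moved-files statistic is absent when move detection never ran.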
Use of org.sonar.ce.task.step.TestComputationStepContext in project sonarqube by SonarSource.
From the class FileMoveDetectionStepTest, method execute_detects_no_move_if_baseSnapshot_has_no_file.
@Test
public void execute_detects_no_move_if_baseSnapshot_has_no_file() {
  analysisMetadataHolder.setBaseAnalysis(ANALYSIS);
  Component file1 = fileComponent(FILE_1_REF, null);
  Component file2 = fileComponent(FILE_2_REF, null);
  setFilesInReport(file1, file2);
  TestComputationStepContext context = new TestComputationStepContext();

  underTest.execute(context);

  assertThat(movedFilesRepository.getComponentsWithOriginal()).isEmpty();
  assertThat(addedFileRepository.getComponents()).containsOnly(file1, file2);
  verifyStatistics(context, 2, 0, 2, null);
}
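fileComponent is another helper from the enclosing test class. A plausible sketch, assuming the ReportComponent test builder from the CE project-analysis test support is in scope; the uuid/name scheme and the handling of the content argument are illustrative assumptions, not the project's actual helper:

  private Component fileComponent(int ref, @Nullable String[] content) {
    // builds a FILE component for the analysis report; the real helper presumably also
    // stashes `content` so the similarity scoring can read the file's source lines
    return ReportComponent.builder(Component.Type.FILE, ref)
      .setUuid("uuid_" + ref)
      .setName("file_" + ref)
      .build();
  }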
Use of org.sonar.ce.task.step.TestComputationStepContext in project sonarqube by SonarSource.
From the class ExtractReportStepTest, method unzip_report.
@Test
public void unzip_report() throws Exception {
  logTester.setLevel(LoggerLevel.DEBUG);
  File reportFile = generateReport();
  try (InputStream input = FileUtils.openInputStream(reportFile)) {
    dbTester.getDbClient().ceTaskInputDao().insert(dbTester.getSession(), TASK_UUID, input);
  }
  dbTester.getSession().commit();
  dbTester.getSession().close();

  underTest.execute(new TestComputationStepContext());

  // directory contains the uncompressed report (which contains only metadata.pb in this test)
  File unzippedDir = reportDirectoryHolder.getDirectory();
  assertThat(unzippedDir).isDirectory().exists();
  assertThat(unzippedDir.listFiles()).hasSize(1);
  assertThat(new File(unzippedDir, "metadata.pb")).hasContent("{metadata}");
  assertThat(logTester.logs(LoggerLevel.DEBUG)).anyMatch(log -> log.matches("Analysis report is \\d+ bytes uncompressed"));
}
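generateReport() is defined elsewhere in the test class. A minimal sketch that would satisfy the assertions above, assuming a JUnit TemporaryFolder rule named temp is available (the rule name and zip layout are assumptions):

  private File generateReport() throws IOException {
    File zipFile = temp.newFile("report.zip");
    // the report is a plain zip; this test's report carries a single metadata.pb entry
    try (ZipOutputStream zip = new ZipOutputStream(new FileOutputStream(zipFile))) {
      zip.putNextEntry(new ZipEntry("metadata.pb"));
      zip.write("{metadata}".getBytes(StandardCharsets.UTF_8));
      zip.closeEntry();
    }
    return zipFile;
  }

With that input, the step unzips the blob stored through ceTaskInputDao into the directory exposed by reportDirectoryHolder, which is exactly what the assertions check.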
Use of org.sonar.ce.task.step.TestComputationStepContext in project sonarqube by SonarSource.
From the class LoadCrossProjectDuplicationsRepositoryStepTest, method nothing_to_do_when_cross_project_duplication_is_disabled.
@Test
public void nothing_to_do_when_cross_project_duplication_is_disabled() {
  when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(false);
  analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
  ComponentDto otherProject = createProject("OTHER_PROJECT_KEY");
  SnapshotDto otherProjectSnapshot = createProjectSnapshot(otherProject);
  ComponentDto otherFile = createFile("OTHER_FILE_KEY", otherProject);
  String hash = "a8998353e96320ec";
  DuplicationUnitDto duplicate = new DuplicationUnitDto().setHash(hash)
    .setStartLine(40).setEndLine(55).setIndexInFile(0)
    .setAnalysisUuid(otherProjectSnapshot.getUuid()).setComponentUuid(otherFile.uuid());
  dbClient.duplicationDao().insert(dbSession, duplicate);
  dbSession.commit();
  ScannerReport.CpdTextBlock originBlock = ScannerReport.CpdTextBlock.newBuilder()
    .setHash(hash).setStartLine(30).setEndLine(45)
    .setStartTokenIndex(0).setEndTokenIndex(10).build();
  batchReportReader.putDuplicationBlocks(FILE_REF, singletonList(originBlock));

  underTest.execute(new TestComputationStepContext());

  verifyZeroInteractions(integrateCrossProjectDuplications);
}
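verifyZeroInteractions is Mockito's assertion that the mocked IntegrateCrossProjectDuplications was never touched. Since Mockito 3 it is deprecated in favor of the equivalent verifyNoInteractions, so on a current Mockito the last line would read:

  // same assertion, non-deprecated API in Mockito 3+
  verifyNoInteractions(integrateCrossProjectDuplications);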
Use of org.sonar.ce.task.step.TestComputationStepContext in project sonarqube by SonarSource.
From the class LoadCrossProjectDuplicationsRepositoryStepTest, method call_compute_cpd_on_many_duplication.
@Test
public void call_compute_cpd_on_many_duplication() {
  when(crossProjectDuplicationStatusHolder.isEnabled()).thenReturn(true);
  analysisMetadataHolder.setBaseAnalysis(baseProjectAnalysis);
  ComponentDto otherProject = createProject("OTHER_PROJECT_KEY");
  SnapshotDto otherProjectSnapshot = createProjectSnapshot(otherProject);
  ComponentDto otherFile = createFile("OTHER_FILE_KEY", otherProject);
  ScannerReport.CpdTextBlock originBlock1 = ScannerReport.CpdTextBlock.newBuilder()
    .setHash("a8998353e96320ec").setStartLine(30).setEndLine(45)
    .setStartTokenIndex(0).setEndTokenIndex(10).build();
  ScannerReport.CpdTextBlock originBlock2 = ScannerReport.CpdTextBlock.newBuilder()
    .setHash("b1234353e96320ff").setStartLine(10).setEndLine(25)
    .setStartTokenIndex(5).setEndTokenIndex(15).build();
  batchReportReader.putDuplicationBlocks(FILE_REF, asList(originBlock1, originBlock2));
  DuplicationUnitDto duplicate1 = new DuplicationUnitDto().setHash(originBlock1.getHash())
    .setStartLine(40).setEndLine(55).setIndexInFile(0)
    .setAnalysisUuid(otherProjectSnapshot.getUuid()).setComponentUuid(otherFile.uuid());
  DuplicationUnitDto duplicate2 = new DuplicationUnitDto().setHash(originBlock2.getHash())
    .setStartLine(20).setEndLine(35).setIndexInFile(1)
    .setAnalysisUuid(otherProjectSnapshot.getUuid()).setComponentUuid(otherFile.uuid());
  dbClient.duplicationDao().insert(dbSession, duplicate1);
  dbClient.duplicationDao().insert(dbSession, duplicate2);
  dbSession.commit();

  underTest.execute(new TestComputationStepContext());

  Class<ArrayList<Block>> listClass = (Class<ArrayList<Block>>) (Class) ArrayList.class;
  ArgumentCaptor<ArrayList<Block>> originBlocks = ArgumentCaptor.forClass(listClass);
  ArgumentCaptor<ArrayList<Block>> duplicationBlocks = ArgumentCaptor.forClass(listClass);
  verify(integrateCrossProjectDuplications).computeCpd(eq(CURRENT_FILE), originBlocks.capture(), duplicationBlocks.capture());

  Map<Integer, Block> originBlocksByIndex = blocksByIndexInFile(originBlocks.getValue());
  assertThat(originBlocksByIndex.get(0)).isEqualTo(new Block.Builder()
    .setResourceId(CURRENT_FILE_KEY).setBlockHash(new ByteArray(originBlock1.getHash()))
    .setIndexInFile(0).setLines(originBlock1.getStartLine(), originBlock1.getEndLine())
    .setUnit(originBlock1.getStartTokenIndex(), originBlock1.getEndTokenIndex()).build());
  assertThat(originBlocksByIndex.get(1)).isEqualTo(new Block.Builder()
    .setResourceId(CURRENT_FILE_KEY).setBlockHash(new ByteArray(originBlock2.getHash()))
    .setIndexInFile(1).setLines(originBlock2.getStartLine(), originBlock2.getEndLine())
    .setUnit(originBlock2.getStartTokenIndex(), originBlock2.getEndTokenIndex()).build());

  Map<Integer, Block> duplicationBlocksByIndex = blocksByIndexInFile(duplicationBlocks.getValue());
  assertThat(duplicationBlocksByIndex.get(0)).isEqualTo(new Block.Builder()
    .setResourceId(otherFile.getDbKey()).setBlockHash(new ByteArray(originBlock1.getHash()))
    .setIndexInFile(duplicate1.getIndexInFile()).setLines(duplicate1.getStartLine(), duplicate1.getEndLine()).build());
  assertThat(duplicationBlocksByIndex.get(1)).isEqualTo(new Block.Builder()
    .setResourceId(otherFile.getDbKey()).setBlockHash(new ByteArray(originBlock2.getHash()))
    .setIndexInFile(duplicate2.getIndexInFile()).setLines(duplicate2.getStartLine(), duplicate2.getEndLine()).build());
}
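blocksByIndexInFile is a small helper from the test class. A plausible sketch using java.util.stream.Collectors, assuming org.sonar.duplications.block.Block exposes getIndexInFile() (the value is set through the builder above):

  private static Map<Integer, Block> blocksByIndexInFile(List<Block> blocks) {
    // index the captured blocks by their position in the file so assertions can address them directly
    return blocks.stream().collect(Collectors.toMap(Block::getIndexInFile, block -> block));
  }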