Use of org.sonar.ce.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer in project sonarqube by SonarSource.
From class FileSourceDataComputerTest, method compute_calls_read_for_each_line_and_passe_read_error_to_fileSourceDataWarnings:
@Test
public void compute_calls_read_for_each_line_and_passe_read_error_to_fileSourceDataWarnings() {
  int lineCount = 1 + new Random().nextInt(10);
  List<String> lines = IntStream.range(0, lineCount).mapToObj(i -> "line" + i).collect(toList());
  when(sourceLinesRepository.readLines(FILE)).thenReturn(CloseableIterator.from(lines.iterator()));
  when(sourceLineReadersFactory.getLineReaders(FILE)).thenReturn(lineReaders);
  when(sourceLinesHashRepository.getLineHashesComputerToPersist(FILE)).thenReturn(lineHashesComputer);
  // mock an implementation that will call the ReadErrorConsumer in order to verify that the provided consumer is
  // doing what we expect: pass readError to fileSourceDataWarnings
  int randomStartPoint = new Random().nextInt(500);
  doAnswer(new Answer() {
    int i = randomStartPoint;

    @Override
    public Object answer(InvocationOnMock invocation) {
      Consumer<LineReader.ReadError> readErrorConsumer = invocation.getArgument(1);
      readErrorConsumer.accept(new LineReader.ReadError(LineReader.Data.SYMBOLS, i++));
      return null;
    }
  }).when(lineReaders).read(any(), any());

  underTest.compute(FILE, fileSourceDataWarnings);

  ArgumentCaptor<DbFileSources.Line.Builder> lineBuilderCaptor = ArgumentCaptor.forClass(DbFileSources.Line.Builder.class);
  verify(lineReaders, times(lineCount)).read(lineBuilderCaptor.capture(), any());
  assertThat(lineBuilderCaptor.getAllValues()).extracting(DbFileSources.Line.Builder::getSource).containsOnlyElementsOf(lines);
  assertThat(lineBuilderCaptor.getAllValues()).extracting(DbFileSources.Line.Builder::getLine).containsExactly(IntStream.range(1, lineCount + 1).boxed().toArray(Integer[]::new));
  ArgumentCaptor<LineReader.ReadError> readErrorCaptor = ArgumentCaptor.forClass(LineReader.ReadError.class);
  verify(fileSourceDataWarnings, times(lineCount)).addWarning(same(FILE), readErrorCaptor.capture());
  assertThat(readErrorCaptor.getAllValues()).extracting(LineReader.ReadError::getLine).containsExactly(IntStream.range(randomStartPoint, randomStartPoint + lineCount).boxed().toArray(Integer[]::new));
}
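The stateful Answer above is a common Mockito idiom: keep a mutable counter inside the Answer so each invocation reports a distinct value through the callback it receives. Below is a minimal, self-contained sketch of the same pattern; the Reader interface and onError parameter are hypothetical names introduced only for this example, not SonarQube types.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class StatefulAnswerSketch {
  // Hypothetical collaborator, introduced only for this example.
  interface Reader {
    void read(StringBuilder target, Consumer<Integer> onError);
  }

  public static void main(String[] args) {
    Reader reader = mock(Reader.class);
    // Stateful Answer: the counter survives across invocations, so every call
    // reports a different line number through the consumer it was given.
    doAnswer(new Answer<Void>() {
      int line = 0;

      @Override
      public Void answer(InvocationOnMock invocation) {
        Consumer<Integer> onError = invocation.getArgument(1);
        onError.accept(line++);
        return null;
      }
    }).when(reader).read(any(), any());

    List<Integer> reported = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
      reader.read(new StringBuilder(), reported::add);
    }
    System.out.println(reported); // prints [0, 1, 2]
  }
}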
Use of org.sonar.ce.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer in project sonarqube by SonarSource.
From class SourceLinesHashRepositoryImplTest, method should_persist_without_significant_code_from_cache_if_possible:
@Test
public void should_persist_without_significant_code_from_cache_if_possible() {
  List<String> lineHashes = Lists.newArrayList("line1", "line2", "line3");
  sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
  when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(true);
  when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.empty());

  LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);

  assertThat(hashesComputer).isInstanceOf(CachedLineHashesComputer.class);
  assertThat(hashesComputer.getResult()).isEqualTo(lineHashes);
}
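The assertion only relies on the computer handing back the hashes already cached for the file. Below is a rough, illustrative sketch of what such a cache-backed computer can look like; the interface shape (addLine/getResult) is assumed from how these tests use it and is not copied from SonarQube sources.

import java.util.List;

// Assumed shape of the computer, inferred from these tests; not the real SonarQube interface.
interface LineHashesComputerSketch {
  void addLine(String line);
  List<String> getResult();
}

// Cache-backed variant: lines fed during the analysis are ignored because the
// hashes were already computed and cached earlier in the task.
class CachedLineHashesComputerSketch implements LineHashesComputerSketch {
  private final List<String> cachedHashes;

  CachedLineHashesComputerSketch(List<String> cachedHashes) {
    this.cachedHashes = cachedHashes;
  }

  @Override
  public void addLine(String line) {
    // no-op: the result is already known
  }

  @Override
  public List<String> getResult() {
    return cachedHashes;
  }
}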
Use of org.sonar.ce.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer in project sonarqube by SonarSource.
From class SourceLinesHashRepositoryImplTest, method should_persist_with_significant_code_from_cache_if_possible:
@Test
public void should_persist_with_significant_code_from_cache_if_possible() {
  List<String> lineHashes = Lists.newArrayList("line1", "line2", "line3");
  LineRange[] lineRanges = {new LineRange(0, 1), null, new LineRange(1, 5)};
  sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
  when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(false);
  when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));

  LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);

  assertThat(hashesComputer).isInstanceOf(CachedLineHashesComputer.class);
  assertThat(hashesComputer.getResult()).isEqualTo(lineHashes);
}
Use of org.sonar.ce.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer in project sonarqube by SonarSource.
From class SourceLinesHashRepositoryImplTest, method should_generate_to_persist_if_needed:
@Test
public void should_generate_to_persist_if_needed() {
  List<String> lineHashes = Lists.newArrayList("line1", "line2", "line3");
  LineRange[] lineRanges = {new LineRange(0, 1), null, new LineRange(1, 5)};
  sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);
  // DB has line hashes without significant code and significant code is available in the report, so we need to generate new line hashes
  when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(true);
  when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));

  LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);

  assertThat(hashesComputer).isInstanceOf(SignificantCodeLineHashesComputer.class);
}
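Taken together, the three repository tests pin down one selection rule: reuse the cached hashes unless the persisted hashes were computed without significant code while the report now provides significant-code ranges, in which case new hashes must be generated. The sketch below, reusing the illustrative LineHashesComputerSketch interface from above, shows that decision under those assumptions; it mirrors the behaviour the tests describe, not the production method body.

// Illustrative only: captures the rule the tests above assert, not the real implementation.
class LineHashesPersistDecisionSketch {

  // Stub standing in for the significant-code-aware computer of the real repository.
  static class SignificantCodeLineHashesComputerSketch implements LineHashesComputerSketch {
    @Override
    public void addLine(String line) {
      // would hash only the significant range of the line
    }

    @Override
    public java.util.List<String> getResult() {
      return java.util.Collections.emptyList();
    }
  }

  static LineHashesComputerSketch choose(java.util.List<String> cachedHashes,
    boolean dbHashesLackSignificantCode, boolean reportHasSignificantCode) {
    if (dbHashesLackSignificantCode && reportHasSignificantCode) {
      // persisted hashes are stale with respect to significant code: recompute them
      return new SignificantCodeLineHashesComputerSketch();
    }
    // otherwise the hashes already cached for the file can be persisted as-is
    return new CachedLineHashesComputerSketch(cachedHashes);
  }
}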
Use of org.sonar.ce.task.projectanalysis.source.SourceLinesHashRepositoryImpl.LineHashesComputer in project sonarqube by SonarSource.
From class FileSourceDataComputerTest, method compute_builds_data_object_from_lines:
@Test
public void compute_builds_data_object_from_lines() {
  int lineCount = 1 + new Random().nextInt(10);
  int randomStartPoint = new Random().nextInt(500);
  List<String> lines = IntStream.range(0, lineCount).mapToObj(i -> "line" + i).collect(toList());
  List<String> expectedLineHashes = IntStream.range(0, 1 + new Random().nextInt(12)).mapToObj(i -> "str_" + i).collect(toList());
  Changeset expectedChangeset = Changeset.newChangesetBuilder().setDate((long) new Random().nextInt(9_999)).build();
  String expectedSrcHash = computeSrcHash(lines);
  CloseableIterator<String> lineIterator = spy(CloseableIterator.from(lines.iterator()));
  DbFileSources.Data.Builder expectedLineDataBuilder = DbFileSources.Data.newBuilder();
  for (int i = 0; i < lines.size(); i++) {
    expectedLineDataBuilder.addLinesBuilder()
      .setSource(lines.get(i))
      .setLine(i + 1)
      .setScmAuthor("reader_called_" + (randomStartPoint + i));
  }
  when(sourceLinesRepository.readLines(FILE)).thenReturn(lineIterator);
  when(sourceLineReadersFactory.getLineReaders(FILE)).thenReturn(lineReaders);
  when(sourceLinesHashRepository.getLineHashesComputerToPersist(FILE)).thenReturn(lineHashesComputer);
  when(lineHashesComputer.getResult()).thenReturn(expectedLineHashes);
  when(lineReaders.getLatestChangeWithRevision()).thenReturn(expectedChangeset);
  // mocked implementation of LineReader.read to ensure changes done by it to the lineBuilder argument actually end
  // up in the FileSourceDataComputer.Data object returned
  doAnswer(new Answer() {
    int i = 0;

    @Override
    public Object answer(InvocationOnMock invocation) {
      DbFileSources.Line.Builder lineBuilder = invocation.getArgument(0);
      lineBuilder.setScmAuthor("reader_called_" + (randomStartPoint + i++));
      return null;
    }
  }).when(lineReaders).read(any(), any());

  FileSourceDataComputer.Data data = underTest.compute(FILE, fileSourceDataWarnings);

  assertThat(data.getLineHashes()).isEqualTo(expectedLineHashes);
  assertThat(data.getSrcHash()).isEqualTo(expectedSrcHash);
  assertThat(data.getLatestChangeWithRevision()).isSameAs(expectedChangeset);
  assertThat(data.getLineData()).isEqualTo(expectedLineDataBuilder.build());

  verify(lineIterator).close();
  verify(lineReaders).close();
}
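The spy around CloseableIterator is what lets this test check resource cleanup: the real iterator still drives the loop, while Mockito records the close() call for the later verify. Below is a minimal sketch of the same spy-then-verify pattern on a plain AutoCloseable; the CloseableListIterator and consume names are invented for the example and are not SonarQube's CloseableIterator API.

import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;

import java.util.Iterator;
import java.util.List;

public class SpyCloseSketch {

  // Tiny stand-in for a closeable iterator; not SonarQube's CloseableIterator.
  static class CloseableListIterator implements Iterator<String>, AutoCloseable {
    private final Iterator<String> delegate;

    CloseableListIterator(List<String> lines) {
      this.delegate = lines.iterator();
    }

    @Override
    public boolean hasNext() {
      return delegate.hasNext();
    }

    @Override
    public String next() {
      return delegate.next();
    }

    @Override
    public void close() {
      // release underlying resources here
    }
  }

  // Code under test: must close the iterator once it is fully consumed.
  static void consume(CloseableListIterator it) {
    try {
      while (it.hasNext()) {
        it.next();
      }
    } finally {
      it.close();
    }
  }

  public static void main(String[] args) {
    // spy() wraps the real object: iteration behaves normally, but calls are recorded.
    CloseableListIterator it = spy(new CloseableListIterator(List.of("line1", "line2")));

    consume(it);

    // Fails if consume() forgot to close the iterator.
    verify(it).close();
  }
}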