Example usage of org.sonar.api.batch.fs.internal.DefaultInputFile in the SonarQube project (by SonarSource): the remove method of the InputComponentStore class.
/**
 * Removes the given file from the input-file cache, keyed by its module key
 * and module-relative path.
 *
 * @param inputFile the file to evict; must be a {@link DefaultInputFile}
 * @return this store, to allow call chaining
 */
public InputComponentStore remove(InputFile inputFile) {
  DefaultInputFile toEvict = (DefaultInputFile) inputFile;
  inputFileCache.remove(toEvict.moduleKey(), toEvict.relativePath());
  return this;
}
Example usage of org.sonar.api.batch.fs.internal.DefaultInputFile in the SonarQube project (by SonarSource): the blameResult method of the DefaultBlameOutput class.
@Override
public synchronized void blameResult(InputFile file, List<BlameLine> lines) {
  // Guard against provider misbehavior: null arguments, files we never asked
  // to blame, or a line count that does not match the analyzed file.
  Preconditions.checkNotNull(file);
  Preconditions.checkNotNull(lines);
  Preconditions.checkArgument(allFilesToBlame.contains(file), "It was not expected to blame file %s", file.relativePath());
  if (lines.size() != file.lines()) {
    LOG.debug("Ignoring blame result since provider returned {} blame lines but file {} has {} lines", lines.size(), file.relativePath(), file.lines());
    return;
  }

  DefaultInputFile defaultFile = (DefaultInputFile) file;
  Builder changesets = ScannerReport.Changesets.newBuilder();
  changesets.setComponentRef(defaultFile.batchId());

  // Deduplicate changesets by SCM revision: each distinct revision is added
  // once, and every line stores the index of its revision's changeset.
  Map<String, Integer> changesetIndexByRevision = new HashMap<>();
  int currentLine = 1;
  for (BlameLine blameLine : lines) {
    validateLine(blameLine, currentLine, file);
    Integer changesetIndex = changesetIndexByRevision.get(blameLine.revision());
    if (changesetIndex == null) {
      addChangeset(changesets, blameLine);
      changesetIndex = changesets.getChangesetCount() - 1;
      changesetIndexByRevision.put(blameLine.revision(), changesetIndex);
    }
    changesets.addChangesetIndexByLine(changesetIndex);
    currentLine++;
  }
  writer.writeComponentChangesets(changesets.build());

  // Mark this file as done and report overall progress.
  allFilesToBlame.remove(file);
  count++;
  progressReport.message(count + "/" + total + " files analyzed");
}
Example usage of org.sonar.api.batch.fs.internal.DefaultInputFile in the SonarQube project (by SonarSource): the store method of the DefaultSensorStorage class.
@Override
public void store(DefaultCpdTokens defaultCpdTokens) {
  DefaultInputFile file = (DefaultInputFile) defaultCpdTokens.inputFile();
  // A file that produced CPD tokens must be published in the scanner report.
  file.setPublish(true);
  // Chunk the token lines into duplication blocks sized per language, then
  // feed them to the duplication index under the file's component key.
  PmdBlockChunker chunker = new PmdBlockChunker(getBlockSize(file.language()));
  List<Block> duplicationBlocks = chunker.chunk(file.key(), defaultCpdTokens.getTokenLines());
  index.insert(file, duplicationBlocks);
}
Example usage of org.sonar.api.batch.fs.internal.DefaultInputFile in the SonarQube project (by SonarSource): the scanProjectWithBranch test of the BranchMediumTest class.
@Test
public void scanProjectWithBranch() throws IOException {
  // Create a single xoo source file under src/.
  File sourceDir = new File(baseDir, "src");
  sourceDir.mkdir();
  File sampleFile = new File(sourceDir, "sample.xoo");
  FileUtils.write(sampleFile, "Sample xoo\ncontent");

  // First analysis: sonar.branch is set — the branch must appear in the
  // report metadata and the component path must stay module-relative.
  TaskResult result = tester.newTask()
    .properties(ImmutableMap.<String, String>builder().putAll(commonProps).put("sonar.branch", "branch").build())
    .start();
  assertThat(result.inputFiles()).hasSize(1);
  assertThat(result.inputFile("src/sample.xoo").key()).isEqualTo("com.foo.project:src/sample.xoo");
  DefaultInputFile indexedFile = (DefaultInputFile) result.inputFile("src/sample.xoo");
  assertThat(result.getReportReader().readComponent(indexedFile.batchId()).getPath()).isEqualTo("src/sample.xoo");
  assertThat(result.getReportReader().readMetadata().getBranch()).isEqualTo("branch");

  // Second analysis: empty sonar.branch — the file key is unchanged.
  result = tester.newTask()
    .properties(ImmutableMap.<String, String>builder().putAll(commonProps).put("sonar.branch", "").build())
    .start();
  assertThat(result.inputFiles()).hasSize(1);
  assertThat(result.inputFile("src/sample.xoo").key()).isEqualTo("com.foo.project:src/sample.xoo");
}
Example usage of org.sonar.api.batch.fs.internal.DefaultInputFile in the SonarQube project (by SonarSource): the testCrossModuleDuplications test of the CpdMediumTest class.
@Test
public void testCrossModuleDuplications() throws IOException {
  // Two modules, a low CPD token threshold, and verbose output.
  builder.put("sonar.modules", "module1,module2").put("sonar.cpd.xoo.minimumTokens", "10").put("sonar.verbose", "true");
  builder.put("module1.sonar.projectKey", "module1");
  builder.put("module1.sonar.projectName", "Module 1");
  builder.put("module1.sonar.sources", ".");
  builder.put("module2.sonar.projectKey", "module2");
  builder.put("module2.sonar.projectName", "Module 2");
  builder.put("module2.sonar.sources", ".");

  File module1Dir = new File(baseDir, "module1");
  File module2Dir = new File(baseDir, "module2");
  module1Dir.mkdir();
  module2Dir.mkdir();

  // Write identical content into one file per module so CPD detects a
  // cross-module duplication.
  String duplicatedStuff = "Sample xoo\ncontent\n" + "foo\nbar\ntoto\ntiti\n" + "foo\nbar\ntoto\ntiti\n" + "bar\ntoto\ntiti\n" + "foo\nbar\ntoto\ntiti";
  FileUtils.write(new File(module1Dir, "sample1.xoo"), duplicatedStuff);
  FileUtils.write(new File(module2Dir, "sample2.xoo"), duplicatedStuff);

  TaskResult result = tester.newTask().properties(builder.build()).start();
  assertThat(result.inputFiles()).hasSize(2);
  InputFile inputFile1 = result.inputFile("sample1.xoo");
  InputFile inputFile2 = result.inputFile("sample2.xoo");

  // Exactly one clone group on file 1, referencing file 2 as its duplicate.
  List<org.sonar.scanner.protocol.output.ScannerReport.Duplication> duplicationsOnFile1 = result.duplicationsFor(inputFile1);
  assertThat(duplicationsOnFile1).hasSize(1);
  org.sonar.scanner.protocol.output.ScannerReport.Duplication cloneGroup1 = duplicationsOnFile1.get(0);
  assertThat(cloneGroup1.getOriginPosition().getStartLine()).isEqualTo(1);
  assertThat(cloneGroup1.getOriginPosition().getEndLine()).isEqualTo(17);
  assertThat(cloneGroup1.getDuplicateList()).hasSize(1);
  assertThat(cloneGroup1.getDuplicate(0).getOtherFileRef()).isEqualTo(result.getReportComponent(((DefaultInputFile) inputFile2).key()).getRef());

  // And symmetrically: one clone group on file 2, referencing file 1.
  List<org.sonar.scanner.protocol.output.ScannerReport.Duplication> duplicationsOnFile2 = result.duplicationsFor(inputFile2);
  assertThat(duplicationsOnFile2).hasSize(1);
  org.sonar.scanner.protocol.output.ScannerReport.Duplication cloneGroup2 = duplicationsOnFile2.get(0);
  assertThat(cloneGroup2.getOriginPosition().getStartLine()).isEqualTo(1);
  assertThat(cloneGroup2.getOriginPosition().getEndLine()).isEqualTo(17);
  assertThat(cloneGroup2.getDuplicateList()).hasSize(1);
  assertThat(cloneGroup2.getDuplicate(0).getOtherFileRef()).isEqualTo(result.getReportComponent(((DefaultInputFile) inputFile1).key()).getRef());

  // Cross-module duplications are reported as duplications, not raw blocks.
  assertThat(result.duplicationBlocksFor(inputFile1)).isEmpty();
}
Aggregations