Use of org.sonar.duplications.block.Block in project sonarqube by SonarSource: class DefaultCpdBlockIndexer, method populateIndex.
private void populateIndex(String languageKey, List<InputFile> sourceFiles, CpdMapping mapping) {
  TokenizerBridge bridge = new TokenizerBridge(mapping.getTokenizer(), fs.encoding().name(), getBlockSize(languageKey));
  for (InputFile inputFile : sourceFiles) {
    if (!index.isIndexed(inputFile)) {
      LOG.debug("Populating index from {}", inputFile.absolutePath());
      String resourceEffectiveKey = ((DefaultInputFile) inputFile).key();
      List<Block> blocks = bridge.chunk(resourceEffectiveKey, inputFile.file());
      index.insert(inputFile, blocks);
    }
  }
}
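For reference, a minimal sketch of the kind of Block that bridge.chunk(...) returns and index.insert(...) stores. All values here are hypothetical; the builder methods are the ones exercised by the tests further down this page.

import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;

Block block = Block.builder()
  .setResourceId("myModule:src/Foo.java") // hypothetical resource effective key
  .setBlockHash(new ByteArray("a8998353e96320ec")) // content hash used to match duplicate blocks
  .setIndexInFile(0) // position of the block within the file
  .setLines(10, 19) // first and last source line covered by the block
  .setUnit(0, 9) // first and last token (unit) index
  .build();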
Use of org.sonar.duplications.block.Block in project sonarqube by SonarSource: class JavaCpdBlockIndexer, method createIndex.
private void createIndex(Iterable<InputFile> sourceFiles) {
  TokenChunker tokenChunker = JavaTokenProducer.build();
  StatementChunker statementChunker = JavaStatementBuilder.build();
  BlockChunker blockChunker = new BlockChunker(BLOCK_SIZE);
  for (InputFile inputFile : sourceFiles) {
    LOG.debug("Populating index from {}", inputFile);
    String resourceEffectiveKey = ((DefaultInputFile) inputFile).key();
    List<Statement> statements;
    try (InputStream is = new FileInputStream(inputFile.file());
        Reader reader = new InputStreamReader(is, fs.encoding())) {
      statements = statementChunker.chunk(tokenChunker.chunk(reader));
    } catch (FileNotFoundException e) {
      throw new IllegalStateException("Cannot find file " + inputFile.file(), e);
    } catch (IOException e) {
      throw new IllegalStateException("Exception handling file: " + inputFile.file(), e);
    }
    List<Block> blocks = blockChunker.chunk(resourceEffectiveKey, statements);
    index.insert(inputFile, blocks);
  }
}
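The same token-to-statement-to-block pipeline can be exercised standalone. A minimal sketch, assuming the chunker classes live in their usual sonar-duplications packages; the source string, block size, and resource key are chosen for illustration only.

import java.io.StringReader;
import java.util.List;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.BlockChunker;
import org.sonar.duplications.java.JavaStatementBuilder;
import org.sonar.duplications.java.JavaTokenProducer;
import org.sonar.duplications.statement.Statement;
import org.sonar.duplications.statement.StatementChunker;
import org.sonar.duplications.token.TokenChunker;

public class ChunkingSketch {
  public static void main(String[] args) {
    TokenChunker tokenChunker = JavaTokenProducer.build();
    StatementChunker statementChunker = JavaStatementBuilder.build();
    BlockChunker blockChunker = new BlockChunker(2); // block size of 2 statements, for illustration

    String source = "class Foo { void bar() { int a = 0; a++; a--; } }"; // illustrative input
    List<Statement> statements = statementChunker.chunk(tokenChunker.chunk(new StringReader(source)));
    List<Block> blocks = blockChunker.chunk("myModule:src/Foo.java", statements); // hypothetical key
    for (Block b : blocks) {
      System.out.println(b.getIndexInFile() + ": lines " + b.getStartLine() + "-" + b.getEndLine());
    }
  }
}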
Use of org.sonar.duplications.block.Block in project sonarqube by SonarSource: class CpdExecutorTest, method timeout.
@Test
public void timeout() {
  for (int i = 1; i <= 2; i++) {
    DefaultInputFile component = createComponent("src/Foo" + i + ".php", 100);
    List<Block> blocks = new ArrayList<>();
    for (int j = 1; j <= 10000; j++) {
      // Every block carries the same hash, which makes duplication detection pathologically expensive.
      blocks.add(Block.builder()
        .setResourceId(component.key())
        .setIndexInFile(j)
        .setLines(j, j + 1)
        .setUnit(j, j + 1)
        .setBlockHash(new ByteArray("abcd1234".getBytes()))
        .build());
    }
    index.insert((InputFile) component, blocks);
  }
  // Execute with a tiny timeout so detection is aborted for both files.
  executor.execute(1);
  readDuplications(0);
  // The expected messages are matched as regular expressions.
  assertThat(logTester.logs(LoggerLevel.WARN))
    .usingElementComparator((l, r) -> l.matches(r) ? 0 : 1)
    .containsOnly(
      "Timeout during detection of duplications for .*Foo1.php",
      "Timeout during detection of duplications for .*Foo2.php");
}
Use of org.sonar.duplications.block.Block in project sonarqube by SonarSource: class IntegrateCrossProjectDuplicationsTest, method add_duplication_for_java_even_when_no_token.
@Test
public void add_duplication_for_java_even_when_no_token() {
  Component javaFile = builder(FILE, 1)
    .setKey(ORIGIN_FILE_KEY)
    .setFileAttributes(new FileAttributes(false, "java", 10))
    .build();
  // This block contains 0 tokens (its unit range is empty).
  Collection<Block> originBlocks = singletonList(new Block.Builder()
    .setResourceId(ORIGIN_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(30, 45)
    .setUnit(0, 0)
    .build());
  Collection<Block> duplicatedBlocks = singletonList(new Block.Builder()
    .setResourceId(OTHER_FILE_KEY)
    .setBlockHash(new ByteArray("a8998353e96320ec"))
    .setIndexInFile(0)
    .setLines(40, 55)
    .build());
  underTest.computeCpd(javaFile, originBlocks, duplicatedBlocks);
  assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
    .containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
Use of org.sonar.duplications.block.Block in project sonarqube by SonarSource: class IntegrateCrossProjectDuplicationsTest, method add_duplications_from_two_blocks.
@Test
public void add_duplications_from_two_blocks() {
  settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
  Collection<Block> originBlocks = asList(
    new Block.Builder()
      .setResourceId(ORIGIN_FILE_KEY)
      .setBlockHash(new ByteArray("a8998353e96320ec"))
      .setIndexInFile(0)
      .setLines(30, 43)
      .setUnit(0, 5)
      .build(),
    new Block.Builder()
      .setResourceId(ORIGIN_FILE_KEY)
      .setBlockHash(new ByteArray("2b5747f0e4c59124"))
      .setIndexInFile(1)
      .setLines(32, 45)
      .setUnit(5, 20)
      .build());
  Collection<Block> duplicatedBlocks = asList(
    new Block.Builder()
      .setResourceId(OTHER_FILE_KEY)
      .setBlockHash(new ByteArray("a8998353e96320ec"))
      .setIndexInFile(0)
      .setLines(40, 53)
      .build(),
    new Block.Builder()
      .setResourceId(OTHER_FILE_KEY)
      .setBlockHash(new ByteArray("2b5747f0e4c59124"))
      .setIndexInFile(1)
      .setLines(42, 55)
      .build());
  underTest.computeCpd(ORIGIN_FILE, originBlocks, duplicatedBlocks);
  assertThat(duplicationRepository.getDuplications(ORIGIN_FILE))
    .containsExactly(crossProjectDuplication(new TextBlock(30, 45), OTHER_FILE_KEY, new TextBlock(40, 55)));
}
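To see how matching block hashes turn into reported duplicates, block collections like the ones above can also be fed to the library's own detector. A sketch, assuming the PackedMemoryCloneIndex and SuffixTreeCloneDetectionAlgorithm classes from sonar-duplications; all keys and values are hypothetical.

import java.util.Collections;
import java.util.List;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;
import org.sonar.duplications.detector.suffixtree.SuffixTreeCloneDetectionAlgorithm;
import org.sonar.duplications.index.CloneGroup;
import org.sonar.duplications.index.ClonePart;
import org.sonar.duplications.index.PackedMemoryCloneIndex;

public class DetectionSketch {
  public static void main(String[] args) {
    PackedMemoryCloneIndex index = new PackedMemoryCloneIndex();
    // Two files sharing one block hash; ids and line ranges are hypothetical.
    Block origin = Block.builder().setResourceId("ORIGIN_FILE").setBlockHash(new ByteArray("a8998353e96320ec"))
      .setIndexInFile(0).setLines(30, 45).setUnit(0, 10).build();
    Block other = Block.builder().setResourceId("OTHER_FILE").setBlockHash(new ByteArray("a8998353e96320ec"))
      .setIndexInFile(0).setLines(40, 55).setUnit(0, 10).build();
    index.insert(origin);
    index.insert(other);

    // Detect clone groups for the origin file's blocks and print each matching part.
    List<CloneGroup> groups = SuffixTreeCloneDetectionAlgorithm.detect(index, Collections.singletonList(origin));
    for (CloneGroup group : groups) {
      for (ClonePart part : group.getCloneParts()) {
        System.out.println(part.getResourceId() + " lines " + part.getStartLine() + "-" + part.getEndLine());
      }
    }
  }
}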