Use of com.intellij.dupLocator.treeHash.FragmentsCollector in the project intellij-community by JetBrains.
The checkFile method of the class DuplicatesInspectionBase: it collects duplicated-code fragments either through a LightDuplicateProfile callback (light AST path) or through a FragmentsCollector handed to the profile's visitor, and then converts the reported ranges into ProblemDescriptors.
@Nullable
@Override
public ProblemDescriptor[] checkFile(@NotNull final PsiFile psiFile, @NotNull final InspectionManager manager, final boolean isOnTheFly) {
  final VirtualFile virtualFile = psiFile.getVirtualFile();
  if (!(virtualFile instanceof VirtualFileWithId) || /*!isOnTheFly || */
      !DuplicatesIndex.ourEnabled) return ProblemDescriptor.EMPTY_ARRAY;
  final DuplicatesProfile profile = DuplicatesIndex.findDuplicatesProfile(psiFile.getFileType());
  if (profile == null) return ProblemDescriptor.EMPTY_ARRAY;

  final Ref<DuplicatedCodeProcessor> myProcessorRef = new Ref<>();
  final FileASTNode node = psiFile.getNode();
  boolean usingLightProfile = profile instanceof LightDuplicateProfile &&
                              node.getElementType() instanceof ILightStubFileElementType &&
                              DuplicatesIndex.ourEnabledLightProfiles;
  if (usingLightProfile) {
    // Light path: the profile reports hashes for LighterASTNodes through LightDuplicateProfile.Callback.
    LighterAST ast = node.getLighterAST();
    ((LightDuplicateProfile)profile).process(ast, new LightDuplicateProfile.Callback() {
      DuplicatedCodeProcessor<LighterASTNode> myProcessor;

      @Override
      public void process(int hash, int hash2, @NotNull final LighterAST ast, @NotNull final LighterASTNode... nodes) {
        class LightDuplicatedCodeProcessor extends DuplicatedCodeProcessor<LighterASTNode> {
          private LightDuplicatedCodeProcessor(VirtualFile file, Project project) {
            super(file, project, myFilterOutGeneratedCode);
          }

          @Override
          protected TextRange getRangeInElement(LighterASTNode node) {
            return null;
          }

          @Override
          protected PsiElement getPsi(LighterASTNode node) {
            return ((TreeBackedLighterAST)ast).unwrap(node).getPsi();
          }

          @Override
          protected int getStartOffset(LighterASTNode node) {
            return node.getStartOffset();
          }

          @Override
          protected int getEndOffset(LighterASTNode node) {
            return node.getEndOffset();
          }

          @Override
          protected boolean isLightProfile() {
            return true;
          }
        }
        if (myProcessor == null) {
          myProcessor = new LightDuplicatedCodeProcessor(virtualFile, psiFile.getProject());
          myProcessorRef.set(myProcessor);
        }
        myProcessor.process(hash, hash2, nodes[0]);
      }
    });
  }
  else {
    // Old path: the profile's visitor feeds PsiFragments into a FragmentsCollector.
    final DuplocatorState state = profile.getDuplocatorState(psiFile.getLanguage());
    profile.createVisitor(new FragmentsCollector() {
      DuplicatedCodeProcessor<PsiFragment> myProcessor;

      @Override
      public void add(int hash, final int cost, @Nullable final PsiFragment frag) {
        if (!DuplicatesIndex.isIndexedFragment(frag, cost, profile, state)) {
          return;
        }
        class OldDuplicatedCodeProcessor extends DuplicatedCodeProcessor<PsiFragment> {
          private OldDuplicatedCodeProcessor(VirtualFile file, Project project) {
            super(file, project, myFilterOutGeneratedCode);
          }

          @Override
          protected TextRange getRangeInElement(PsiFragment node) {
            PsiElement[] elements = node.getElements();
            TextRange rangeInElement = null;
            if (elements.length > 1) {
              PsiElement lastElement = elements[elements.length - 1];
              rangeInElement = new TextRange(elements[0].getStartOffsetInParent(),
                                             lastElement.getStartOffsetInParent() + lastElement.getTextLength());
            }
            return rangeInElement;
          }

          @Override
          protected PsiElement getPsi(PsiFragment node) {
            PsiElement[] elements = node.getElements();
            return elements.length > 1 ? elements[0].getParent() : elements[0];
          }

          @Override
          protected int getStartOffset(PsiFragment node) {
            return node.getStartOffset();
          }

          @Override
          protected int getEndOffset(PsiFragment node) {
            return node.getEndOffset();
          }

          @Override
          protected boolean isLightProfile() {
            return false;
          }
        }
        if (myProcessor == null) {
          myProcessor = new OldDuplicatedCodeProcessor(virtualFile, psiFile.getProject());
          myProcessorRef.set(myProcessor);
        }
        myProcessor.process(hash, 0, frag);
      }
    }, true).visitNode(psiFile);
  }

  // Turn the ranges collected by whichever processor ran into problem descriptors.
  DuplicatedCodeProcessor<?> processor = myProcessorRef.get();
  final SmartList<ProblemDescriptor> descriptors = new SmartList<>();
  if (processor != null) {
    final VirtualFile baseDir = psiFile.getProject().getBaseDir();
    for (Map.Entry<Integer, TextRange> entry : processor.reportedRanges.entrySet()) {
      final Integer offset = entry.getKey();
      if (!usingLightProfile && processor.fragmentSize.get(offset) < MIN_FRAGMENT_SIZE) continue;
      final VirtualFile file = processor.reportedFiles.get(offset);
      String path = null;
      if (file.equals(virtualFile)) {
        path = "this file";
      }
      else if (baseDir != null) {
        path = VfsUtilCore.getRelativePath(file, baseDir);
      }
      if (path == null) {
        path = file.getPath();
      }
      String message = "Found duplicated code in " + path;

      PsiElement targetElement = processor.reportedPsi.get(offset);
      TextRange rangeInElement = entry.getValue();
      final int offsetInOtherFile = processor.reportedOffsetInOtherFiles.get(offset);

      LocalQuickFix fix = createNavigateToDupeFix(file, offsetInOtherFile);
      long hash = processor.fragmentHash.get(offset);
      LocalQuickFix viewAllDupesFix = hash != 0
                                      ? createShowOtherDupesFix(virtualFile, offset, (int)hash, (int)(hash >> 32), psiFile.getProject())
                                      : null;

      ProblemDescriptor descriptor = manager.createProblemDescriptor(targetElement, rangeInElement, message,
                                                                     ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
                                                                     isOnTheFly, fix, viewAllDupesFix);
      descriptors.add(descriptor);
    }
  }
  return descriptors.isEmpty() ? null : descriptors.toArray(new ProblemDescriptor[descriptors.size()]);
}
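
For context, a minimal sketch of a standalone FragmentsCollector is shown below. It is not part of the IntelliJ sources; it only illustrates the add(hash, cost, fragment) callback that the anonymous collector above implements. The class name CollectingFragmentsCollector and the group-by-hash behaviour are assumptions made for this example, and FragmentsCollector is treated as the single-method callback interface it appears to be in the snippet.

// A minimal sketch, not part of the IntelliJ sources: a FragmentsCollector that
// simply groups every reported PsiFragment by its hash. Only the
// add(hash, cost, frag) signature is taken from the snippet above.
import com.intellij.dupLocator.treeHash.FragmentsCollector;
import com.intellij.dupLocator.util.PsiFragment; // package as used in the IntelliJ sources
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class CollectingFragmentsCollector implements FragmentsCollector {
  // hash -> all fragments that were reported with that hash
  private final Map<Integer, List<PsiFragment>> myFragmentsByHash = new HashMap<>();

  @Override
  public void add(int hash, int cost, @Nullable PsiFragment frag) {
    if (frag == null) return;
    myFragmentsByHash.computeIfAbsent(hash, k -> new ArrayList<>()).add(frag);
  }

  Map<Integer, List<PsiFragment>> getFragmentsByHash() {
    return myFragmentsByHash;
  }
}

Such a collector would be driven the same way as the anonymous one in the else-branch above, for example profile.createVisitor(collector, true).visitNode(psiFile).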