Use of com.intellij.dupLocator.util.PsiFragment in the intellij-community project (JetBrains): class DuplicatesInspectionBase, method checkFile().
@Nullable
@Override
public ProblemDescriptor[] checkFile(@NotNull final PsiFile psiFile, @NotNull final InspectionManager manager, final boolean isOnTheFly) {
  final VirtualFile virtualFile = psiFile.getVirtualFile();
  if (!(virtualFile instanceof VirtualFileWithId) || /*!isOnTheFly || */ !DuplicatesIndex.ourEnabled)
    return ProblemDescriptor.EMPTY_ARRAY;
  final DuplicatesProfile profile = DuplicatesIndex.findDuplicatesProfile(psiFile.getFileType());
  if (profile == null)
    return ProblemDescriptor.EMPTY_ARRAY;

  final Ref<DuplicatedCodeProcessor> myProcessorRef = new Ref<>();
  final FileASTNode node = psiFile.getNode();
  // The light profile hashes the lighter AST without building full PSI.
  boolean usingLightProfile = profile instanceof LightDuplicateProfile &&
                              node.getElementType() instanceof ILightStubFileElementType &&
                              DuplicatesIndex.ourEnabledLightProfiles;
  if (usingLightProfile) {
    LighterAST ast = node.getLighterAST();
    ((LightDuplicateProfile)profile).process(ast, new LightDuplicateProfile.Callback() {
      DuplicatedCodeProcessor<LighterASTNode> myProcessor;

      @Override
      public void process(int hash, int hash2, @NotNull final LighterAST ast, @NotNull final LighterASTNode... nodes) {
        class LightDuplicatedCodeProcessor extends DuplicatedCodeProcessor<LighterASTNode> {
          private LightDuplicatedCodeProcessor(VirtualFile file, Project project) {
            super(file, project, myFilterOutGeneratedCode);
          }

          @Override
          protected TextRange getRangeInElement(LighterASTNode node) {
            return null;
          }

          @Override
          protected PsiElement getPsi(LighterASTNode node) {
            return ((TreeBackedLighterAST)ast).unwrap(node).getPsi();
          }

          @Override
          protected int getStartOffset(LighterASTNode node) {
            return node.getStartOffset();
          }

          @Override
          protected int getEndOffset(LighterASTNode node) {
            return node.getEndOffset();
          }

          @Override
          protected boolean isLightProfile() {
            return true;
          }
        }
        // Lazily create a single processor per file and expose it to the reporting loop below.
        if (myProcessor == null) {
          myProcessor = new LightDuplicatedCodeProcessor(virtualFile, psiFile.getProject());
          myProcessorRef.set(myProcessor);
        }
        myProcessor.process(hash, hash2, nodes[0]);
      }
    });
  }
  else {
    final DuplocatorState state = profile.getDuplocatorState(psiFile.getLanguage());
    profile.createVisitor(new FragmentsCollector() {
      DuplicatedCodeProcessor<PsiFragment> myProcessor;

      @Override
      public void add(int hash, final int cost, @Nullable final PsiFragment frag) {
        if (!DuplicatesIndex.isIndexedFragment(frag, cost, profile, state)) {
          return;
        }
        class OldDuplicatedCodeProcessor extends DuplicatedCodeProcessor<PsiFragment> {
          private OldDuplicatedCodeProcessor(VirtualFile file, Project project) {
            super(file, project, myFilterOutGeneratedCode);
          }

          @Override
          protected TextRange getRangeInElement(PsiFragment node) {
            PsiElement[] elements = node.getElements();
            TextRange rangeInElement = null;
            if (elements.length > 1) {
              PsiElement lastElement = elements[elements.length - 1];
              rangeInElement = new TextRange(elements[0].getStartOffsetInParent(),
                                             lastElement.getStartOffsetInParent() + lastElement.getTextLength());
            }
            return rangeInElement;
          }

          @Override
          protected PsiElement getPsi(PsiFragment node) {
            PsiElement[] elements = node.getElements();
            return elements.length > 1 ? elements[0].getParent() : elements[0];
          }

          @Override
          protected int getStartOffset(PsiFragment node) {
            return node.getStartOffset();
          }

          @Override
          protected int getEndOffset(PsiFragment node) {
            return node.getEndOffset();
          }

          @Override
          protected boolean isLightProfile() {
            return false;
          }
        }
        if (myProcessor == null) {
          myProcessor = new OldDuplicatedCodeProcessor(virtualFile, psiFile.getProject());
          myProcessorRef.set(myProcessor);
        }
        myProcessor.process(hash, 0, frag);
      }
    }, true).visitNode(psiFile);
  }

  DuplicatedCodeProcessor<?> processor = myProcessorRef.get();
  final SmartList<ProblemDescriptor> descriptors = new SmartList<>();
  if (processor != null) {
    final VirtualFile baseDir = psiFile.getProject().getBaseDir();
    for (Map.Entry<Integer, TextRange> entry : processor.reportedRanges.entrySet()) {
      final Integer offset = entry.getKey();
      // PSI-based fragments below the size threshold are too small to report.
      if (!usingLightProfile && processor.fragmentSize.get(offset) < MIN_FRAGMENT_SIZE)
        continue;
      final VirtualFile file = processor.reportedFiles.get(offset);
      String path = null;
      if (file.equals(virtualFile))
        path = "this file";
      else if (baseDir != null) {
        path = VfsUtilCore.getRelativePath(file, baseDir);
      }
      if (path == null) {
        path = file.getPath();
      }
      String message = "Found duplicated code in " + path;
      PsiElement targetElement = processor.reportedPsi.get(offset);
      TextRange rangeInElement = entry.getValue();
      final int offsetInOtherFile = processor.reportedOffsetInOtherFiles.get(offset);
      LocalQuickFix fix = createNavigateToDupeFix(file, offsetInOtherFile);
      long hash = processor.fragmentHash.get(offset);
      LocalQuickFix viewAllDupesFix = hash != 0 ? createShowOtherDupesFix(virtualFile, offset, (int)hash, (int)(hash >> 32), psiFile.getProject()) : null;
      ProblemDescriptor descriptor = manager.createProblemDescriptor(targetElement, rangeInElement, message,
                                                                     ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
                                                                     isOnTheFly, fix, viewAllDupesFix);
      descriptors.add(descriptor);
    }
  }
  return descriptors.isEmpty() ? null : descriptors.toArray(new ProblemDescriptor[descriptors.size()]);
}
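Both branches funnel into the same DuplicatedCodeProcessor; only the node type differs. Below is a minimal sketch of the contract the two local subclasses fill in. This is an assumed reduction, not the actual intellij-community source: the real DuplicatedCodeProcessor also carries the reported* maps that the descriptor loop reads.

import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;

// Sketch only: just the members overridden in the snippet above.
abstract class DuplicatedCodeProcessorContract<T> {
  // Range to highlight within the anchor element; null means the whole element.
  protected abstract TextRange getRangeInElement(T node);

  // PSI element the problem descriptor is attached to.
  protected abstract PsiElement getPsi(T node);

  // Absolute offsets of the duplicated fragment in the file.
  protected abstract int getStartOffset(T node);
  protected abstract int getEndOffset(T node);

  // True when hashes come from the lighter-AST profile.
  protected abstract boolean isLightProfile();
}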
Use of com.intellij.dupLocator.util.PsiFragment in the intellij-community project (JetBrains): class AbstractTreeHasher, method computeElementHash().
/**
 * Computes the element hash from the hashes of its children.
 * Creates only a single PsiFragment.
 */
protected TreeHashResult computeElementHash(@NotNull final PsiElement root, final PsiFragment upper, final NodeSpecificHasher hasher) {
  if (myForIndexing) {
    return TreeHashingUtils.computeElementHashForIndexing(this, myCallBack, root, upper, hasher);
  }
  ProgressManager.checkCanceled();
  final List<PsiElement> children = hasher.getNodeChildren(root);
  final int size = children.size();
  final int[] childHashes = new int[size];
  final int[] childCosts = new int[size];
  final PsiFragment fragment = buildFragment(hasher, root, getCost(root));
  if (upper != null) {
    fragment.setParent(upper);
  }
  if (size == 0 && !(root instanceof LeafElement)) {
    // Childless non-leaf node: hash the node itself.
    return new TreeHashResult(hasher.getNodeHash(root), hasher.getNodeCost(root), fragment);
  }
  for (int i = 0; i < size; i++) {
    final TreeHashResult res = hash(children.get(i), fragment, hasher);
    childHashes[i] = res.getHash();
    childCosts[i] = res.getCost();
  }
  final int c = hasher.getNodeCost(root) + vector(childCosts);
  final int h1 = hasher.getNodeHash(root);
  final int discardCost = getDiscardCost(root);
  // Cheap children are ignored so trivial differences do not break duplicate matching.
  for (int i = 0; i < size; i++) {
    if (childCosts[i] <= discardCost && ignoreChildHash(children.get(i))) {
      childHashes[i] = 0;
    }
  }
  final int h = h1 + vector(childHashes);
  if (myCallBack != null) {
    myCallBack.add(h, c, fragment);
  }
  return new TreeHashResult(h, c, fragment);
}
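The combination step is additive: the node's own hash plus a fold of the child hashes, with cheap children zeroed out first. The snippet does not show vector(); the self-contained sketch below assumes a plain sum, which is consistent with its reuse for combining child costs.

// Hypothetical stand-in for AbstractTreeHasher.vector(...); the real
// implementation is not shown in the snippet above.
final class TreeHashSketch {
  static int vector(int[] values) {
    int sum = 0;
    for (int v : values) sum += v;
    return sum;
  }

  public static void main(String[] args) {
    int nodeHash = 17;
    int nodeCost = 1;
    int[] childHashes = {101, 5, 202};
    int[] childCosts = {4, 0, 6};
    int discardCost = 0;
    // Mirror of the discard loop: zero out hashes of children at or below
    // the discard cost (ignoreChildHash(...) assumed true here).
    for (int i = 0; i < childHashes.length; i++) {
      if (childCosts[i] <= discardCost) childHashes[i] = 0;
    }
    int h = nodeHash + vector(childHashes); // 17 + (101 + 0 + 202) = 320
    int c = nodeCost + vector(childCosts);  // 1 + (4 + 0 + 6) = 11
    System.out.println("hash=" + h + ", cost=" + c);
  }
}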
Use of com.intellij.dupLocator.util.PsiFragment in the intellij-community project (JetBrains): class DuplocatorHashCallback, method writeFragments().
@SuppressWarnings({"HardCodedStringLiteral"})
private static void writeFragments(final List<PsiFragment> psiFragments, final PrettyPrintWriter writer, Project project, final boolean shouldWriteOffsets) {
  final PathMacroManager macroManager = PathMacroManager.getInstance(project);
  final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(project);
  for (PsiFragment fragment : psiFragments) {
    final PsiFile psiFile = fragment.getFile();
    final VirtualFile virtualFile = psiFile != null ? psiFile.getVirtualFile() : null;
    if (virtualFile != null) {
      writer.startNode("fragment");
      writer.addAttribute("file", macroManager.collapsePath(virtualFile.getUrl()));
      if (shouldWriteOffsets) {
        final Document document = documentManager.getDocument(psiFile);
        LOG.assertTrue(document != null);
        int startOffset = fragment.getStartOffset();
        final int line = document.getLineNumber(startOffset);
        writer.addAttribute("line", String.valueOf(line));
        final int lineStartOffset = document.getLineStartOffset(line);
        // Widen the start to the beginning of the line when only whitespace precedes the fragment.
        if (StringUtil.isEmptyOrSpaces(document.getText().substring(lineStartOffset, startOffset))) {
          startOffset = lineStartOffset;
        }
        writer.addAttribute("start", String.valueOf(startOffset));
        writer.addAttribute("end", String.valueOf(fragment.getEndOffset()));
        if (fragment.containsMultipleFragments()) {
          final int[][] offsets = fragment.getOffsets();
          for (int[] offset : offsets) {
            writer.startNode("offset");
            writer.addAttribute("start", String.valueOf(offset[0]));
            writer.addAttribute("end", String.valueOf(offset[1]));
            writer.endNode();
          }
        }
      }
      writer.endNode();
    }
  }
}
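For orientation, a fragment element written with shouldWriteOffsets == true has roughly the following shape. All values here are invented for illustration; the file attribute is a PathMacroManager-collapsed URL, and offset children appear only for fragments spanning multiple sub-ranges.

<fragment file="file://$PROJECT_DIR$/src/Foo.java" line="41" start="1200" end="1380">
  <offset start="1200" end="1260"/>
  <offset start="1310" end="1380"/>
</fragment>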
Use of com.intellij.dupLocator.util.PsiFragment in the intellij-community project (JetBrains): class DuplocatorHashCallback, method report().
@SuppressWarnings({"HardCodedStringLiteral"})
public void report(String path, final Project project) throws IOException {
  int[] hashCodes = myDuplicates.keys();
  FileWriter fileWriter = null;
  // fragments
  try {
    fileWriter = new FileWriter(path + File.separator + "fragments.xml");
    PrettyPrintWriter writer = new PrettyPrintWriter(fileWriter);
    writer.startNode("root");
    for (int hash : hashCodes) {
      List<List<PsiFragment>> dupList = myDuplicates.get(hash);
      writer.startNode("hash");
      writer.addAttribute("val", String.valueOf(hash));
      for (final List<PsiFragment> psiFragments : dupList) {
        writeFragments(psiFragments, writer, project, false);
      }
      writer.endNode();
    }
    // root node
    writer.endNode();
    writer.flush();
  }
  finally {
    if (fileWriter != null) {
      fileWriter.close();
    }
  }
  fileWriter = null;
  // duplicates
  try {
    fileWriter = new FileWriter(path + File.separator + "duplicates.xml");
    PrettyPrintWriter writer = new PrettyPrintWriter(fileWriter);
    writer.startNode("root");
    final DupInfo info = getInfo();
    final int patterns = info.getPatterns();
    for (int i = 0; i < patterns; i++) {
      writer.startNode("duplicate");
      writer.addAttribute("cost", String.valueOf(info.getPatternCost(i)));
      writer.addAttribute("hash", String.valueOf(info.getHash(i)));
      writeFragments(Arrays.asList(info.getFragmentOccurences(i)), writer, project, true);
      writer.endNode();
    }
    // root node
    writer.endNode();
    writer.flush();
  }
  finally {
    if (fileWriter != null) {
      fileWriter.close();
    }
  }
}
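Note that report() expects a directory path: it appends fragments.xml and duplicates.xml itself. The manual try/finally pairs can be written more compactly with try-with-resources; below is a behavior-equivalent sketch of the first block (the method name writeFragmentsXml is hypothetical, and the writer calls and myDuplicates structure are taken from the code above).

// Sketch: try-with-resources closes the FileWriter automatically, even on exception.
private void writeFragmentsXml(String path, Project project) throws IOException {
  try (FileWriter fileWriter = new FileWriter(path + File.separator + "fragments.xml")) {
    PrettyPrintWriter writer = new PrettyPrintWriter(fileWriter);
    writer.startNode("root");
    for (int hash : myDuplicates.keys()) {
      writer.startNode("hash");
      writer.addAttribute("val", String.valueOf(hash));
      for (final List<PsiFragment> psiFragments : myDuplicates.get(hash)) {
        writeFragments(psiFragments, writer, project, false);
      }
      writer.endNode();
    }
    writer.endNode(); // root
    writer.flush();
  }
}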
Use of com.intellij.dupLocator.util.PsiFragment in the intellij-community project (JetBrains): class DuplocatorHashCallback, method getInfo().
public DupInfo getInfo() {
  final TObjectIntHashMap<PsiFragment[]> duplicateList = new TObjectIntHashMap<>();
  myDuplicates.forEachEntry(new TIntObjectProcedure<List<List<PsiFragment>>>() {
    public boolean execute(final int hash, final List<List<PsiFragment>> listList) {
      for (List<PsiFragment> list : listList) {
        final int len = list.size();
        // Only groups with at least two fragments are actual duplicates.
        if (len > 1) {
          PsiFragment[] filtered = new PsiFragment[len];
          int idx = 0;
          for (final PsiFragment fragment : list) {
            fragment.markDuplicate();
            filtered[idx++] = fragment;
          }
          duplicateList.put(filtered, hash);
        }
      }
      return true;
    }
  });
  myDuplicates = null;

  // Drop groups that contain nested fragments, to avoid double-reporting.
  for (TObjectIntIterator<PsiFragment[]> dups = duplicateList.iterator(); dups.hasNext(); ) {
    dups.advance();
    PsiFragment[] fragments = dups.key();
    LOG.assertTrue(fragments.length > 1);
    boolean nested = false;
    for (PsiFragment fragment : fragments) {
      if (fragment.isNested()) {
        nested = true;
        break;
      }
    }
    if (nested) {
      dups.remove();
    }
  }

  final Object[] duplicates = duplicateList.keys();
  // Most expensive (largest) duplicates first.
  Arrays.sort(duplicates, (x, y) -> ((PsiFragment[])y)[0].getCost() - ((PsiFragment[])x)[0].getCost());
  return new DupInfo() {
    private final TIntObjectHashMap<GroupNodeDescription> myPattern2Description = new TIntObjectHashMap<>();

    public int getPatterns() {
      return duplicates.length;
    }

    public int getPatternCost(int number) {
      return ((PsiFragment[])duplicates[number])[0].getCost();
    }

    public int getPatternDensity(int number) {
      return ((PsiFragment[])duplicates[number]).length;
    }

    public PsiFragment[] getFragmentOccurences(int pattern) {
      return (PsiFragment[])duplicates[pattern];
    }

    public UsageInfo[] getUsageOccurences(int pattern) {
      PsiFragment[] occs = getFragmentOccurences(pattern);
      UsageInfo[] infos = new UsageInfo[occs.length];
      for (int i = 0; i < infos.length; i++) {
        infos[i] = occs[i].getUsageInfo();
      }
      return infos;
    }

    public int getFileCount(final int pattern) {
      if (myPattern2Description.containsKey(pattern)) {
        return myPattern2Description.get(pattern).getFilesCount();
      }
      return cacheGroupNodeDescription(pattern).getFilesCount();
    }

    private GroupNodeDescription cacheGroupNodeDescription(final int pattern) {
      final Set<PsiFile> files = new HashSet<>();
      final PsiFragment[] occurencies = getFragmentOccurences(pattern);
      for (PsiFragment occurency : occurencies) {
        final PsiFile file = occurency.getFile();
        if (file != null) {
          files.add(file);
        }
      }
      final int fileCount = files.size();
      final PsiFile psiFile = occurencies[0].getFile();
      DuplicatesProfile profile = DuplicatesProfileCache.getProfile(this, pattern);
      String comment = profile != null ? profile.getComment(this, pattern) : "";
      final GroupNodeDescription description = new GroupNodeDescription(fileCount, psiFile != null ? psiFile.getName() : "unknown", comment);
      myPattern2Description.put(pattern, description);
      return description;
    }

    @Nullable
    public String getTitle(int pattern) {
      if (getFileCount(pattern) == 1) {
        if (myPattern2Description.containsKey(pattern)) {
          return myPattern2Description.get(pattern).getTitle();
        }
        return cacheGroupNodeDescription(pattern).getTitle();
      }
      return null;
    }

    @Nullable
    public String getComment(int pattern) {
      if (getFileCount(pattern) == 1) {
        if (myPattern2Description.containsKey(pattern)) {
          return myPattern2Description.get(pattern).getComment();
        }
        return cacheGroupNodeDescription(pattern).getComment();
      }
      return null;
    }

    public int getHash(final int i) {
      return duplicateList.get((PsiFragment[])duplicates[i]);
    }
  };
}
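A minimal consumption sketch using only the DupInfo accessors defined above. Note that getInfo() clears myDuplicates, so it should be called once and its result reused; report() above relies on this by calling it a single time, after fragments.xml has already been written.

// Sketch: enumerate duplicate patterns, costliest first (per the sort above).
// 'callback' is an assumed, already-populated DuplocatorHashCallback.
static void printPatterns(DuplocatorHashCallback callback) {
  DupInfo info = callback.getInfo(); // clears internal state: call once, reuse the result
  for (int i = 0; i < info.getPatterns(); i++) {
    System.out.printf("pattern %d: cost=%d, occurrences=%d, files=%d%n",
                      i, info.getPatternCost(i), info.getPatternDensity(i), info.getFileCount(i));
  }
}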