Usage example of gnu.trove.THashMap in the project intellij-community (JetBrains): class CompilerEncodingServiceImpl, method computeModuleCharsetMap.
/**
 * Builds a map from each module to the set of charsets in use under its
 * Java source roots, based on the project's explicit encoding mappings
 * plus the effective encoding of every source root.
 */
@NotNull
private Map<Module, Set<Charset>> computeModuleCharsetMap() {
  final Map<Module, Set<Charset>> result = new THashMap<>();
  final Map<VirtualFile, Charset> mappings =
      ((EncodingProjectManagerImpl) EncodingProjectManager.getInstance(myProject)).getAllMappings();
  final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();
  final CompilerManager compilerManager = CompilerManager.getInstance(myProject);

  for (Map.Entry<VirtualFile, Charset> mapping : mappings.entrySet()) {
    final VirtualFile file = mapping.getKey();
    final Charset charset = mapping.getValue();
    // Only consider explicit mappings for compilable files (or directories) under a Java source root.
    if (file == null || charset == null) continue;
    if (!file.isDirectory() && !compilerManager.isCompilableFileType(file.getFileType())) continue;
    if (!fileIndex.isUnderSourceRootOfType(file, JavaModuleSourceRootTypes.SOURCES)) continue;

    final Module module = fileIndex.getModuleForFile(file);
    if (module == null) continue;

    Set<Charset> charsets = result.get(module);
    if (charsets == null) {
      charsets = new LinkedHashSet<>();
      result.put(module, charsets);
      // First file seen for this module: also record any charset inherited from an
      // ancestor directory mapping between the file's parent and its source root.
      final VirtualFile sourceRoot = fileIndex.getSourceRootForFile(file);
      Charset inherited = null;
      for (VirtualFile dir = file.getParent(); dir != null; dir = dir.getParent()) {
        final Charset dirCharset = mappings.get(dir);
        if (dirCharset != null) {
          inherited = dirCharset;
        }
        if (dir.equals(sourceRoot)) {
          break;
        }
      }
      if (inherited != null) {
        charsets.add(inherited);
      }
    }
    charsets.add(charset);
  }

  //todo[nik,jeka] perhaps we should take into account encodings of source roots only not individual files
  for (Module module : ModuleManager.getInstance(myProject).getModules()) {
    for (VirtualFile sourceRoot : ModuleRootManager.getInstance(module).getSourceRoots(true)) {
      final Charset encoding = EncodingProjectManager.getInstance(myProject).getEncoding(sourceRoot, true);
      if (encoding == null) continue;
      Set<Charset> charsets = result.get(module);
      if (charsets == null) {
        charsets = new LinkedHashSet<>();
        result.put(module, charsets);
      }
      charsets.add(encoding);
    }
  }
  return result;
}
Usage example of gnu.trove.THashMap in the project intellij-elixir (KronicDeth): class Import, method aritiesByNameFromNameByArityKeywordList.
/**
 * Extracts the arities-by-name map from a keyword-list element.
 * Returns an empty map unless the element (after stripping any wrapping
 * access expression) is an {@code ElixirList}.
 */
@NotNull
private static Map<String, List<Integer>> aritiesByNameFromNameByArityKeywordList(PsiElement element) {
  PsiElement stripped = stripAccessExpression(element);

  if (stripped instanceof ElixirList) {
    // Delegate to the list-specific overload.
    return aritiesByNameFromNameByArityKeywordList((ElixirList) stripped);
  }

  return new THashMap<String, List<Integer>>();
}
Usage example of gnu.trove.THashMap in the project intellij-community (JetBrains): class PassExecutorService, method submitPasses.
/**
 * Groups the supplied highlighting passes by document and editor, assigns
 * unique pass ids, builds the dependency order between scheduled passes and
 * submits the ready ("free") ones for background execution.
 *
 * @param passesMap      passes to run, keyed by the editor they belong to
 * @param updateProgress progress indicator shared by all scheduled passes
 */
void submitPasses(@NotNull Map<FileEditor, HighlightingPass[]> passesMap, @NotNull DaemonProgressIndicator updateProgress) {
if (isDisposed())
return;
// null keys are ok
MultiMap<Document, FileEditor> documentToEditors = MultiMap.createSet();
MultiMap<FileEditor, TextEditorHighlightingPass> documentBoundPasses = MultiMap.createSmart();
MultiMap<FileEditor, EditorBoundHighlightingPass> editorBoundPasses = MultiMap.createSmart();
List<Pair<FileEditor, TextEditorHighlightingPass>> passesWithNoDocuments = new ArrayList<>();
Set<VirtualFile> vFiles = new HashSet<>();
// Phase 1: classify each pass as editor-bound, document-bound, or document-less,
// resolving the backing Document for every editor along the way.
for (Map.Entry<FileEditor, HighlightingPass[]> entry : passesMap.entrySet()) {
FileEditor fileEditor = entry.getKey();
HighlightingPass[] passes = entry.getValue();
Document document;
if (fileEditor instanceof TextEditor) {
Editor editor = ((TextEditor) fileEditor).getEditor();
LOG.assertTrue(!(editor instanceof EditorWindow));
document = editor.getDocument();
} else {
// Non-text editor: go through the virtual file to find a document, if any.
VirtualFile virtualFile = ((FileEditorManagerEx) FileEditorManager.getInstance(myProject)).getFile(fileEditor);
document = virtualFile == null ? null : FileDocumentManager.getInstance().getDocument(virtualFile);
}
if (document != null) {
vFiles.add(FileDocumentManager.getInstance().getFile(document));
}
// prevId chains each converted pass after the previously converted one —
// presumably to preserve relative pass order; see convertToTextHighlightingPass.
int prevId = 0;
for (final HighlightingPass pass : passes) {
if (pass instanceof EditorBoundHighlightingPass) {
EditorBoundHighlightingPass editorPass = (EditorBoundHighlightingPass) pass;
// have to make ids unique for this document
editorPass.setId(nextPassId.incrementAndGet());
editorBoundPasses.putValue(fileEditor, editorPass);
} else {
TextEditorHighlightingPass textEditorHighlightingPass = convertToTextHighlightingPass(pass, document, nextPassId, prevId);
document = textEditorHighlightingPass.getDocument();
documentBoundPasses.putValue(fileEditor, textEditorHighlightingPass);
if (document == null) {
passesWithNoDocuments.add(Pair.create(fileEditor, textEditorHighlightingPass));
} else {
documentToEditors.putValue(document, fileEditor);
}
prevId = textEditorHighlightingPass.getId();
}
}
}
List<ScheduledPass> freePasses = new ArrayList<>(documentToEditors.size() * 5);
List<ScheduledPass> dependentPasses = new ArrayList<>(documentToEditors.size() * 10);
// (fileEditor, passId) -> created pass
Map<Pair<FileEditor, Integer>, ScheduledPass> toBeSubmitted = new THashMap<>(passesMap.size());
final AtomicInteger threadsToStartCountdown = new AtomicInteger(0);
// Phase 2: schedule document-bound passes, once per document via its preferred editor.
for (Map.Entry<Document, Collection<FileEditor>> entry : documentToEditors.entrySet()) {
Collection<FileEditor> fileEditors = entry.getValue();
Document document = entry.getKey();
FileEditor preferredFileEditor = getPreferredFileEditor(document, fileEditors);
List<TextEditorHighlightingPass> passes = (List<TextEditorHighlightingPass>) documentBoundPasses.get(preferredFileEditor);
if (passes.isEmpty()) {
continue;
}
sortById(passes);
for (TextEditorHighlightingPass currentPass : passes) {
createScheduledPass(preferredFileEditor, currentPass, toBeSubmitted, passes, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
}
}
// Phase 3: schedule editor-bound passes; each may depend on all passes created
// for its editor (document-bound plus editor-bound).
for (Map.Entry<FileEditor, Collection<EditorBoundHighlightingPass>> entry : editorBoundPasses.entrySet()) {
FileEditor fileEditor = entry.getKey();
Collection<EditorBoundHighlightingPass> createdEditorBoundPasses = entry.getValue();
List<TextEditorHighlightingPass> createdDocumentBoundPasses = (List<TextEditorHighlightingPass>) documentBoundPasses.get(fileEditor);
List<TextEditorHighlightingPass> allCreatedPasses = new ArrayList<>(createdDocumentBoundPasses);
allCreatedPasses.addAll(createdEditorBoundPasses);
for (EditorBoundHighlightingPass pass : createdEditorBoundPasses) {
createScheduledPass(fileEditor, pass, toBeSubmitted, allCreatedPasses, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
}
}
// Phase 4: passes with no document are scheduled with no dependencies.
for (Pair<FileEditor, TextEditorHighlightingPass> pair : passesWithNoDocuments) {
FileEditor fileEditor = pair.first;
TextEditorHighlightingPass pass = pair.second;
createScheduledPass(fileEditor, pass, toBeSubmitted, ContainerUtil.emptyList(), freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
}
if (CHECK_CONSISTENCY && !ApplicationInfoImpl.isInStressTest()) {
assertConsistency(freePasses, toBeSubmitted, threadsToStartCountdown);
}
log(updateProgress, null, vFiles + " ----- starting " + threadsToStartCountdown.get(), freePasses);
// Dependent passes are registered with a placeholder job; presumably they are
// started later when their prerequisites complete — NULL_JOB marks "not yet running".
for (ScheduledPass dependentPass : dependentPasses) {
mySubmittedPasses.put(dependentPass, Job.NULL_JOB);
}
// Free passes have no unmet dependencies and can be submitted immediately.
for (ScheduledPass freePass : freePasses) {
submit(freePass);
}
}
Usage example of gnu.trove.THashMap in the project intellij-community (JetBrains): class ImportHelper, method prepareOptimizeImportsResult.
/**
 * Computes the optimized import list for the given Java file.
 * Collects and sorts all names to import, folds eligible imports into
 * on-demand ("*") imports per the code style settings, resolves short-name
 * conflicts, and materializes the result by parsing a freshly built import
 * list in a dummy file. Comments interleaved with the original imports are
 * carried over into the result.
 *
 * @param file the Java file whose import list should be optimized
 * @return the replacement import list, or null if it is equivalent to the
 *         existing one (or on an IncorrectOperationException)
 */
@Nullable("null means no need to replace the import list because they are the same")
PsiImportList prepareOptimizeImportsResult(@NotNull final PsiJavaFile file) {
PsiImportList oldList = file.getImportList();
if (oldList == null)
return null;
// Java parser works in a way that comments may be included to the import list, e.g.:
// import a;
// /* comment */
// import b;
// We want to preserve those comments then.
List<PsiElement> nonImports = new ArrayList<>();
// Note: this array may contain "<packageOrClassName>.*" for unresolved imports!
List<Pair<String, Boolean>> names = new ArrayList<>(collectNamesToImport(file, nonImports));
Collections.sort(names, Comparator.comparing(o -> o.getFirst()));
List<Pair<String, Boolean>> resultList = sortItemsAccordingToSettings(names, mySettings);
final Map<String, Boolean> classesOrPackagesToImportOnDemand = new THashMap<>();
collectOnDemandImports(resultList, mySettings, classesOrPackagesToImportOnDemand);
// Group the owning packages of each imported member by its short name
// (pair.second presumably flags static member imports — TODO confirm).
MultiMap<String, String> conflictingMemberNames = new MultiMap<>();
for (Pair<String, Boolean> pair : resultList) {
if (pair.second) {
conflictingMemberNames.putValue(StringUtil.getShortName(pair.first), StringUtil.getPackageName(pair.first));
}
}
// If a short name's owning packages are not ALL imported on demand, drop every
// one of them from the on-demand set to avoid ambiguous "*" imports.
for (String methodName : conflictingMemberNames.keySet()) {
Collection<String> collection = conflictingMemberNames.get(methodName);
if (!classesOrPackagesToImportOnDemand.keySet().containsAll(collection)) {
for (String name : collection) {
classesOrPackagesToImportOnDemand.remove(name);
}
}
}
// Classes that must keep a single import even though their package is imported
// on demand (plus classes that would conflict via overlapping "*" imports).
Set<String> classesToUseSingle = findSingleImports(file, resultList, classesOrPackagesToImportOnDemand.keySet());
Set<String> toReimport = new THashSet<>();
calcClassesConflictingViaOnDemandImports(file, classesOrPackagesToImportOnDemand, file.getResolveScope(), toReimport);
classesToUseSingle.addAll(toReimport);
try {
// Build the import list as text, parse it inside a dummy file, and reformat it
// so the result matches the project code style.
StringBuilder text = buildImportListText(resultList, classesOrPackagesToImportOnDemand.keySet(), classesToUseSingle);
for (PsiElement nonImport : nonImports) {
text.append("\n").append(nonImport.getText());
}
String ext = StdFileTypes.JAVA.getDefaultExtension();
PsiFileFactory factory = PsiFileFactory.getInstance(file.getProject());
final PsiJavaFile dummyFile = (PsiJavaFile) factory.createFileFromText("_Dummy_." + ext, StdFileTypes.JAVA, text);
CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(file.getProject());
codeStyleManager.reformat(dummyFile);
PsiImportList newImportList = dummyFile.getImportList();
assert newImportList != null : dummyFile.getText();
PsiImportList result = (PsiImportList) newImportList.copy();
if (oldList.isReplaceEquivalent(result))
return null;
if (!nonImports.isEmpty()) {
// Re-attach the comment elements that the parser placed around the new
// import list in the dummy file: first those before it, then those after.
PsiElement firstPrevious = newImportList.getPrevSibling();
while (firstPrevious != null && firstPrevious.getPrevSibling() != null) {
firstPrevious = firstPrevious.getPrevSibling();
}
for (PsiElement element = firstPrevious; element != null && element != newImportList; element = element.getNextSibling()) {
result.add(element.copy());
}
for (PsiElement element = newImportList.getNextSibling(); element != null; element = element.getNextSibling()) {
result.add(element.copy());
}
}
return result;
} catch (IncorrectOperationException e) {
LOG.error(e);
return null;
}
}
Usage example of gnu.trove.THashMap in the project intellij-community (JetBrains): class PluginManagerCore, method initializePlugins.
/**
 * Loads all plugin descriptors, resolves their dependency order, assigns a
 * class loader to each plugin, and registers their extension points and
 * extensions. Stores the loaded descriptors in the static {@code ourPlugins}
 * field as the final step.
 *
 * @param progress optional startup progress indicator; updated per plugin
 */
private static void initializePlugins(@Nullable StartupProgress progress) {
configureExtensions();
final List<String> errors = ContainerUtil.newArrayList();
final IdeaPluginDescriptorImpl[] pluginDescriptors = loadDescriptors(progress, errors);
// The caller's class loader becomes the parent for all plugin class loaders.
final Class callerClass = ReflectionUtil.findCallerClass(1);
assert callerClass != null;
final ClassLoader parentLoader = callerClass.getClassLoader();
final List<IdeaPluginDescriptorImpl> result = new ArrayList<>();
final Map<PluginId, IdeaPluginDescriptorImpl> idToDescriptorMap = new THashMap<>();
final Map<String, String> disabledPluginNames = new THashMap<>();
List<String> brokenPluginsList = new SmartList<>();
// Filters out disabled/broken plugins and fills result/idToDescriptorMap/errors.
fixDescriptors(pluginDescriptors, parentLoader, idToDescriptorMap, disabledPluginNames, brokenPluginsList, result, errors);
// Topologically order plugin ids by their dependency graph (DFST traversal).
final Graph<PluginId> graph = createPluginIdGraph(idToDescriptorMap);
final DFSTBuilder<PluginId> builder = new DFSTBuilder<>(graph);
prepareLoadingPluginsErrorMessage(errors);
final Comparator<PluginId> idComparator = builder.comparator();
// sort descriptors according to plugin dependencies
result.sort((o1, o2) -> idComparator.compare(o1.getPluginId(), o2.getPluginId()));
// Remember each plugin's position in the sorted order.
for (int i = 0; i < result.size(); i++) {
ourId2Index.put(result.get(i).getPluginId(), i);
}
int i = 0;
for (final IdeaPluginDescriptorImpl pluginDescriptor : result) {
// The core plugin (and plugins opting in) share the IDE's own class loader;
// every other plugin gets a dedicated loader parented on its dependencies.
if (pluginDescriptor.getPluginId().getIdString().equals(CORE_PLUGIN_ID) || pluginDescriptor.isUseCoreClassLoader()) {
pluginDescriptor.setLoader(parentLoader);
} else {
final List<File> classPath = pluginDescriptor.getClassPath();
final PluginId[] dependentPluginIds = pluginDescriptor.getDependentPluginIds();
final ClassLoader[] parentLoaders = getParentLoaders(idToDescriptorMap, dependentPluginIds);
ClassLoader pluginClassLoader = createPluginClassLoader(classPath.toArray(new File[classPath.size()]), parentLoaders.length > 0 ? parentLoaders : new ClassLoader[] { parentLoader }, pluginDescriptor);
pluginDescriptor.setLoader(pluginClassLoader);
}
if (progress != null) {
progress.showProgress("", PLUGINS_PROGRESS_PART + (i++ / (float) result.size()) * LOADERS_PROGRESS_PART);
}
}
registerExtensionPointsAndExtensions(Extensions.getRootArea(), result);
// Also register extensions in any extension area created later.
Extensions.getRootArea().getExtensionPoint(Extensions.AREA_LISTENER_EXTENSION_POINT).registerExtension(new AreaListener() {
@Override
public void areaCreated(@NotNull String areaClass, @NotNull AreaInstance areaInstance) {
registerExtensionPointsAndExtensions(Extensions.getArea(areaInstance), result);
}
@Override
public void areaDisposing(@NotNull String areaClass, @NotNull AreaInstance areaInstance) {
}
});
ourPlugins = pluginDescriptors;
}
Aggregations