Usage of org.jetbrains.jps.cmdline.ProjectDescriptor in the intellij-community project by JetBrains:
class IncProjectBuilder, method clearOutputs.
/**
 * Clears output directories before a forced build.
 * <p>
 * Output roots of {@link ModuleBasedTarget}s are collected first, because several targets may share
 * one root: a root is wiped wholesale only when ALL targets writing to it are force-built and the
 * root does not overlap any non-generated source root. Otherwise (and for non-module targets) only
 * the files known to have been produced by the build are removed via
 * {@code clearOutputFilesUninterruptibly}.
 *
 * @param context current compile context; used for the scope, cancellation checks and messages
 * @throws ProjectBuildException on build-level failures during clearing
 */
private void clearOutputs(CompileContext context) throws ProjectBuildException {
  final long cleanStart = System.currentTimeMillis();
  // output root -> all module-based targets that write into it (several targets may share a root)
  final MultiMap<File, BuildTarget<?>> rootsToDelete = MultiMap.createSet();
  // all non-generated source roots that are inside the project content; used for overlap checks
  final Set<File> allSourceRoots = ContainerUtil.newTroveSet(FileUtil.FILE_HASHING_STRATEGY);
  final ProjectDescriptor projectDescriptor = context.getProjectDescriptor();
  final List<? extends BuildTarget<?>> allTargets = projectDescriptor.getBuildTargetIndex().getAllTargets();
  for (BuildTarget<?> target : allTargets) {
    if (target instanceof ModuleBasedTarget) {
      // defer deletion: whether the whole root may be wiped depends on all targets sharing it
      for (File file : target.getOutputRoots(context)) {
        rootsToDelete.putValue(file, target);
      }
    }
    else {
      // non-module targets are cleaned per-file right away when their rebuild is forced
      if (context.getScope().isBuildForced(target)) {
        clearOutputFilesUninterruptibly(context, target);
      }
    }
  }
  final ModuleExcludeIndex moduleIndex = projectDescriptor.getModuleExcludeIndex();
  for (BuildTarget<?> target : allTargets) {
    for (BuildRootDescriptor descriptor : projectDescriptor.getBuildRootIndex().getTargetRoots(target, context)) {
      // roots with generated sources are excluded from the checks, because it is safe to delete generated stuff
      if (!descriptor.isGenerated()) {
        File rootFile = descriptor.getRootFile();
        // However, if a root isn't excluded, its content is shown in 'Project View' and a user can
        // create new files under it, so it would be dangerous to clean such roots.
        if (moduleIndex.isInContent(rootFile)) {
          allSourceRoots.add(rootFile);
        }
      }
    }
  }
  // check that output and source roots are not overlapping
  final CompileScope compileScope = context.getScope();
  // files whose synchronous deletion failed; retried below, possibly asynchronously
  final List<File> filesToDelete = new ArrayList<>();
  final Predicate<BuildTarget<?>> forcedBuild = new Predicate<BuildTarget<?>>() {
    public boolean apply(BuildTarget<?> input) {
      return compileScope.isBuildForced(input);
    }
  };
  for (Map.Entry<File, Collection<BuildTarget<?>>> entry : rootsToDelete.entrySet()) {
    context.checkCanceled();
    final File outputRoot = entry.getKey();
    final Collection<BuildTarget<?>> rootTargets = entry.getValue();
    // NONE / SOME / ALL of the targets sharing this root are force-built
    final Applicability applicability = Applicability.calculate(forcedBuild, rootTargets);
    if (applicability == Applicability.NONE) {
      continue;
    }
    // the whole root may be wiped only if every target writing to it is being rebuilt
    boolean okToDelete = applicability == Applicability.ALL;
    if (okToDelete && !moduleIndex.isExcluded(outputRoot)) {
      // The root is not excluded from the project, so it may contain user files;
      // it is safe to delete it only when it does not intersect with any source root.
      if (JpsPathUtil.isUnder(allSourceRoots, outputRoot)) {
        okToDelete = false;
      }
      else {
        // check the reverse direction as well: no source root may lie under the output root
        final Set<File> _outRoot = ContainerUtil.newTroveSet(FileUtil.FILE_HASHING_STRATEGY, outputRoot);
        for (File srcRoot : allSourceRoots) {
          if (JpsPathUtil.isUnder(_outRoot, srcRoot)) {
            okToDelete = false;
            break;
          }
        }
      }
    }
    if (okToDelete) {
      // do not delete output root itself to avoid lots of unnecessary "roots_changed" events in IDEA
      final File[] children = outputRoot.listFiles();
      if (children != null) {
        for (File child : children) {
          // failed deletions are collected and retried below
          if (!child.delete()) {
            filesToDelete.add(child);
          }
        }
      }
      else {
        // the output root must be a file, not a directory: delete it directly
        if (!outputRoot.delete()) {
          filesToDelete.add(outputRoot);
        }
      }
      registerTargetsWithClearedOutput(context, rootTargets);
    }
    else {
      if (applicability == Applicability.ALL) {
        // only warn if unable to delete because of roots intersection
        context.processMessage(new CompilerMessage("", BuildMessage.Kind.WARNING, "Output path " + outputRoot.getPath() + " intersects with a source root. Only files that were created by build will be cleaned."));
      }
      context.processMessage(new ProgressMessage("Cleaning output directories..."));
      // clean only those files we are aware of
      for (BuildTarget<?> target : rootTargets) {
        if (compileScope.isBuildForced(target)) {
          clearOutputFilesUninterruptibly(context, target);
        }
      }
    }
  }
  if (!filesToDelete.isEmpty()) {
    context.processMessage(new ProgressMessage("Cleaning output directories..."));
    if (SYNC_DELETE) {
      // delete on this thread so cancellation can be honored between files
      for (File file : filesToDelete) {
        context.checkCanceled();
        FileUtil.delete(file);
      }
    }
    else {
      // postpone deletion; the async task is awaited elsewhere before the build finishes
      myAsyncTasks.add(FileUtil.asyncDelete(filesToDelete));
    }
  }
  LOG.info("Cleaned output directories in " + (System.currentTimeMillis() - cleanStart) + " ms");
}
Usage of org.jetbrains.jps.cmdline.ProjectDescriptor in the intellij-community project by JetBrains:
class IncProjectBuilder, method reportUnprocessedChanges.
/**
 * Sends a single {@link UnprocessedFSChangesNotification} if at least one build target
 * still has file-system changes that were not processed during this build session.
 */
private static void reportUnprocessedChanges(CompileContextImpl context) {
  final ProjectDescriptor descriptor = context.getProjectDescriptor();
  final BuildFSState state = descriptor.fsState;
  for (BuildTarget<?> buildTarget : descriptor.getBuildTargetIndex().getAllTargets()) {
    if (!state.hasUnprocessedChanges(context, buildTarget)) {
      continue;
    }
    // one notification is enough; no need to scan the remaining targets
    context.processMessage(new UnprocessedFSChangesNotification());
    return;
  }
}
Usage of org.jetbrains.jps.cmdline.ProjectDescriptor in the intellij-community project by JetBrains:
class IncProjectBuilder, method buildChunks.
/**
 * Builds all target chunks in their sorted order, either in parallel (when enabled and more than
 * one builder thread is available) or sequentially, flushing build data after each chunk.
 *
 * @param context current compile context
 * @throws ProjectBuildException if storage operations fail while building
 */
private void buildChunks(final CompileContextImpl context) throws ProjectBuildException {
  try {
    final CompileScope scope = context.getScope();
    final ProjectDescriptor descriptor = context.getProjectDescriptor();
    final BuildTargetIndex index = descriptor.getBuildTargetIndex();
    // for better progress dynamics consider only actually affected chunks
    int affectedTargets = 0;
    for (BuildTargetChunk chunk : index.getSortedTargetChunks(context)) {
      if (isAffected(scope, chunk)) {
        affectedTargets += chunk.getTargets().size();
      }
    }
    myTotalTargetsWork = affectedTargets;
    boolean parallel = BuildRunner.PARALLEL_BUILD_ENABLED;
    if (parallel && MAX_BUILDER_THREADS <= 1) {
      LOG.info("Switched off parallel compilation because maximum number of builder threads is less than 2. Set '" + GlobalOptions.COMPILE_PARALLEL_MAX_THREADS_OPTION + "' system property to a value greater than 1 to really enable parallel compilation.");
      parallel = false;
    }
    if (parallel) {
      new BuildParallelizer(context).buildInParallel();
      return;
    }
    // non-parallel build: process chunks one by one, closing and flushing storages after each
    for (BuildTargetChunk chunk : index.getSortedTargetChunks(context)) {
      try {
        buildChunkIfAffected(context, scope, chunk);
      }
      finally {
        descriptor.dataManager.closeSourceToOutputStorages(Collections.singleton(chunk));
        descriptor.dataManager.flush(true);
      }
    }
  }
  catch (IOException e) {
    throw new ProjectBuildException(e);
  }
}
Usage of org.jetbrains.jps.cmdline.ProjectDescriptor in the intellij-community project by JetBrains:
class IncProjectBuilder, method cleanOutputRoots.
/**
 * Cleans compilation outputs before a rebuild and, optionally, drops the build caches.
 * <p>
 * Depending on the project configuration either whole output directories are cleared
 * ({@link #clearOutputs}) or only the files produced by force-built targets are removed.
 * When {@code cleanCaches} is {@code true}, timestamps and compiler storages are cleaned
 * afterwards and the in-memory FS state is reset.
 *
 * @param context     current compile context
 * @param cleanCaches whether timestamps storage and compiler storages must be dropped as well
 * @throws ProjectBuildException the first failure encountered; later failures are logged so the
 *                               original cause is not masked
 */
private void cleanOutputRoots(CompileContext context, boolean cleanCaches) throws ProjectBuildException {
  final ProjectDescriptor projectDescriptor = context.getProjectDescriptor();
  // remember the first failure and re-throw it after all cleanup steps had a chance to run
  ProjectBuildException ex = null;
  try {
    final JpsJavaCompilerConfiguration configuration = JpsJavaExtensionService.getInstance().getOrCreateCompilerConfiguration(projectDescriptor.getProject());
    final boolean shouldClear = configuration.isClearOutputDirectoryOnRebuild();
    if (shouldClear) {
      clearOutputs(context);
    }
    else {
      // clearing whole directories is disabled: remove only files produced by force-built targets
      for (BuildTarget<?> target : projectDescriptor.getBuildTargetIndex().getAllTargets()) {
        context.checkCanceled();
        if (context.getScope().isBuildForced(target)) {
          clearOutputFilesUninterruptibly(context, target);
        }
      }
    }
  }
  catch (ProjectBuildException e) {
    ex = e;
  }
  finally {
    if (cleanCaches) {
      try {
        // drop per-file timestamps so everything is considered dirty on the next build
        projectDescriptor.timestamps.getStorage().clean();
      }
      catch (IOException e) {
        if (ex == null) {
          ex = new ProjectBuildException("Error cleaning timestamps storage", e);
        }
        else {
          // a failure is already pending; log this one instead of masking the original
          LOG.info("Error cleaning timestamps storage", e);
        }
      }
      finally {
        try {
          // drop compiler storages (mappings, source-to-output data etc.)
          projectDescriptor.dataManager.clean();
        }
        catch (IOException e) {
          if (ex == null) {
            ex = new ProjectBuildException("Error cleaning compiler storages", e);
          }
          else {
            LOG.info("Error cleaning compiler storages", e);
          }
        }
        finally {
          // reset the in-memory FS state regardless of previous failures
          projectDescriptor.fsState.clearAll();
        }
      }
    }
    // BUGFIX: the pending exception used to be re-thrown only inside the cleanCaches branch,
    // silently swallowing build failures when cleanCaches == false
    if (ex != null) {
      throw ex;
    }
  }
}
Usage of org.jetbrains.jps.cmdline.ProjectDescriptor in the intellij-community project by JetBrains:
class IncArtifactBuilder, method build.
/**
 * Incrementally builds an artifact: outputs of removed sources are deleted, dirty sources are
 * re-copied (directly for exploded layouts, or by rebuilding the affected jars), and the
 * source-to-output / output-to-source mappings are kept in sync.
 * <p>
 * Bails out early with an ERROR message when the artifact has no output path configured or when
 * it (directly or through another artifact) includes itself in its own output layout.
 *
 * @param target         the artifact build target
 * @param holder         provides dirty and removed source files for this target
 * @param outputConsumer receives produced output files
 * @param context        current compile context
 * @throws ProjectBuildException if an I/O error occurs while building
 */
@Override
public void build(@NotNull ArtifactBuildTarget target, @NotNull DirtyFilesHolder<ArtifactRootDescriptor, ArtifactBuildTarget> holder, @NotNull BuildOutputConsumer outputConsumer, @NotNull final CompileContext context) throws ProjectBuildException {
  JpsArtifact artifact = target.getArtifact();
  String outputFilePath = artifact.getOutputFilePath();
  if (StringUtil.isEmpty(outputFilePath)) {
    context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, "Cannot build '" + artifact.getName() + "' artifact: output path is not specified"));
    return;
  }
  final ProjectDescriptor pd = context.getProjectDescriptor();
  final ArtifactSorter sorter = new ArtifactSorter(pd.getModel());
  final Map<JpsArtifact, JpsArtifact> selfIncludingNameMap = sorter.getArtifactToSelfIncludingNameMap();
  final JpsArtifact selfIncluding = selfIncludingNameMap.get(artifact);
  if (selfIncluding != null) {
    // a self-including layout cannot be materialized; report which artifact closes the cycle
    String name = selfIncluding.equals(artifact) ? "it" : "'" + selfIncluding.getName() + "' artifact";
    context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, "Cannot build '" + artifact.getName() + "' artifact: " + name + " includes itself in the output layout"));
    return;
  }
  try {
    final Collection<String> deletedFiles = holder.getRemovedFiles(target);
    String messageText = "Building artifact '" + artifact.getName() + "'...";
    context.processMessage(new ProgressMessage(messageText));
    LOG.debug(messageText);
    runArtifactTasks(context, target.getArtifact(), ArtifactBuildTaskProvider.ArtifactBuildPhase.PRE_PROCESSING);
    final SourceToOutputMapping srcOutMapping = pd.dataManager.getSourceToOutputMap(target);
    final ArtifactOutputToSourceMapping outSrcMapping = pd.dataManager.getStorage(target, ArtifactOutToSourceStorageProvider.INSTANCE);
    // root index -> source paths that must be (re)copied from that root
    final TIntObjectHashMap<Set<String>> filesToProcess = new TIntObjectHashMap<>();
    // output path -> source paths whose contribution to that output must be removed
    final MultiMap<String, String> filesToDelete = new MultiMap<>();
    final Set<String> deletedOutputPaths = new THashSet<>(FileUtil.PATH_HASHING_STRATEGY);
    // Phase 1: for every removed source, schedule its outputs for deletion and re-collect the
    // remaining sources that contributed to the same outputs so those can be regenerated.
    for (String sourcePath : deletedFiles) {
      final Collection<String> outputPaths = srcOutMapping.getOutputs(sourcePath);
      if (outputPaths != null) {
        for (String outputPath : outputPaths) {
          // the deletedOutputPaths guard makes each output processed at most once
          if (deletedOutputPaths.add(outputPath)) {
            collectSourcesCorrespondingToOutput(outputPath, sourcePath, deletedFiles, outSrcMapping, filesToProcess, filesToDelete);
          }
        }
      }
    }
    final Set<String> changedOutputPaths = new THashSet<>(FileUtil.PATH_HASHING_STRATEGY);
    // Phase 2: every dirty source is queued for re-copying, together with the other sources
    // contributing to the outputs it previously produced.
    holder.processDirtyFiles(new FileProcessor<ArtifactRootDescriptor, ArtifactBuildTarget>() {
      @Override
      public boolean apply(ArtifactBuildTarget target, File file, ArtifactRootDescriptor root) throws IOException {
        int rootIndex = root.getRootIndex();
        String sourcePath = FileUtil.toSystemIndependentName(file.getPath());
        addFileToProcess(filesToProcess, rootIndex, sourcePath, deletedFiles);
        final Collection<String> outputPaths = srcOutMapping.getOutputs(sourcePath);
        if (outputPaths != null) {
          for (String outputPath : outputPaths) {
            if (changedOutputPaths.add(outputPath)) {
              collectSourcesCorrespondingToOutput(outputPath, sourcePath, deletedFiles, outSrcMapping, filesToProcess, filesToDelete);
            }
          }
        }
        return true; // keep processing the remaining dirty files
      }
    });
    BuildOperations.cleanOutputsCorrespondingToChangedFiles(context, holder);
    // mappings for changed outputs are rebuilt from scratch during the copy phase below
    for (String outputPath : changedOutputPaths) {
      outSrcMapping.remove(outputPath);
    }
    if (filesToDelete.isEmpty() && filesToProcess.isEmpty()) {
      // nothing to delete and nothing to copy: the artifact is up to date
      return;
    }
    deleteOutdatedFiles(filesToDelete, context, srcOutMapping, outSrcMapping);
    context.checkCanceled();
    context.processMessage(new ProgressMessage("Building artifact '" + artifact.getName() + "': copying files..."));
    final Set<JarInfo> changedJars = new THashSet<>();
    // Phase 3: copy queued sources root by root
    for (ArtifactRootDescriptor descriptor : pd.getBuildRootIndex().getTargetRoots(target, context)) {
      context.checkCanceled();
      final Set<String> sourcePaths = filesToProcess.get(descriptor.getRootIndex());
      if (sourcePaths == null)
        continue;
      for (String sourcePath : sourcePaths) {
        if (!descriptor.getFilter().shouldBeCopied(sourcePath, pd)) {
          if (LOG.isDebugEnabled()) {
            LOG.debug("File " + sourcePath + " will be skipped because it isn't accepted by filter");
          }
          continue;
        }
        DestinationInfo destination = descriptor.getDestinationInfo();
        if (destination instanceof ExplodedDestinationInfo) {
          // exploded (plain directory) layout: copy the file straight to its destination
          descriptor.copyFromRoot(sourcePath, descriptor.getRootIndex(), destination.getOutputPath(), context, outputConsumer, outSrcMapping);
        }
        else {
          // Jar destination: reset the entry's recorded sources and mark the jar for rebuilding.
          // NOTE: '&&' binds tighter than '||', so the branch is taken when no sources are recorded
          // yet for this jar entry, or when this root is the first recorded contributor.
          List<ArtifactOutputToSourceMapping.SourcePathAndRootIndex> sources = outSrcMapping.getState(destination.getOutputFilePath());
          if (sources == null || sources.size() > 0 && sources.get(0).getRootIndex() == descriptor.getRootIndex()) {
            outSrcMapping.update(destination.getOutputFilePath(), Collections.<ArtifactOutputToSourceMapping.SourcePathAndRootIndex>emptyList());
            changedJars.add(((JarDestinationInfo) destination).getJarInfo());
          }
        }
      }
    }
    context.checkCanceled();
    // Phase 4: regenerate all jars whose content changed
    JarsBuilder builder = new JarsBuilder(changedJars, context, outputConsumer, outSrcMapping);
    builder.buildJars();
    runArtifactTasks(context, artifact, ArtifactBuildTaskProvider.ArtifactBuildPhase.FINISHING_BUILD);
    runArtifactTasks(context, artifact, ArtifactBuildTaskProvider.ArtifactBuildPhase.POST_PROCESSING);
  }
  catch (IOException e) {
    throw new ProjectBuildException(e);
  }
}
Aggregations