Use of com.google.idea.blaze.base.command.buildresult.BlazeArtifact in project intellij by bazelbuild.
In the class UnpackedAarsTest, the method initTest:
@Override
protected void initTest(Container applicationServices, Container projectServices) {
  writingOutputSink = new WritingOutputSink();
  context = BlazeContext.create();
  context.addOutputSink(PrintOutput.class, writingOutputSink);
  workspaceRoot = new WorkspaceRoot(folder.getRoot());
  localArtifactLocationDecoder =
      new MockArtifactLocationDecoder() {
        @Override
        public File decode(ArtifactLocation artifactLocation) {
          return new File(workspaceRoot.directory(), artifactLocation.getRelativePath());
        }
      };
  remoteArtifactLocationDecoder =
      new MockArtifactLocationDecoder() {
        @Override
        public File decode(ArtifactLocation artifactLocation) {
          return new File(workspaceRoot.directory(), artifactLocation.getRelativePath());
        }

        @Override
        public BlazeArtifact resolveOutput(ArtifactLocation artifact) {
          if (!artifact.isSource()) {
            File file = new File(workspaceRoot.directory(), artifact.getRelativePath());
            // If the artifact cannot be resolved as a remote output (the file does not exist),
            // fall back to treating it as a local artifact.
            return file.exists() ? new FakeRemoteOutputArtifact(file) : super.resolveOutput(artifact);
          }
          return super.resolveOutput(artifact);
        }
      };
  projectServices.register(BlazeImportSettingsManager.class, new BlazeImportSettingsManager(project));
  try {
    File projectDataDirectory = folder.newFolder("projectdata");
    BlazeImportSettings dummyImportSettings =
        new BlazeImportSettings("", "", projectDataDirectory.getAbsolutePath(), "", BuildSystemName.Bazel);
    BlazeImportSettingsManager.getInstance(project).setImportSettings(dummyImportSettings);
  } catch (IOException e) {
    throw new AssertionError("Fail to create directory for test", e);
  }
  applicationServices.register(FileOperationProvider.class, new FileOperationProvider());
  applicationServices.register(RemoteArtifactPrefetcher.class, new DefaultPrefetcher());
  projectServices.register(UnpackedAars.class, new UnpackedAars(project));
  registerExtensionPoint(FileCache.EP_NAME, FileCache.class).registerExtension(new FileCacheAdapter());
  registerExtensionPoint(BlazeSyncPlugin.EP_NAME, BlazeSyncPlugin.class)
      .registerExtension(new BlazeAndroidSyncPlugin());
  registerExtensionPoint(BlazeLibrarySorter.EP_NAME, BlazeLibrarySorter.class);
  applicationServices.register(ExperimentService.class, new MockExperimentService());
}
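For context, the remote decoder above only returns a remote artifact when the generated file is already present under the workspace root; otherwise it falls back to the local resolution inherited from MockArtifactLocationDecoder. A minimal usage sketch of that behavior (the builder calls below assume ArtifactLocation's standard builder; the path is a placeholder, and FakeRemoteOutputArtifact is the test's own helper):

// Hypothetical sketch: a generated (non-source) artifact resolves differently under the two decoders.
ArtifactLocation aarLocation =
    ArtifactLocation.builder()
        .setRelativePath("libs/example.aar") // placeholder path for illustration
        .setIsSource(false)
        .build();
BlazeArtifact viaLocal = localArtifactLocationDecoder.resolveOutput(aarLocation);
BlazeArtifact viaRemote = remoteArtifactLocationDecoder.resolveOutput(aarLocation);
// viaRemote is a FakeRemoteOutputArtifact only if libs/example.aar already exists on disk;
// otherwise both decoders resolve the location to a local artifact.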
Use of com.google.idea.blaze.base.command.buildresult.BlazeArtifact in project intellij by bazelbuild.
In the class FileCacheDiffer, the method readTimestamps:
private static ImmutableMap<File, Long> readTimestamps(
    Map<String, ? extends BlazeArtifact> newOutputs, Map<String, File> cachedFiles)
    throws InterruptedException, ExecutionException {
  boolean timestampsRequired =
      newOutputs.values().stream().anyMatch(a -> a instanceof LocalFileArtifact);
  if (!timestampsRequired) {
    return ImmutableMap.of();
  }
  Set<File> relevantFiles = new HashSet<>();
  for (Map.Entry<String, ? extends BlazeArtifact> entry : newOutputs.entrySet()) {
    BlazeArtifact newOutput = entry.getValue();
    boolean needsTimestamp = newOutput instanceof LocalFileArtifact;
    if (!needsTimestamp) {
      continue;
    }
    relevantFiles.add(((LocalFileArtifact) newOutput).getFile());
    File cached = cachedFiles.get(entry.getKey());
    if (cached != null) {
      relevantFiles.add(cached);
    }
  }
  return ModifiedTimeScanner.readTimestamps(relevantFiles);
}
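Collecting relevantFiles first and reading all timestamps in one batch is the point of the method: one scan over the set instead of a stat call scattered per artifact. A rough, self-contained approximation of such a bulk read, using plain java.nio rather than the project's ModifiedTimeScanner (whose exact behavior is not shown here):

import com.google.common.collect.ImmutableMap;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Set;

static ImmutableMap<File, Long> readTimestampsSketch(Set<File> files) {
  ImmutableMap.Builder<File, Long> result = ImmutableMap.builder();
  for (File file : files) {
    try {
      // Record the last-modified time in milliseconds for each file we can stat.
      result.put(file, Files.getLastModifiedTime(file.toPath()).toMillis());
    } catch (IOException e) {
      // Assumption for this sketch: missing or unreadable files are simply skipped, so a
      // differ comparing against this map treats them as changed.
    }
  }
  return result.build();
}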
Use of com.google.idea.blaze.base.command.buildresult.BlazeArtifact in project intellij by bazelbuild.
In the class JarCache, the method getArtifactsToCache:
/**
 * Returns a map from cache key to BlazeArtifact, for all the artifacts which should be cached.
 */
private static ImmutableMap<String, BlazeArtifact> getArtifactsToCache(
    ProjectViewSet projectViewSet, BlazeProjectData projectData) {
  List<LibraryArtifact> jarLibraries =
      BlazeLibraryCollector.getLibraries(projectViewSet, projectData).stream()
          .filter(library -> library instanceof BlazeJarLibrary)
          .map(library -> ((BlazeJarLibrary) library).libraryArtifact)
          .collect(Collectors.toList());
  ArtifactLocationDecoder decoder = projectData.getArtifactLocationDecoder();
  Map<String, BlazeArtifact> newOutputs = new HashMap<>();
  for (LibraryArtifact lib : jarLibraries) {
    BlazeArtifact jar = decoder.resolveOutput(lib.jarForIntellijLibrary());
    newOutputs.put(cacheKeyForJar(jar), jar);
    for (ArtifactLocation sourceJar : lib.getSourceJars()) {
      BlazeArtifact srcJar = decoder.resolveOutput(sourceJar);
      newOutputs.put(cacheKeyForSourceJar(srcJar), srcJar);
    }
  }
  LintJarHelper.collectLintJarsArtifacts(projectData)
      .forEach(jar -> newOutputs.put(cacheKeyForJar(jar), jar));
  return ImmutableMap.copyOf(newOutputs);
}
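The cache keys produced by cacheKeyForJar and cacheKeyForSourceJar (not shown here) need to be stable across syncs and unique per artifact, so that a re-sync overwrites the same cached file rather than accumulating copies. A hypothetical key scheme with those properties, not necessarily what JarCache actually does, could hash the artifact's key:

import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

// Hypothetical scheme: derive a stable, filesystem-safe cache file name from the artifact.
static String cacheKeySketch(BlazeArtifact artifact, String suffix) {
  String key =
      artifact instanceof OutputArtifact
          ? ((OutputArtifact) artifact).getKey() // identifies the output between builds
          : artifact.toString();
  return Hashing.sha256().hashString(key, StandardCharsets.UTF_8).toString().substring(0, 16)
      + suffix
      + ".jar";
}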
Use of com.google.idea.blaze.base.command.buildresult.BlazeArtifact in project intellij by bazelbuild.
In the class PackageManifestReader, the method readPackageManifestFiles:
/**
 * @return A map from java source absolute file path to declared package string.
 */
public Map<TargetKey, Map<ArtifactLocation, String>> readPackageManifestFiles(
    Project project,
    BlazeContext context,
    ArtifactLocationDecoder decoder,
    Map<TargetKey, ArtifactLocation> javaPackageManifests,
    ListeningExecutorService executorService) {
  Map<OutputArtifact, TargetKey> fileToLabelMap = Maps.newHashMap();
  for (Map.Entry<TargetKey, ArtifactLocation> entry : javaPackageManifests.entrySet()) {
    TargetKey key = entry.getKey();
    BlazeArtifact artifact = decoder.resolveOutput(entry.getValue());
    if (artifact instanceof OutputArtifact) {
      fileToLabelMap.put((OutputArtifact) artifact, key);
    }
  }
  ArtifactsDiff diff;
  try {
    diff = ArtifactsDiff.diffArtifacts(artifactState, fileToLabelMap.keySet());
    artifactState = diff.getNewState();
  } catch (InterruptedException e) {
    throw new ProcessCanceledException(e);
  } catch (ExecutionException e) {
    context.setHasError();
    IssueOutput.error("Updating package manifest files failed: " + e).submit(context);
    throw new AssertionError("Unhandled exception", e);
  }
  // Find all {@link RemoteOutputArtifact}s that are not yet cached and download them before
  // parsing the manifest files.
  ImmutableList<RemoteOutputArtifact> toDownload =
      BlazeArtifact.getRemoteArtifacts(diff.getUpdatedOutputs()).stream()
          .filter(a -> findArtifactInCache(project, a) == null)
          .collect(toImmutableList());
  ListenableFuture<?> fetchRemoteArtifactFuture =
      RemoteArtifactPrefetcher.getInstance().downloadArtifacts(project.getName(), toDownload);
  ListenableFuture<PrefetchStats> fetchLocalFilesFuture =
      PrefetchService.getInstance()
          .prefetchFiles(BlazeArtifact.getLocalFiles(diff.getUpdatedOutputs()), true, false);
  if (!FutureUtil.waitForFuture(
          context, Futures.allAsList(fetchRemoteArtifactFuture, fetchLocalFilesFuture))
      .timed("FetchPackageManifests", EventType.Prefetching)
      .withProgressMessage("Reading package manifests...")
      .run()
      .success()) {
    return null;
  }
  try {
    long bytesConsumed =
        toDownload.stream().mapToLong(RemoteOutputArtifact::getLength).sum()
            + fetchLocalFilesFuture.get().bytesPrefetched();
    if (bytesConsumed > 0) {
      context.output(new NetworkTrafficUsedOutput(bytesConsumed, "packagemanifest"));
    }
  } catch (InterruptedException | ExecutionException e) {
    // Should never happen - the future has already completed.
    logger.error(e);
    // Carry on - failing to log the stats should not affect anything else.
  }
  List<ListenableFuture<Void>> futures = Lists.newArrayList();
  for (OutputArtifact file : diff.getUpdatedOutputs()) {
    futures.add(
        executorService.submit(
            () -> {
              Map<ArtifactLocation, String> manifest = parseManifestFile(project, file);
              manifestMap.put(fileToLabelMap.get(file), manifest);
              return null;
            }));
  }
  for (ArtifactState file : diff.getRemovedOutputs()) {
    TargetKey key = this.fileToLabelMap.get(file);
    if (key != null) {
      manifestMap.remove(key);
    }
  }
  this.fileToLabelMap =
      fileToLabelMap.entrySet().stream()
          .filter(e -> diff.getNewState().containsKey(e.getKey().getKey()))
          .collect(toImmutableMap(e -> e.getKey().toArtifactState(), Map.Entry::getValue));
  try {
    Futures.allAsList(futures).get();
  } catch (ExecutionException | InterruptedException e) {
    logger.error(e);
    throw new IllegalStateException("Could not read sources");
  }
  return manifestMap;
}
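The parsing step above follows a fan-out/fan-in shape: one task per updated artifact, results written into a shared map, then a join on Futures.allAsList. A stripped-down sketch of that shape, with a hypothetical Parser interface standing in for parseManifestFile:

import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;

interface Parser<A, V> {
  V parse(A artifact) throws Exception;
}

static <A, K, V> Map<K, V> parseAll(
    ListeningExecutorService executor, Map<A, K> artifactToKey, Parser<A, V> parser)
    throws InterruptedException, ExecutionException {
  Map<K, V> results = new ConcurrentHashMap<>(); // worker tasks write concurrently
  List<ListenableFuture<Object>> futures = new ArrayList<>();
  for (Map.Entry<A, K> entry : artifactToKey.entrySet()) {
    futures.add(
        executor.submit(
            () -> {
              // Assumption for this sketch: parser never returns null (ConcurrentHashMap
              // rejects null values).
              results.put(entry.getValue(), parser.parse(entry.getKey()));
              return null;
            }));
  }
  // Join on all tasks; the first failure propagates as an ExecutionException.
  Futures.allAsList(futures).get();
  return results;
}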
Use of com.google.idea.blaze.base.command.buildresult.BlazeArtifact in project intellij by bazelbuild.
In the class JdepsFileReader, the method doLoadJdepsFiles:
@Nullable
private JdepsState doLoadJdepsFiles(
    Project project,
    BlazeContext context,
    ArtifactLocationDecoder decoder,
    @Nullable JdepsState oldState,
    Collection<TargetIdeInfo> targetsToLoad,
    SyncMode syncMode)
    throws InterruptedException, ExecutionException {
  Map<OutputArtifact, TargetKey> fileToTargetMap = Maps.newHashMap();
  for (TargetIdeInfo target : targetsToLoad) {
    BlazeArtifact output = resolveJdepsOutput(decoder, target);
    if (output instanceof OutputArtifact) {
      fileToTargetMap.put((OutputArtifact) output, target.getKey());
    }
  }
  ArtifactsDiff diff =
      ArtifactsDiff.diffArtifacts(
          oldState != null ? oldState.getArtifactState() : null, fileToTargetMap.keySet());
  // TODO: handle prefetching for arbitrary OutputArtifacts
  List<OutputArtifact> outputArtifacts = diff.getUpdatedOutputs();
  // A no-build sync should not normally leave outputs needing an update; if it does, something
  // has gone wrong already. Additional logging to identify what is going wrong.
  if (!outputArtifacts.isEmpty() && !syncMode.involvesBlazeBuild()) {
    logger.warn(
        "ArtifactDiff: " + outputArtifacts.size() + " outputs need to be updated during SyncMode.NO_BUILD ");
    if (oldState == null) {
      logger.warn("ArtifactDiff: oldState == null, we failed to load prior JdepsState.");
    } else {
      // Do not list all artifacts since it may be pretty long.
      if (oldState.getArtifactState().size() != fileToTargetMap.size()) {
        logger.warn(
            "Existing artifact state does not match with target map."
                + " [oldState.getArtifactState().size() = " + oldState.getArtifactState().size()
                + ", fileToTargetMap.size() = " + fileToTargetMap.size() + "]");
      }
    }
  }
  ListenableFuture<?> downloadArtifactsFuture =
      RemoteArtifactPrefetcher.getInstance()
          .downloadArtifacts(
              /* projectName= */ project.getName(),
              /* outputArtifacts= */ BlazeArtifact.getRemoteArtifacts(outputArtifacts));
  ListenableFuture<?> fetchLocalFilesFuture =
      PrefetchService.getInstance()
          .prefetchFiles(BlazeArtifact.getLocalFiles(outputArtifacts), true, false);
  if (!FutureUtil.waitForFuture(context, Futures.allAsList(downloadArtifactsFuture, fetchLocalFilesFuture))
      .timed("FetchJdeps", EventType.Prefetching)
      .withProgressMessage("Reading jdeps files...")
      .run()
      .success()) {
    return null;
  }
  AtomicLong totalSizeLoaded = new AtomicLong(0);
  List<ListenableFuture<Result>> futures = Lists.newArrayList();
  for (OutputArtifact updatedFile : outputArtifacts) {
    futures.add(
        FetchExecutor.EXECUTOR.submit(
            () -> {
              totalSizeLoaded.addAndGet(updatedFile.getLength());
              try (InputStream inputStream = updatedFile.getInputStream()) {
                Deps.Dependencies dependencies = Deps.Dependencies.parseFrom(inputStream);
                if (dependencies == null) {
                  return null;
                }
                List<String> deps =
                    dependencies.getDependencyList().stream()
                        .filter(dep -> relevantDep(dep))
                        .map(Dependency::getPath)
                        .collect(toImmutableList());
                TargetKey targetKey = fileToTargetMap.get(updatedFile);
                return new Result(updatedFile, targetKey, deps);
              } catch (IOException e) {
                logger.info("Could not read jdeps file: " + updatedFile);
                return null;
              }
            }));
  }
  JdepsState.Builder state = JdepsState.builder();
  if (oldState != null) {
    state.list.addAll(oldState.data);
  }
  state.removeArtifacts(
      diff.getUpdatedOutputs().stream().map(OutputArtifact::toArtifactState).collect(toImmutableList()));
  state.removeArtifacts(diff.getRemovedOutputs());
  for (Result result : Futures.allAsList(futures).get()) {
    if (result != null) {
      state.list.add(
          JdepsData.create(result.targetKey, result.dependencies, result.output.toArtifactState()));
    }
  }
  context.output(
      PrintOutput.log(
          String.format(
              "Loaded %d jdeps files, total size %dkB",
              diff.getUpdatedOutputs().size(), totalSizeLoaded.get() / 1024)));
  return state.build();
}
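The per-file work inside the tasks above is just parsing the Deps.Dependencies proto from the artifact's input stream. Outside the sync pipeline, the same proto can be inspected directly from a .jdeps file on disk; a minimal sketch, assuming the generated proto classes (com.google.devtools.build.lib.view.proto.Deps) are on the classpath and using a placeholder file path:

import com.google.devtools.build.lib.view.proto.Deps;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public final class JdepsDump {
  public static void main(String[] args) throws IOException {
    // Placeholder path; point this at a real *.jdeps output of a Java compile action.
    try (InputStream in = new FileInputStream("/tmp/example.jdeps")) {
      Deps.Dependencies dependencies = Deps.Dependencies.parseFrom(in);
      for (Deps.Dependency dep : dependencies.getDependencyList()) {
        // Each entry carries the jar path and how it was used (EXPLICIT, IMPLICIT, UNUSED, ...).
        System.out.println(dep.getKind() + " " + dep.getPath());
      }
    }
  }
}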