Use of com.facebook.buck.artifact_cache.ArtifactCache in project buck by facebook.
The class BuildInfoRecorderTest, method testPerformUploadToArtifactCache.
@Test
public void testPerformUploadToArtifactCache() throws IOException, InterruptedException {
  FakeProjectFilesystem filesystem = new FakeProjectFilesystem();
  BuildInfoRecorder buildInfoRecorder = createBuildInfoRecorder(filesystem);
  BuckEventBus bus = new BuckEventBus(new FakeClock(0), new BuildId("BUILD"));

  final byte[] contents = "contents".getBytes();

  Path file = Paths.get("file");
  filesystem.writeBytesToPath(contents, file);
  buildInfoRecorder.recordArtifact(file);

  Path dir = Paths.get("dir");
  filesystem.mkdirs(dir);
  filesystem.writeBytesToPath(contents, dir.resolve("file"));
  buildInfoRecorder.recordArtifact(dir);

  // Record some metadata.
  buildInfoRecorder.addMetadata("metadata", "metadata");
  // Record some build metadata.
  buildInfoRecorder.addBuildMetadata("build-metadata", "build-metadata");
  buildInfoRecorder.writeMetadataToDisk(true);

  final AtomicBoolean stored = new AtomicBoolean(false);
  final ArtifactCache cache =
      new NoopArtifactCache() {
        @Override
        public boolean isStoreSupported() {
          return true;
        }

        @Override
        public ListenableFuture<Void> store(ArtifactInfo info, BorrowablePath output) {
          stored.set(true);

          // Verify the build metadata.
          assertThat(info.getMetadata().get("build-metadata"), Matchers.equalTo("build-metadata"));

          // Verify zip contents
          try (Zip zip = new Zip(output.getPath(), /* forWriting */ false)) {
            assertEquals(
                ImmutableSet.of(
                    "",
                    "dir/",
                    "buck-out/",
                    "buck-out/bin/",
                    "buck-out/bin/foo/",
                    "buck-out/bin/foo/.bar/",
                    "buck-out/bin/foo/.bar/metadata/"),
                zip.getDirNames());
            assertEquals(
                ImmutableSet.of("dir/file", "file", "buck-out/bin/foo/.bar/metadata/metadata"),
                zip.getFileNames());
            assertArrayEquals(contents, zip.readFully("file"));
            assertArrayEquals(contents, zip.readFully("dir/file"));
          } catch (IOException e) {
            Throwables.throwIfUnchecked(e);
            throw new RuntimeException(e);
          }
          return Futures.immediateFuture(null);
        }
      };

  buildInfoRecorder.performUploadToArtifactCache(ImmutableSet.of(new RuleKey("aa")), cache, bus);
  assertTrue(stored.get());
}
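The anonymous NoopArtifactCache above does double duty: it opts in to stores via isStoreSupported() and asserts on the uploaded zip inside store(). When inline assertions are not needed, the same two overrides can be pulled into a reusable capturing cache. The sketch below is not part of Buck; it assumes only the API visible in the snippet (NoopArtifactCache, and store(ArtifactInfo, BorrowablePath) returning a ListenableFuture<Void>), the class and field names are illustrative, and imports are omitted as in the snippets on this page.

// Hypothetical helper, not part of Buck: remembers every ArtifactInfo passed to store()
// so a test can assert on metadata after performUploadToArtifactCache() returns.
class CapturingArtifactCache extends NoopArtifactCache {

  final List<ArtifactInfo> storedInfos = new ArrayList<>();

  @Override
  public boolean isStoreSupported() {
    // The upload path presumably checks this; the test above overrides it to true for the
    // same reason.
    return true;
  }

  @Override
  public ListenableFuture<Void> store(ArtifactInfo info, BorrowablePath output) {
    storedInfos.add(info);
    return Futures.immediateFuture(null);
  }
}

A test would then pass an instance to performUploadToArtifactCache(...) and assert on storedInfos afterwards.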
Use of com.facebook.buck.artifact_cache.ArtifactCache in project buck by facebook.
The class CacheCommand, method runWithoutHelp.
@Override
public int runWithoutHelp(CommandRunnerParams params) throws IOException, InterruptedException {
  params.getBuckEventBus().post(ConsoleEvent.fine("cache command start"));

  if (isNoCache()) {
    params.getBuckEventBus().post(ConsoleEvent.severe("Caching is disabled."));
    return 1;
  }

  List<String> arguments = getArguments();
  if (arguments.isEmpty()) {
    params.getBuckEventBus().post(ConsoleEvent.severe("No cache keys specified."));
    return 1;
  }

  if (outputDir != null) {
    outputPath = Optional.of(Paths.get(outputDir));
    Files.createDirectories(outputPath.get());
  }

  ArtifactCache cache = params.getArtifactCacheFactory().newInstance();

  List<RuleKey> ruleKeys = new ArrayList<>();
  for (String hash : arguments) {
    ruleKeys.add(new RuleKey(hash));
  }

  Path tmpDir = Files.createTempDirectory("buck-cache-command");
  BuildEvent.Started started = BuildEvent.started(getArguments(), false);

  List<ArtifactRunner> results = null;
  try (CommandThreadManager pool =
      new CommandThreadManager("Build", getConcurrencyLimit(params.getBuckConfig()))) {
    WeightedListeningExecutorService executor = pool.getExecutor();

    fakeOutParseEvents(params.getBuckEventBus());

    // Post the build started event, setting it to the Parser recorded start time if appropriate.
    if (params.getParser().getParseStartTime().isPresent()) {
      params.getBuckEventBus().post(started, params.getParser().getParseStartTime().get());
    } else {
      params.getBuckEventBus().post(started);
    }

    // Fetch all artifacts
    List<ListenableFuture<ArtifactRunner>> futures = new ArrayList<>();
    for (RuleKey ruleKey : ruleKeys) {
      futures.add(executor.submit(new ArtifactRunner(ruleKey, tmpDir, cache)));
    }

    // Wait for all executions to complete or fail.
    try {
      results = Futures.allAsList(futures).get();
    } catch (ExecutionException ex) {
      params.getConsole().printBuildFailure("Failed");
      ex.printStackTrace(params.getConsole().getStdErr());
    }
  }
  // If the fetches failed with an ExecutionException above, results is still null; treat that
  // as a failure rather than falling through to a NullPointerException below.
  if (results == null) {
    return 1;
  }

  int totalRuns = results.size();
  String resultString = "";
  int goodRuns = 0;
  for (ArtifactRunner r : results) {
    if (r.completed) {
      goodRuns++;
    }
    resultString += r.resultString;
    if (!outputPath.isPresent()) {
      // Legacy output.
      if (r.completed) {
        params.getConsole().printSuccess(String.format(
            "Successfully downloaded artifact with id %s at %s .", r.ruleKey, r.artifact));
      } else {
        params.getConsole().printErrorText(
            String.format("Failed to retrieve an artifact with id %s.", r.ruleKey));
      }
    }
  }

  int exitCode = (totalRuns == goodRuns) ? 0 : 1;
  params.getBuckEventBus().post(BuildEvent.finished(started, exitCode));

  if (outputPath.isPresent()) {
    if (totalRuns == goodRuns) {
      params.getConsole().printSuccess("Successfully downloaded all artifacts.");
    } else {
      params.getConsole().printErrorText(
          String.format("Downloaded %d of %d artifacts", goodRuns, totalRuns));
    }
    params.getConsole().getStdOut().println(resultString);
  }
  return exitCode;
}
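ArtifactRunner is a Callable defined elsewhere in CacheCommand, so its body is not shown here. Going only by the fetch signature that appears in the mock-based CacheCommandTest further down (fetch(RuleKey, LazyPath) returning a CacheResult), the core of a per-key fetch plausibly looks like the sketch below; the class name, the LazyPath.ofInstance factory, and the CacheResult.getType().isSuccess() check are assumptions, not code quoted from Buck, and imports are omitted as in the snippets on this page.

// Illustrative per-key fetch: download one rule key into tmpDir and report hit/miss.
class FetchOneArtifact implements Callable<Boolean> {
  private final RuleKey ruleKey;
  private final Path tmpDir;
  private final ArtifactCache cache;

  FetchOneArtifact(RuleKey ruleKey, Path tmpDir, ArtifactCache cache) {
    this.ruleKey = ruleKey;
    this.tmpDir = tmpDir;
    this.cache = cache;
  }

  @Override
  public Boolean call() {
    Path artifact = tmpDir.resolve(ruleKey.toString());
    // LazyPath.ofInstance(...) is assumed to wrap an already-known Path; any LazyPath
    // factory over a concrete Path would serve the same purpose.
    CacheResult result = cache.fetch(ruleKey, LazyPath.ofInstance(artifact));
    // Assumed accessor: a hit (or any other success type) counts as a completed run.
    return result.getType().isSuccess();
  }
}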
Use of com.facebook.buck.artifact_cache.ArtifactCache in project buck by facebook.
The class DefaultJavaLibraryIntegrationTest, method testBuildJavaLibraryWithoutSrcsAndVerifyAbi.
@Test
public void testBuildJavaLibraryWithoutSrcsAndVerifyAbi() throws IOException {
  workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "abi", tmp);
  workspace.setUp();
  workspace.enableDirCache();

  // Run `buck build`.
  BuildTarget target = BuildTargetFactory.newInstance("//:no_srcs");
  ProcessResult buildResult = workspace.runBuckCommand("build", target.getFullyQualifiedName());
  buildResult.assertSuccess("Successful build should exit with 0.");
  Path outputPath = BuildTargets.getGenPath(
      filesystem, target, "lib__%s__output/" + target.getShortName() + ".jar");
  Path outputFile = workspace.getPath(outputPath);
  assertTrue(Files.exists(outputFile));

  // TODO(bolinfest): When we produce byte-for-byte identical JAR files across builds, do:
  //
  //   HashCode hashOfOriginalJar = Files.hash(outputFile, Hashing.sha1());
  //
  // And then compare that to the output when //:no_srcs is built again with --no-cache.
  long sizeOfOriginalJar = Files.size(outputFile);

  // This verifies that the ABI key was written correctly.
  workspace.verify();

  // Verify the build cache.
  Path buildCache = workspace.getPath(filesystem.getBuckPaths().getCacheDir());
  assertTrue(Files.isDirectory(buildCache));
  ArtifactCache dirCache = TestArtifactCaches.createDirCacheForTest(workspace.getDestPath(), buildCache);
  int totalArtifactsCount = DirArtifactCacheTestUtil.getAllFilesInCache(dirCache).size();
  assertEquals("There should be two entries (a zip and metadata) in the build cache.", 2, totalArtifactsCount);

  // Run `buck clean`.
  ProcessResult cleanResult = workspace.runBuckCommand("clean");
  cleanResult.assertSuccess("Successful clean should exit with 0.");
  totalArtifactsCount = getAllFilesInPath(buildCache).size();
  assertEquals("The build cache should still exist.", 2, totalArtifactsCount);

  // Corrupt the build cache!
  File artifactZip =
      FluentIterable.from(ImmutableList.copyOf(DirArtifactCacheTestUtil.getAllFilesInCache(dirCache)))
          .toSortedList(Ordering.natural())
          .get(0)
          .toFile();
  FileSystem zipFs = FileSystems.newFileSystem(artifactZip.toPath(), /* loader */ null);
  Path outputInZip = zipFs.getPath("/" + outputPath.toString());
  Files.write(outputInZip, "Hello world!".getBytes(), WRITE);
  zipFs.close();

  // Run `buck build` again.
  ProcessResult buildResult2 = workspace.runBuckCommand("build", target.getFullyQualifiedName());
  buildResult2.assertSuccess("Successful build should exit with 0.");
  assertTrue(Files.isRegularFile(outputFile));
assertEquals("The content of the output file will be 'Hello World!' if it is read from the build cache.", "Hello world!", new String(Files.readAllBytes(outputFile), UTF_8));
// Run `buck clean` followed by `buck build` yet again, but this time, specify `--no-cache`.
ProcessResult cleanResult2 = workspace.runBuckCommand("clean");
cleanResult2.assertSuccess("Successful clean should exit with 0.");
ProcessResult buildResult3 = workspace.runBuckCommand("build", "--no-cache", target.getFullyQualifiedName());
buildResult3.assertSuccess();
assertNotEquals("The contents of the file should no longer be pulled from the corrupted build cache.", "Hello world!", new String(Files.readAllBytes(outputFile), UTF_8));
assertEquals("We cannot do a byte-for-byte comparision with the original JAR because timestamps might " + "have changed, but we verify that they are the same size, as a proxy.", sizeOfOriginalJar, Files.size(outputFile));
}
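The "corrupt the build cache" step is plain JDK: the zip filesystem provider can rewrite an entry of an existing archive in place, which is exactly what the test does to the cached artifact. A self-contained version of just that step, with a placeholder archive path and entry name standing in for the values the test derives from the dir cache:

import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class CorruptZipEntry {
  public static void main(String[] args) throws Exception {
    // Placeholders; in the test above these come from the dir cache and the rule's gen path.
    Path archive = Paths.get("artifact.zip");
    String entryInsideZip = "/some/entry/inside.jar";

    // A null ClassLoader selects the default zip filesystem provider for the archive.
    try (FileSystem zipFs = FileSystems.newFileSystem(archive, (ClassLoader) null)) {
      Path entry = zipFs.getPath(entryInsideZip);
      // WRITE alone overwrites from offset zero without truncating; that is enough for the
      // test, which only needs the replacement bytes to be read back on a cache hit.
      Files.write(entry, "Hello world!".getBytes(), StandardOpenOption.WRITE);
    }
  }
}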
Use of com.facebook.buck.artifact_cache.ArtifactCache in project buck by facebook.
The class Build, method executeBuild.
/**
* If {@code isKeepGoing} is false, then this returns a future that succeeds only if all of
* {@code rulesToBuild} build successfully. Otherwise, this returns a future that should always
* succeed, even if individual rules fail to build. In that case, a failed build rule is indicated
* by a {@code null} value in the corresponding position in the iteration order of
* {@code rulesToBuild}.
* @param targetish The targets to build. All targets in this iterable must be unique.
*/
@SuppressWarnings("PMD.EmptyCatchBlock")
public BuildExecutionResult executeBuild(
    Iterable<? extends BuildTarget> targetish, boolean isKeepGoing)
    throws IOException, ExecutionException, InterruptedException {
  BuildId buildId = executionContext.getBuildId();
  BuildEngineBuildContext buildContext =
      BuildEngineBuildContext.builder()
          .setBuildContext(
              BuildContext.builder()
                  .setActionGraph(actionGraph)
                  .setSourcePathResolver(new SourcePathResolver(new SourcePathRuleFinder(ruleResolver)))
                  .setJavaPackageFinder(javaPackageFinder)
                  .setEventBus(executionContext.getBuckEventBus())
                  .setAndroidPlatformTargetSupplier(executionContext.getAndroidPlatformTargetSupplier())
                  .build())
          .setClock(clock)
          .setArtifactCache(artifactCache)
          .setBuildId(buildId)
          .setObjectMapper(objectMapper)
          .putAllEnvironment(executionContext.getEnvironment())
          .setKeepGoing(isKeepGoing)
          .build();

  // It is important to use this logic to determine the set of rules to build rather than
  // build.getActionGraph().getNodesWithNoIncomingEdges() because, due to graph enhancement,
  // there could be disconnected subgraphs in the DependencyGraph that we do not want to build.
  ImmutableSet<BuildTarget> targetsToBuild =
      StreamSupport.stream(targetish.spliterator(), false).collect(MoreCollectors.toImmutableSet());
  ImmutableList<BuildRule> rulesToBuild =
      ImmutableList.copyOf(
          targetsToBuild.stream()
              .map(buildTarget -> {
                try {
                  return getRuleResolver().requireRule(buildTarget);
                } catch (NoSuchBuildTargetException e) {
                  throw new HumanReadableException("No build rule found for target %s", buildTarget);
                }
              })
              .collect(MoreCollectors.toImmutableSet()));

  // Calculate and post the number of rules that need to be built.
  int numRules = buildEngine.getNumRulesToBuild(rulesToBuild);
  getExecutionContext().getBuckEventBus().post(BuildEvent.ruleCountCalculated(targetsToBuild, numRules));

  // Set up symlinks required when configuring the output path.
  createConfiguredBuckOutSymlinks();

  List<ListenableFuture<BuildResult>> futures =
      rulesToBuild.stream()
          .map(rule -> buildEngine.build(buildContext, executionContext, rule))
          .collect(MoreCollectors.toImmutableList());

  // Get the Future representing the build and then block until everything is built.
  ListenableFuture<List<BuildResult>> buildFuture = Futures.allAsList(futures);
  List<BuildResult> results;
  try {
    results = buildFuture.get();
    if (!isKeepGoing) {
      for (BuildResult result : results) {
        Throwable thrown = result.getFailure();
        if (thrown != null) {
          throw new ExecutionException(thrown);
        }
      }
    }
  } catch (ExecutionException | InterruptedException | RuntimeException e) {
    Throwable t = Throwables.getRootCause(e);
    if (e instanceof InterruptedException
        || t instanceof InterruptedException
        || t instanceof ClosedByInterruptException) {
      try {
        buildFuture.cancel(true);
      } catch (CancellationException ignored) {
        // Rethrow original InterruptedException instead.
      }
      Thread.currentThread().interrupt();
    }
    throw e;
  }

  // Insertion order matters.
  LinkedHashMap<BuildRule, Optional<BuildResult>> resultBuilder = new LinkedHashMap<>();
  Preconditions.checkState(rulesToBuild.size() == results.size());
  for (int i = 0, len = rulesToBuild.size(); i < len; i++) {
    BuildRule rule = rulesToBuild.get(i);
    resultBuilder.put(rule, Optional.ofNullable(results.get(i)));
  }

  return BuildExecutionResult.builder()
      .setFailures(FluentIterable.from(results).filter(input -> input.getSuccess() == null))
      .setResults(resultBuilder)
      .build();
}
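The keep-going contract described in the javadoc means callers have to be prepared for per-rule failures in the results rather than a thrown exception. Below is a minimal sketch of that consumption, using only the accessors the method itself uses (getSuccess() and the possibly-null entries handled above via Optional.ofNullable); the helper name is illustrative and imports are omitted as elsewhere on this page.

// Collect the rules that did not build successfully from a keep-going run. A failed rule can
// show up either as a null entry or as a BuildResult whose getSuccess() is null.
static List<BuildRule> failedRules(List<BuildRule> rulesToBuild, List<BuildResult> results) {
  List<BuildRule> failed = new ArrayList<>();
  for (int i = 0; i < results.size(); i++) {
    BuildResult result = results.get(i);
    if (result == null || result.getSuccess() == null) {
      failed.add(rulesToBuild.get(i));
    }
  }
  return failed;
}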
Use of com.facebook.buck.artifact_cache.ArtifactCache in project buck by facebook.
The class CacheCommandTest, method testRunCommandAndFetchArtifactsSuccessfully.
@Test
public void testRunCommandAndFetchArtifactsSuccessfully() throws IOException, InterruptedException {
  final String ruleKeyHash = "b64009ae3762a42a1651c139ec452f0d18f48e21";

  ArtifactCache cache = createMock(ArtifactCache.class);
  expect(cache.fetch(eq(new RuleKey(ruleKeyHash)), isA(LazyPath.class)))
      .andReturn(CacheResult.hit("http"));

  TestConsole console = new TestConsole();
  CommandRunnerParams commandRunnerParams =
      CommandRunnerParamsForTesting.builder().setConsole(console).setArtifactCache(cache).build();

  replayAll();

  CacheCommand cacheCommand = new CacheCommand();
  cacheCommand.setArguments(ImmutableList.of(ruleKeyHash));
  int exitCode = cacheCommand.run(commandRunnerParams);
  assertEquals(0, exitCode);
  assertThat(
      console.getTextWrittenToStdErr(),
      startsWith("Successfully downloaded artifact with id " + ruleKeyHash + " at "));
}
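A companion test for the miss path can reuse the same mock setup. The sketch below is not taken from Buck's test suite: CacheResult.miss() is assumed to be the miss factory, and the expected exit code of 1 follows from the runWithoutHelp() logic shown earlier, where any run that does not complete makes the command fail.

@Test
public void testRunCommandAndFetchArtifactMiss() throws IOException, InterruptedException {
  final String ruleKeyHash = "b64009ae3762a42a1651c139ec452f0d18f48e21";

  ArtifactCache cache = createMock(ArtifactCache.class);
  // Assumed factory; substitute whatever miss constructor CacheResult actually exposes.
  expect(cache.fetch(eq(new RuleKey(ruleKeyHash)), isA(LazyPath.class)))
      .andReturn(CacheResult.miss());

  TestConsole console = new TestConsole();
  CommandRunnerParams commandRunnerParams =
      CommandRunnerParamsForTesting.builder().setConsole(console).setArtifactCache(cache).build();

  replayAll();

  CacheCommand cacheCommand = new CacheCommand();
  cacheCommand.setArguments(ImmutableList.of(ruleKeyHash));
  // A miss means zero good runs, so the command should exit non-zero.
  assertEquals(1, cacheCommand.run(commandRunnerParams));
}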