Usage of com.google.devtools.build.lib.remote.RemoteProtocol.ContentDigest in the bazelbuild/bazel project: class GrpcActionCache, method downloadAllResults.
/**
 * Download all results of a remotely executed action locally. TODO(olaola): will need to amend to
 * include the {@link com.google.devtools.build.lib.remote.TreeNodeRepository} for updating.
 *
 * @param result the remote action result whose outputs should be materialized locally.
 * @param execRoot the local execution root against which output paths are resolved.
 * @throws IOException if an output file or directory cannot be written.
 * @throws CacheNotFoundException if a requested blob is missing from the remote cache.
 */
@Override
public void downloadAllResults(ActionResult result, Path execRoot)
    throws IOException, CacheNotFoundException {
  // Send all the file requests in a single synchronous batch.
  // TODO(olaola): profile to maybe replace with separate concurrent requests.
  CasDownloadBlobRequest.Builder request = CasDownloadBlobRequest.newBuilder();
  Map<ContentDigest, Pair<Path, FileMetadata>> metadataMap = new HashMap<>();
  for (Output output : result.getOutputList()) {
    Path path = execRoot.getRelative(output.getPath());
    if (output.getContentCase() == ContentCase.FILE_METADATA) {
      FileMetadata fileMetadata = output.getFileMetadata();
      ContentDigest digest = fileMetadata.getDigest();
      if (digest.getSizeBytes() > 0) {
        request.addDigest(digest);
        metadataMap.put(digest, Pair.of(path, fileMetadata));
      } else {
        // Handle empty file locally; no need to fetch zero bytes from the cache.
        FileSystemUtils.createDirectoryAndParents(path.getParentDirectory());
        FileSystemUtils.writeContent(path, new byte[0]);
      }
    } else {
      downloadTree(output.getDigest(), path);
    }
  }
  // Avoid issuing an empty RPC when every output was an empty file or a tree.
  if (metadataMap.isEmpty()) {
    return;
  }
  Iterator<CasDownloadReply> replies = getBlockingStub().downloadBlob(request.build());
  Set<ContentDigest> results = new HashSet<>();
  while (replies.hasNext()) {
    results.add(createFileFromStream(metadataMap, replies));
  }
  // Any digest we requested but never received indicates a missing cache entry.
  for (ContentDigest digest : metadataMap.keySet()) {
    if (!results.contains(digest)) {
      throw new CacheNotFoundException(digest);
    }
  }
}
Usage of com.google.devtools.build.lib.remote.RemoteProtocol.ContentDigest in the bazelbuild/bazel project: class GrpcActionCache, method uploadAllResults.
/**
 * Upload all results of a locally executed action to the cache.
 *
 * @param execRoot the local execution root; output paths are recorded relative to it.
 * @param files the output files to upload; directories are not yet supported.
 * @param result builder into which one Output entry per file is recorded.
 * @throws IOException if a file cannot be read or digested.
 * @throws InterruptedException if the upload is interrupted.
 */
@Override
public void uploadAllResults(Path execRoot, Collection<Path> files, ActionResult.Builder result)
    throws IOException, InterruptedException {
  // Fail fast on directories before doing any digesting or network traffic.
  for (Path file : files) {
    if (file.isDirectory()) {
      // TreeNodeRepository to call uploadTree.
      throw new UnsupportedOperationException("Storing a directory is not yet supported.");
    }
  }
  ArrayList<ContentDigest> digests = new ArrayList<>();
  for (Path file : files) {
    digests.add(ContentDigests.computeDigest(file));
  }
  // Only upload the blobs the cache does not already have.
  ImmutableSet<ContentDigest> missing = getMissingDigests(digests);
  if (!missing.isEmpty()) {
    uploadChunks(missing.size(), new BlobChunkFileIterator(missing, files.iterator()));
  }
  int index = 0;
  for (Path file : files) {
    // Add to protobuf.
    result
        .addOutputBuilder()
        .setPath(file.relativeTo(execRoot).getPathString())
        .getFileMetadataBuilder()
        .setDigest(digests.get(index++))
        .setExecutable(file.isExecutable());
  }
}
Usage of com.google.devtools.build.lib.remote.RemoteProtocol.ContentDigest in the bazelbuild/bazel project: class GrpcActionCache, method uploadTree.
/**
 * Upload enough of the tree metadata and data into remote cache so that the entire tree can be
 * reassembled remotely using the root digest.
 */
@Override
public void uploadTree(TreeNodeRepository repository, Path execRoot, TreeNode root)
    throws IOException, InterruptedException {
  repository.computeMerkleDigests(root);
  // TODO(olaola): avoid querying all the digests, only ask for novel subtrees.
  ImmutableSet<ContentDigest> missingDigests = getMissingDigests(repository.getAllDigests(root));
  // Only upload data that was missing from the cache.
  ArrayList<ActionInput> inputsToUpload = new ArrayList<>();
  ArrayList<FileNode> nodesToUpload = new ArrayList<>();
  repository.getDataFromDigests(missingDigests, inputsToUpload, nodesToUpload);
  if (!nodesToUpload.isEmpty()) {
    // All tree metadata goes up in a single request.
    CasUploadTreeMetadataReply metaReply =
        getBlockingStub()
            .uploadTreeMetadata(
                CasUploadTreeMetadataRequest.newBuilder().addAllTreeNode(nodesToUpload).build());
    if (!metaReply.getStatus().getSucceeded()) {
      throw new RuntimeException(metaReply.getStatus().getErrorDetail());
    }
  }
  if (!inputsToUpload.isEmpty()) {
    // Resolve each input to an absolute path, then stream the file contents in chunks.
    ArrayList<Path> filePaths = new ArrayList<>();
    for (ActionInput input : inputsToUpload) {
      filePaths.add(execRoot.getRelative(input.getExecPathString()));
    }
    uploadChunks(filePaths.size(), new BlobChunkFileIterator(missingDigests, filePaths.iterator()));
  }
}
Usage of com.google.devtools.build.lib.remote.RemoteProtocol.ContentDigest in the bazelbuild/bazel project: class GrpcActionCache, method createFileFromStream.
/**
 * Reassembles one file from a stream of download replies and writes it to local disk.
 *
 * <p>Consumes the first reply for the file (which carries the digest identifying it), then keeps
 * consuming continuation chunks from the same iterator until the full declared size has been
 * written. The target path is looked up in {@code metadataMap} by digest.
 *
 * @param metadataMap maps each requested digest to its destination path and file metadata.
 * @param replies the shared reply iterator; this method advances it past this file's chunks.
 * @return the digest of the file that was written.
 * @throws IOException if the file cannot be written.
 * @throws CacheNotFoundException propagated from handleDownloadStatus for missing blobs.
 */
private ContentDigest createFileFromStream(Map<ContentDigest, Pair<Path, FileMetadata>> metadataMap, Iterator<CasDownloadReply> replies) throws IOException, CacheNotFoundException {
// The caller guarantees at least one reply remains when this is invoked.
Preconditions.checkArgument(replies.hasNext());
CasDownloadReply reply = replies.next();
if (reply.hasStatus()) {
// Translates a non-OK status into CacheNotFoundException (or other errors).
handleDownloadStatus(reply.getStatus());
}
BlobChunk chunk = reply.getData();
// The first chunk of a file carries its digest; continuation chunks do not.
ContentDigest digest = chunk.getDigest();
Preconditions.checkArgument(metadataMap.containsKey(digest));
Pair<Path, FileMetadata> metadata = metadataMap.get(digest);
Path path = metadata.first;
FileSystemUtils.createDirectoryAndParents(path.getParentDirectory());
try (OutputStream stream = path.getOutputStream()) {
ByteString data = chunk.getData();
data.writeTo(stream);
// Track how many bytes of the declared size are still expected.
long bytesLeft = digest.getSizeBytes() - data.size();
while (bytesLeft > 0) {
Preconditions.checkArgument(replies.hasNext());
reply = replies.next();
if (reply.hasStatus()) {
handleDownloadStatus(reply.getStatus());
}
chunk = reply.getData();
data = chunk.getData();
// Continuation chunks must not restate the digest and must arrive in order:
// each chunk's offset equals the number of bytes already written.
Preconditions.checkArgument(!chunk.hasDigest());
Preconditions.checkArgument(chunk.getOffset() == digest.getSizeBytes() - bytesLeft);
data.writeTo(stream);
bytesLeft -= data.size();
}
// Apply the executable bit recorded in the action result's metadata.
path.setExecutable(metadata.second.getExecutable());
}
return digest;
}
Usage of com.google.devtools.build.lib.remote.RemoteProtocol.ContentDigest in the bazelbuild/bazel project: class GrpcActionCache, method uploadBlob.
/**
 * Uploads a single in-memory blob to the cache if it is not already present.
 *
 * @param blob the raw bytes to store.
 * @return the content digest under which the blob is addressable.
 * @throws InterruptedException if the upload is interrupted.
 */
@Override
public ContentDigest uploadBlob(byte[] blob) throws InterruptedException {
  ContentDigest digest = ContentDigests.computeDigest(blob);
  ImmutableSet<ContentDigest> missing = getMissingDigests(ImmutableList.of(digest));
  try {
    if (!missing.isEmpty()) {
      uploadChunks(1, new BlobChunkInlineIterator(blob));
    }
    return digest;
  } catch (IOException e) {
    // An in-memory upload should never hit file I/O; if it somehow does,
    // preserve the cause instead of throwing a bare RuntimeException.
    throw new RuntimeException(e);
  }
}
Aggregations