Use of com.google.devtools.build.lib.remote.RemoteProtocol.FileNode in the bazelbuild/bazel project: class ConcurrentMapActionCache, method downloadTree.
/**
 * Recursively materializes on local disk the tree whose serialized {@link FileNode} is stored in
 * the cache under {@code rootDigest}.
 *
 * <p>If the node carries file metadata, the file contents are fetched into {@code rootLocation}
 * with the recorded executable bit; every child entry is then downloaded under the corresponding
 * relative path.
 *
 * @param rootDigest digest of the serialized {@link FileNode} describing the tree root
 * @param rootLocation local path at which the tree root is materialized
 * @throws IOException on local filesystem failure
 * @throws CacheNotFoundException if a referenced blob is missing from the cache
 */
@Override
public void downloadTree(ContentDigest rootDigest, Path rootLocation)
    throws IOException, CacheNotFoundException {
  FileNode node = FileNode.parseFrom(downloadBlob(rootDigest));
  if (node.hasFileMetadata()) {
    // Leaf: fetch the actual file contents, preserving the executable bit.
    downloadFileContents(
        node.getFileMetadata().getDigest(), rootLocation, node.getFileMetadata().getExecutable());
  }
  // Directory entries: recurse into each child at its relative path.
  for (FileNode.Child entry : node.getChildList()) {
    downloadTree(entry.getDigest(), rootLocation.getRelative(entry.getPath()));
  }
}
Use of com.google.devtools.build.lib.remote.RemoteProtocol.FileNode in the bazelbuild/bazel project: class GrpcActionCache, method uploadTree.
/**
* Upload enough of the tree metadata and data into remote cache so that the entire tree can be
* reassembled remotely using the root digest.
*/
/**
 * Uploads enough of the tree metadata and file data into the remote cache that the entire tree
 * can be reassembled remotely from the root digest alone.
 *
 * <p>Only blobs the cache reports as missing are uploaded: tree-node metadata goes up in a single
 * {@code uploadTreeMetadata} call, file contents via chunked upload.
 *
 * @param repository repository used to compute and look up Merkle digests for the tree
 * @param execRoot execution root against which action-input paths are resolved
 * @param root root of the tree to upload
 * @throws IOException on local filesystem failure while reading file contents
 * @throws InterruptedException if the chunked upload is interrupted
 */
@Override
public void uploadTree(TreeNodeRepository repository, Path execRoot, TreeNode root)
    throws IOException, InterruptedException {
  repository.computeMerkleDigests(root);
  // TODO(olaola): avoid querying all the digests, only ask for novel subtrees.
  ImmutableSet<ContentDigest> novelDigests = getMissingDigests(repository.getAllDigests(root));
  // Resolve the missing digests back into the inputs and tree nodes that must be uploaded.
  ArrayList<ActionInput> inputs = new ArrayList<>();
  ArrayList<FileNode> nodes = new ArrayList<>();
  repository.getDataFromDigests(novelDigests, inputs, nodes);
  if (!nodes.isEmpty()) {
    CasUploadTreeMetadataReply reply =
        getBlockingStub()
            .uploadTreeMetadata(
                CasUploadTreeMetadataRequest.newBuilder().addAllTreeNode(nodes).build());
    if (!reply.getStatus().getSucceeded()) {
      throw new RuntimeException(reply.getStatus().getErrorDetail());
    }
  }
  if (!inputs.isEmpty()) {
    // Map each missing input to its on-disk location and upload the contents in chunks.
    ArrayList<Path> filePaths = new ArrayList<>();
    for (ActionInput input : inputs) {
      filePaths.add(execRoot.getRelative(input.getExecPathString()));
    }
    uploadChunks(filePaths.size(), new BlobChunkFileIterator(novelDigests, filePaths.iterator()));
  }
}
Use of com.google.devtools.build.lib.remote.RemoteProtocol.FileNode in the bazelbuild/bazel project: class TreeNodeRepository, method getOrComputeFileNode.
/**
 * Returns the {@link FileNode} proto for {@code node}, computing and caching it on first use.
 *
 * <p>For a leaf, the node records the file's contents digest and executable bit; for a directory,
 * one child entry per {@link TreeNode.ChildEntry}. On a cache miss this also records the node's
 * own digest in {@code treeNodeDigestCache} and the reverse mapping in {@code digestTreeNodeCache}.
 * Callers must have computed all child digests beforehand.
 *
 * @param node the tree node to serialize
 * @return the cached or freshly built {@link FileNode}
 * @throws IOException if querying the executable bit fails
 */
private synchronized FileNode getOrComputeFileNode(TreeNode node) throws IOException {
  FileNode cached = fileNodeCache.get(node);
  if (cached != null) {
    return cached;
  }
  FileNode.Builder builder = FileNode.newBuilder();
  if (node.isLeaf()) {
    ContentDigest contentsDigest = fileContentsDigestCache.get(node.getActionInput());
    Preconditions.checkState(contentsDigest != null);
    builder
        .getFileMetadataBuilder()
        .setDigest(contentsDigest)
        .setExecutable(
            execRoot.getRelative(node.getActionInput().getExecPathString()).isExecutable());
  } else {
    for (TreeNode.ChildEntry entry : node.getChildEntries()) {
      ContentDigest childDigest = treeNodeDigestCache.get(entry.getChild());
      Preconditions.checkState(childDigest != null);
      builder.addChildBuilder().setPath(entry.getSegment()).setDigest(childDigest);
    }
  }
  FileNode result = builder.build();
  fileNodeCache.put(node, result);
  ContentDigest nodeDigest = ContentDigests.computeDigest(result);
  treeNodeDigestCache.put(node, nodeDigest);
  digestTreeNodeCache.put(nodeDigest, node);
  return result;
}
Use of com.google.devtools.build.lib.remote.RemoteProtocol.FileNode in the bazelbuild/bazel project: class TreeNodeRepositoryTest, method testMerkleDigests.
/**
 * Verifies that Merkle digests computed over a two-file tree cover exactly the root, the
 * directory node, both file nodes, and both file-contents digests, and that
 * {@code getDataFromDigests} returns tree nodes in root-to-leaf order with children sorted by
 * path segment.
 */
@Test
public void testMerkleDigests() throws Exception {
  // Two files under directory "a"; "bar" sorts before "foo".
  Artifact foo = new Artifact(scratch.file("/exec/root/a/foo", "1"), rootDir);
  Artifact bar = new Artifact(scratch.file("/exec/root/a/bar", "11"), rootDir);
  TreeNodeRepository repo = new TreeNodeRepository(rootDir.getPath());
  TreeNode root = repo.buildFromActionInputs(ImmutableList.<ActionInput>of(foo, bar));
  TreeNode dirNode = root.getChildEntries().get(0).getChild();
  TreeNode barNode = dirNode.getChildEntries().get(0).getChild();
  TreeNode fooNode = dirNode.getChildEntries().get(1).getChild();
  repo.computeMerkleDigests(root);

  // Every node digest plus both contents digests must be reported, and nothing else.
  ImmutableCollection<ContentDigest> allDigests = repo.getAllDigests(root);
  ContentDigest rootDigest = repo.getMerkleDigest(root);
  ContentDigest dirDigest = repo.getMerkleDigest(dirNode);
  ContentDigest fooDigest = repo.getMerkleDigest(fooNode);
  ContentDigest fooContentsDigest = ContentDigests.computeDigest(foo.getPath());
  ContentDigest barDigest = repo.getMerkleDigest(barNode);
  ContentDigest barContentsDigest = ContentDigests.computeDigest(bar.getPath());
  assertThat(allDigests)
      .containsExactly(
          rootDigest, dirDigest, barDigest, barContentsDigest, fooDigest, fooContentsDigest);

  // Resolving those digests must yield both inputs and four tree nodes: root, "a", "bar", "foo".
  ArrayList<FileNode> fileNodes = new ArrayList<>();
  ArrayList<ActionInput> actionInputs = new ArrayList<>();
  repo.getDataFromDigests(allDigests, actionInputs, fileNodes);
  assertThat(actionInputs).containsExactly(bar, foo);
  assertThat(fileNodes).hasSize(4);

  FileNode rootFileNode = fileNodes.get(0);
  assertThat(rootFileNode.getChild(0).getPath()).isEqualTo("a");
  assertThat(rootFileNode.getChild(0).getDigest()).isEqualTo(dirDigest);

  FileNode dirFileNode = fileNodes.get(1);
  assertThat(dirFileNode.getChild(0).getPath()).isEqualTo("bar");
  assertThat(dirFileNode.getChild(0).getDigest()).isEqualTo(barDigest);
  assertThat(dirFileNode.getChild(1).getPath()).isEqualTo("foo");
  assertThat(dirFileNode.getChild(1).getDigest()).isEqualTo(fooDigest);

  assertThat(fileNodes.get(2).getFileMetadata().getDigest()).isEqualTo(barContentsDigest);
  assertThat(fileNodes.get(3).getFileMetadata().getDigest()).isEqualTo(fooContentsDigest);
}
Aggregations