Use of com.google.devtools.build.lib.vfs.Path in project bazel by bazelbuild.
Class GrpcActionCache, method uploadAllResults.
/** Upload all results of a locally executed action to the cache. */
@Override
public void uploadAllResults(Path execRoot, Collection<Path> files, ActionResult.Builder result)
    throws IOException, InterruptedException {
  ArrayList<ContentDigest> digests = new ArrayList<>();
  for (Path file : files) {
    digests.add(ContentDigests.computeDigest(file));
  }
  ImmutableSet<ContentDigest> missing = getMissingDigests(digests);
  if (!missing.isEmpty()) {
    uploadChunks(missing.size(), new BlobChunkFileIterator(missing, files.iterator()));
  }
  int index = 0;
  for (Path file : files) {
    if (file.isDirectory()) {
      // TODO(olaola): to implement this for a directory, will need to create or pass a
      // TreeNodeRepository to call uploadTree.
      throw new UnsupportedOperationException("Storing a directory is not yet supported.");
    }
    // Add the file's path, digest, and executable bit to the ActionResult protobuf.
    result
        .addOutputBuilder()
        .setPath(file.relativeTo(execRoot).getPathString())
        .getFileMetadataBuilder()
        .setDigest(digests.get(index++))
        .setExecutable(file.isExecutable());
  }
}
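For orientation, here is a minimal caller sketch (not from the Bazel source) showing how this method might be driven after a local execution. The spawn.getOutputFiles() accessor, the cache variable, and setCachedActionResult/actionKey are assumptions for illustration and may differ from the actual surrounding API.

// Hypothetical caller: collect the action's output files, upload their contents,
// then store the assembled ActionResult under the action key (assumed API).
ActionResult.Builder result = ActionResult.newBuilder();
List<Path> outputs = new ArrayList<>();
for (ActionInput output : spawn.getOutputFiles()) {  // assumed accessor
  outputs.add(execRoot.getRelative(output.getExecPathString()));
}
cache.uploadAllResults(execRoot, outputs, result);
cache.setCachedActionResult(actionKey, result.build());  // assumed method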
Use of com.google.devtools.build.lib.vfs.Path in project bazel by bazelbuild.
Class GrpcActionCache, method uploadTree.
/**
 * Upload enough of the tree metadata and data into remote cache so that the entire tree can be
 * reassembled remotely using the root digest.
 */
@Override
public void uploadTree(TreeNodeRepository repository, Path execRoot, TreeNode root)
    throws IOException, InterruptedException {
  repository.computeMerkleDigests(root);
  // TODO(olaola): avoid querying all the digests, only ask for novel subtrees.
  ImmutableSet<ContentDigest> missingDigests = getMissingDigests(repository.getAllDigests(root));
  // Only upload data that was missing from the cache.
  ArrayList<ActionInput> actionInputs = new ArrayList<>();
  ArrayList<FileNode> treeNodes = new ArrayList<>();
  repository.getDataFromDigests(missingDigests, actionInputs, treeNodes);
  if (!treeNodes.isEmpty()) {
    CasUploadTreeMetadataRequest.Builder metaRequest =
        CasUploadTreeMetadataRequest.newBuilder().addAllTreeNode(treeNodes);
    CasUploadTreeMetadataReply reply = getBlockingStub().uploadTreeMetadata(metaRequest.build());
    if (!reply.getStatus().getSucceeded()) {
      throw new RuntimeException(reply.getStatus().getErrorDetail());
    }
  }
  if (!actionInputs.isEmpty()) {
    ArrayList<Path> paths = new ArrayList<>();
    for (ActionInput actionInput : actionInputs) {
      paths.add(execRoot.getRelative(actionInput.getExecPathString()));
    }
    uploadChunks(paths.size(), new BlobChunkFileIterator(missingDigests, paths.iterator()));
  }
}
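As a rough illustration of where uploadTree fits, here is a hedged caller sketch. The TreeNodeRepository constructor, buildFromActionInputs, getMerkleDigest, and the setInputRootDigest proto field are all assumptions and may not match the actual repository or Action APIs.

// Hypothetical caller: build the input Merkle tree, upload any missing metadata and
// blobs, and reference the tree by its root digest in the remote action (assumed).
TreeNodeRepository repository = new TreeNodeRepository(execRoot);  // assumed constructor
TreeNode inputRoot = repository.buildFromActionInputs(spawn.getInputFiles());  // assumed
cache.uploadTree(repository, execRoot, inputRoot);
ContentDigest rootDigest = repository.getMerkleDigest(inputRoot);  // assumed
actionBuilder.setInputRootDigest(rootDigest);  // assumed proto field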
Use of com.google.devtools.build.lib.vfs.Path in project bazel by bazelbuild.
Class GrpcActionCache, method createFileFromStream.
private ContentDigest createFileFromStream(
    Map<ContentDigest, Pair<Path, FileMetadata>> metadataMap, Iterator<CasDownloadReply> replies)
    throws IOException, CacheNotFoundException {
  Preconditions.checkArgument(replies.hasNext());
  CasDownloadReply reply = replies.next();
  if (reply.hasStatus()) {
    handleDownloadStatus(reply.getStatus());
  }
  BlobChunk chunk = reply.getData();
  ContentDigest digest = chunk.getDigest();
  Preconditions.checkArgument(metadataMap.containsKey(digest));
  Pair<Path, FileMetadata> metadata = metadataMap.get(digest);
  Path path = metadata.first;
  FileSystemUtils.createDirectoryAndParents(path.getParentDirectory());
  try (OutputStream stream = path.getOutputStream()) {
    ByteString data = chunk.getData();
    data.writeTo(stream);
    long bytesLeft = digest.getSizeBytes() - data.size();
    while (bytesLeft > 0) {
      Preconditions.checkArgument(replies.hasNext());
      reply = replies.next();
      if (reply.hasStatus()) {
        handleDownloadStatus(reply.getStatus());
      }
      chunk = reply.getData();
      data = chunk.getData();
      Preconditions.checkArgument(!chunk.hasDigest());
      Preconditions.checkArgument(chunk.getOffset() == digest.getSizeBytes() - bytesLeft);
      data.writeTo(stream);
      bytesLeft -= data.size();
    }
    path.setExecutable(metadata.second.getExecutable());
  }
  return digest;
}
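To show how this helper is typically driven, here is a hedged sketch of a download loop. The CasDownloadBlobRequest message and the downloadBlob stub call are assumptions inferred from the reply type used above, not confirmed API.

// Hypothetical driver: map each expected digest to its target path and metadata,
// then let createFileFromStream consume the streamed replies one file at a time.
Map<ContentDigest, Pair<Path, FileMetadata>> metadataMap = new HashMap<>();
for (Output output : result.getOutputList()) {
  Path path = execRoot.getRelative(output.getPath());
  metadataMap.put(output.getFileMetadata().getDigest(), Pair.of(path, output.getFileMetadata()));
}
CasDownloadBlobRequest request =
    CasDownloadBlobRequest.newBuilder().addAllDigest(metadataMap.keySet()).build();  // assumed
Iterator<CasDownloadReply> replies = getBlockingStub().downloadBlob(request);  // assumed
while (replies.hasNext()) {
  createFileFromStream(metadataMap, replies);
}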
Use of com.google.devtools.build.lib.vfs.Path in project bazel by bazelbuild.
Class RemoteWorker, method main.
public static void main(String[] args) throws Exception {
  OptionsParser parser =
      OptionsParser.newOptionsParser(RemoteOptions.class, RemoteWorkerOptions.class);
  parser.parseAndExitUponError(args);
  RemoteOptions remoteOptions = parser.getOptions(RemoteOptions.class);
  RemoteWorkerOptions remoteWorkerOptions = parser.getOptions(RemoteWorkerOptions.class);
  if (remoteWorkerOptions.workPath == null) {
    printUsage(parser);
    return;
  }
  System.out.println("*** Initializing in-memory cache server.");
  ConcurrentMap<String, byte[]> cache =
      ConcurrentMapFactory.isRemoteCacheOptions(remoteOptions)
          ? ConcurrentMapFactory.create(remoteOptions)
          : new ConcurrentHashMap<String, byte[]>();
  System.out.println(
      "*** Starting grpc server on all locally bound IPs on port "
          + remoteWorkerOptions.listenPort
          + ".");
  Path workPath = getFileSystem().getPath(remoteWorkerOptions.workPath);
  FileSystemUtils.createDirectoryAndParents(workPath);
  RemoteWorker worker =
      new RemoteWorker(workPath, remoteWorkerOptions, new ConcurrentMapActionCache(cache));
  final Server server =
      ServerBuilder.forPort(remoteWorkerOptions.listenPort)
          .addService(worker.getCasServer())
          .addService(worker.getExecutionServer())
          .addService(worker.getExecCacheServer())
          .build();
  server.start();
  final Path pidFile;
  if (remoteWorkerOptions.pidFile != null) {
    pidFile = getFileSystem().getPath(remoteWorkerOptions.pidFile);
    PrintWriter writer = new PrintWriter(pidFile.getOutputStream());
    writer.append(Integer.toString(ProcessUtils.getpid()));
    writer.append("\n");
    writer.close();
  } else {
    pidFile = null;
  }
  Runtime.getRuntime()
      .addShutdownHook(
          new Thread() {
            @Override
            public void run() {
              System.err.println("*** Shutting down grpc server.");
              server.shutdown();
              if (pidFile != null) {
                try {
                  pidFile.delete();
                } catch (IOException e) {
                  System.err.println("Cannot remove pid file: " + pidFile.toString());
                }
              }
              System.err.println("*** Server shut down.");
            }
          });
  server.awaitTermination();
}
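For completeness, here is a minimal client-side sketch (not from the Bazel source) that connects to a worker started as above. The CasServiceGrpc stub name is an assumption based on the services registered on the server, and usePlaintext(true) reflects the older gRPC Java API.

// Hypothetical client: open a plaintext gRPC channel to the local worker and
// create a blocking stub for the CAS service (service/stub names assumed).
ManagedChannel channel =
    ManagedChannelBuilder.forAddress("localhost", remoteWorkerOptions.listenPort)
        .usePlaintext(true)
        .build();
CasServiceGrpc.CasServiceBlockingStub casStub = CasServiceGrpc.newBlockingStub(channel);  // assumed
// ... issue CAS upload/download requests against casStub ...
channel.shutdownNow();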
Use of com.google.devtools.build.lib.vfs.Path in project bazel by bazelbuild.
Class BuildViewTestBase, method runTestDepOnGoodTargetInBadPkgAndTransitiveCycle.
protected void runTestDepOnGoodTargetInBadPkgAndTransitiveCycle(boolean incremental)
    throws Exception {
  reporter.removeHandler(failFastHandler);
  scratch.file(
      "parent/BUILD",
      "sh_library(name = 'foo',",
      " srcs = ['//badpkg:okay-target', '//okaypkg:transitively-a-cycle'])");
  Path symlinkcycleBuildFile =
      scratch.file("symlinkcycle/BUILD", "sh_library(name = 'cycle', srcs = glob(['*.sh']))");
  Path dirPath = symlinkcycleBuildFile.getParentDirectory();
  dirPath.getRelative("foo.sh").createSymbolicLink(new PathFragment("foo.sh"));
  scratch.file(
      "okaypkg/BUILD",
      "sh_library(name = 'transitively-a-cycle',",
      " srcs = ['//symlinkcycle:cycle'])");
  Path badpkgBuildFile =
      scratch.file("badpkg/BUILD", "exports_files(['okay-target'])", "invalidbuildsyntax");
  if (incremental) {
    update(defaultFlags().with(Flag.KEEP_GOING), "//okaypkg:transitively-a-cycle");
    assertContainsEvent("circular symlinks detected");
    eventCollector.clear();
  }
  update(defaultFlags().with(Flag.KEEP_GOING), "//parent:foo");
  assertEquals(1, getFrequencyOfErrorsWithLocation(badpkgBuildFile.asFragment(), eventCollector));
  // TODO(nharmata): This test currently only works because each BuildViewTest#update call
  // dirties all FileNodes that are in error. There is actually a skyframe bug with cycle
  // reporting on incremental builds (see b/14622820).
  assertContainsEvent("circular symlinks detected");
}
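The cycle in this test comes from foo.sh linking to itself, which the glob in symlinkcycle/BUILD then trips over. Below is a small standalone sketch of the same setup, assuming an in-memory vfs implementation (InMemoryFileSystem and its no-arg constructor are assumptions for illustration).

// Self-referential symlink sketch; InMemoryFileSystem is an assumed test filesystem.
FileSystem fs = new InMemoryFileSystem();
Path dir = fs.getPath("/workspace/symlinkcycle");
FileSystemUtils.createDirectoryAndParents(dir);
Path link = dir.getRelative("foo.sh");
link.createSymbolicLink(new PathFragment("foo.sh"));  // foo.sh -> foo.sh, a cycle
// Resolving the link (e.g. via link.resolveSymbolicLinks()) would now fail with a
// circular-symlink error, which is what the assertions above check for.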