Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild.
The class Executor, method executeActions.
/**
 * Submits an Execute request for every action digest, waits for all of them to
 * finish, then prints a tally of responses grouped by gRPC status code.
 *
 * @param instanceName remote-execution instance the actions run against
 * @param actionDigests digests of the actions to execute
 * @param execStub stub used to issue the Execute calls
 * @throws InterruptedException if interrupted while pacing submissions or waiting
 */
static void executeActions(String instanceName, List<Digest> actionDigests, ExecutionStub execStub) throws InterruptedException {
  ScheduledExecutorService service = newSingleThreadScheduledExecutor();
  // One tally slot per status code number; observers bump the slot matching
  // each response's code.
  AtomicInteger[] tallies = new AtomicInteger[18];
  for (int code = 0; code < tallies.length; ++code) {
    tallies[code] = new AtomicInteger(0);
  }
  AtomicLong remaining = new AtomicLong(actionDigests.size());
  for (Digest actionDigest : actionDigests) {
    ExecutionObserver observer = new ExecutionObserver(remaining, tallies, execStub, instanceName, actionDigest, service);
    observer.execute();
    // brief pause between submissions to avoid flooding the frontend
    MICROSECONDS.sleep(1);
  }
  // Observers decrement the counter as executions complete; poll until all done.
  while (remaining.get() != 0) {
    SECONDS.sleep(1);
  }
  for (int code = 0; code < tallies.length; ++code) {
    int count = tallies[code].get();
    if (count != 0) {
      System.out.println("Status " + Code.forNumber(code) + " : " + count + " responses");
    }
  }
  shutdownAndAwaitTermination(service, 1, SECONDS);
}
Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild.
The class Extract, method directoryGetter.
/**
 * Returns a task that fetches the {@link Directory} blob for {@code digest} into
 * {@code root} and recursively schedules retrieval of its files and
 * subdirectories on {@code executor}.
 *
 * <p>The caller is expected to have incremented {@code outstandingOperations}
 * before scheduling this task; the task decrements it exactly once when it
 * finishes, whether or not it succeeded.
 *
 * <p>NOTE(review): {@code visitedDigests}/{@code visitedDirectories} are checked
 * then mutated without synchronization — safe only if the executor is effectively
 * single-threaded, as the inline comment below assumes.
 *
 * @param root directory files are extracted under
 * @param instanceName remote instance name used for blob reads
 * @param digest digest of the Directory message to fetch
 * @param visitedDirectories digests already handled as directories
 * @param visitedDigests all digests already scheduled for retrieval
 * @param bsStub ByteStream stub used to read blobs
 * @param executor executor onto which child fetch tasks are submitted
 * @param outstandingOperations counter of in-flight fetch tasks
 * @param retryService scheduler used by blob fetches for retries
 */
static Runnable directoryGetter(Path root, String instanceName, Digest digest, Set<Digest> visitedDirectories, Set<Digest> visitedDigests, ByteStreamStub bsStub, Executor executor, AtomicLong outstandingOperations, ListeningScheduledExecutorService retryService) {
  return new Runnable() {
    @Override
    public void run() {
      try {
        runInterruptibly();
      } catch (InterruptedException e) {
        // Restore the interrupt flag so the executor's worker thread (and any
        // caller polling outstandingOperations) can observe the interruption;
        // swallowing it here would silently cancel the shutdown signal.
        Thread.currentThread().interrupt();
        e.printStackTrace();
      } finally {
        // Balance the increment performed by whoever scheduled this task.
        outstandingOperations.getAndDecrement();
      }
    }

    // Fetches and parses the Directory blob, then walks its children.
    void runInterruptibly() throws InterruptedException {
      try {
        ByteString content = getBlobIntoFile("directory", instanceName, digest, bsStub, root);
        handleDirectory(Directory.parseFrom(content));
      } catch (IOException e) {
        // best-effort extraction: report and continue with other operations
        e.printStackTrace();
      }
    }

    // Schedules fetches for each not-yet-seen file and subdirectory,
    // incrementing outstandingOperations once per scheduled task.
    void handleDirectory(Directory directory) {
      for (FileNode fileNode : directory.getFilesList()) {
        Digest fileDigest = fileNode.getDigest();
        if (!visitedDigests.contains(fileDigest)) {
          visitedDigests.add(fileDigest);
          outstandingOperations.getAndIncrement();
          executor.execute(blobGetter(root, instanceName, fileDigest, bsStub, outstandingOperations, retryService));
        }
      }
      for (DirectoryNode directoryNode : directory.getDirectoriesList()) {
        Digest directoryDigest = directoryNode.getDigest();
        // we may have seen this digest, but now we will have seen it as a directory
        if (!visitedDirectories.contains(directoryDigest)) {
          // probably won't collide with other writers, with single thread
          visitedDigests.add(directoryDigest);
          visitedDirectories.add(directoryDigest);
          outstandingOperations.getAndIncrement();
          executor.execute(directoryGetter(root, instanceName, directoryDigest, visitedDirectories, visitedDigests, bsStub, executor, outstandingOperations, retryService));
        }
      }
    }
  };
}
Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild.
The class OperationQueueWorkerContext, method uploadManifest.
/**
 * Uploads every blob named by the manifest through the ByteStream uploader.
 *
 * <p>Each digest is resolved to a {@link Chunker} either from its backing file
 * or from the manifest's pre-built chunkers, keyed by hash for the uploader.
 *
 * @param manifest describes the digests and their file/chunker sources
 * @param uploader uploader used to transfer the collected blobs
 * @throws IOException if a digest has neither a file nor a chunker
 * @throws InterruptedException if the upload is interrupted
 */
private static void uploadManifest(UploadManifest manifest, ByteStreamUploader uploader) throws IOException, InterruptedException {
  Map<Digest, Path> digestToFile = manifest.getDigestToFile();
  Map<Digest, Chunker> digestToChunkers = manifest.getDigestToChunkers();
  // union of all digests the manifest knows about, from either source
  Collection<Digest> allDigests = new ArrayList<>(digestToFile.keySet());
  allDigests.addAll(digestToChunkers.keySet());
  Map<HashCode, Chunker> filesToUpload = Maps.newHashMap();
  for (Digest digest : allDigests) {
    Path file = digestToFile.get(digest);
    Chunker chunker;
    if (file == null) {
      // not file-backed: the manifest must have supplied a chunker directly
      chunker = digestToChunkers.get(digest);
      if (chunker == null) {
        String message = "FindMissingBlobs call returned an unknown digest: " + digest;
        throw new IOException(message);
      }
    } else {
      chunker = Chunker.builder().setInput(digest.getSizeBytes(), file).build();
    }
    filesToUpload.put(HashCode.fromString(digest.getHash()), chunker);
  }
  if (!filesToUpload.isEmpty()) {
    uploader.uploadBlobs(filesToUpload);
  }
}
Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild.
The class OperationQueueWorkerContext, method createExecDir.
/**
 * Builds the on-disk execution directory for an operation: fetches the action's
 * input tree under {@code root/operationName}, stamps the command's declared
 * output directories, and optionally re-owns the tree.
 *
 * <p>Rollback protocol: if input fetching fails, the file-cache references
 * acquired so far are released; if stamping fails, the partially built
 * directory is destroyed. Only a fully prepared directory is returned.
 *
 * @param operationName name of the operation; becomes the directory name under root
 * @param directoriesIndex digest-to-Directory index for the input tree
 * @param action action whose input root is materialized
 * @param command command whose outputs/environment drive directory stamping
 * @return path of the prepared execution directory
 * @throws IOException if directory creation, fetching, or stamping fails
 * @throws InterruptedException if input fetching is interrupted
 */
@Override
public Path createExecDir(String operationName, Map<Digest, Directory> directoriesIndex, Action action, Command command) throws IOException, InterruptedException {
  OutputDirectory outputDirectory = OutputDirectory.parse(command.getOutputFilesList(), command.getOutputDirectoriesList(), command.getEnvironmentVariablesList());
  Path execDir = root.resolve(operationName);
  // start from a clean slate if a previous attempt left a directory behind
  if (Files.exists(execDir)) {
    Directories.remove(execDir);
  }
  Files.createDirectories(execDir);
  ImmutableList.Builder<String> inputFiles = new ImmutableList.Builder<>();
  ImmutableList.Builder<Digest> inputDirectories = new ImmutableList.Builder<>();
  boolean fetched = false;
  try {
    fetchInputs(execDir, action.getInputRootDigest(), directoriesIndex, outputDirectory, inputFiles, inputDirectories);
    fetched = true;
  } finally {
    // on failure, release the cache references taken for inputs fetched so far
    if (!fetched) {
      fileCache.decrementReferences(inputFiles.build(), inputDirectories.build());
    }
  }
  // record what this exec dir holds so destroyExecDir can release it later
  rootInputFiles.put(execDir, inputFiles.build());
  rootInputDirectories.put(execDir, inputDirectories.build());
  boolean stamped = false;
  try {
    outputDirectory.stamp(execDir);
    stamped = true;
  } finally {
    // stamping failed: tear down the directory and drop its references
    if (!stamped) {
      destroyExecDir(execDir);
    }
  }
  // owner is optional; when configured, the whole tree is chowned to it
  if (owner != null) {
    Directories.setAllOwner(execDir, owner);
  }
  return execDir;
}
Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild.
The class OperationQueueWorkerContext, method fetchInputs.
/**
 * Recursively materializes the input tree rooted at {@code inputRoot} into
 * {@code execDir}, pulling files through the file cache and either recursing
 * into or linking each subdirectory.
 *
 * @param execDir directory the inputs are placed under
 * @param inputRoot digest of the Directory to materialize (empty digest means
 *     an empty directory)
 * @param directoriesIndex digest-to-Directory index for lookups
 * @param outputDirectory output-directory tree node for this level, or null
 * @param inputFiles accumulates cache keys of fetched files for later release
 * @param inputDirectories accumulates digests of linked directories
 * @throws IOException if the root is missing from the index or an fs op fails
 * @throws InterruptedException if a cache fetch is interrupted
 */
private void fetchInputs(Path execDir, Digest inputRoot, Map<Digest, Directory> directoriesIndex, OutputDirectory outputDirectory, ImmutableList.Builder<String> inputFiles, ImmutableList.Builder<Digest> inputDirectories) throws IOException, InterruptedException {
  // an empty digest denotes the canonical empty directory
  Directory directory = inputRoot.getSizeBytes() == 0
      ? Directory.getDefaultInstance()
      : directoriesIndex.get(inputRoot);
  if (directory == null) {
    throw new IOException("Directory " + DigestUtil.toString(inputRoot) + " is not in input index");
  }
  // fetch this level's files (and symlinks) through the file cache, blocking
  // until they are all in place
  getInterruptiblyOrIOException(allAsList(fileCache.putFiles(directory.getFilesList(), directory.getSymlinksList(), execDir, inputFiles, newDirectExecutorService())));
  for (DirectoryNode dirNode : directory.getDirectoriesList()) {
    Digest childDigest = dirNode.getDigest();
    String childName = dirNode.getName();
    OutputDirectory childOutputDirectory =
        outputDirectory == null ? null : outputDirectory.getChild(childName);
    Path childPath = execDir.resolve(childName);
    if (childOutputDirectory == null && config.getLinkInputDirectories()) {
      // plain input directory with linking enabled: link it wholesale
      inputDirectories.add(childDigest);
      linkDirectory(childPath, childDigest, directoriesIndex);
    } else {
      // output-bearing (or linking disabled): build a real directory and recurse
      Files.createDirectories(childPath);
      fetchInputs(childPath, childDigest, directoriesIndex, childOutputDirectory, inputFiles, inputDirectories);
    }
  }
}
Aggregations