Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild:
class ShardWorkerContext, method uploadOutputFile.
/**
 * Records one declared output file on {@code resultBuilder} and uploads its content.
 *
 * <p>Silently skips paths that do not exist; reports a precondition violation (rather than
 * throwing) when the path is a directory or its size exceeds the storage entry limit, so the
 * caller can aggregate all violations before failing the action.
 *
 * @param resultBuilder action result being assembled; receives an OutputFile entry on success
 * @param outputFile exec-root-relative path of the declared output
 * @param actionRoot root directory the action executed under
 * @param preconditionFailure accumulator for violations discovered while reporting outputs
 * @throws IOException if sizing, digesting, or uploading the file fails unexpectedly
 * @throws InterruptedException if the upload is interrupted
 */
private void uploadOutputFile(
    ActionResult.Builder resultBuilder,
    String outputFile,
    Path actionRoot,
    PreconditionFailure.Builder preconditionFailure)
    throws IOException, InterruptedException {
  Path filePath = actionRoot.resolve(outputFile);

  // A missing output is not an error here; the action may legitimately not produce it.
  if (!Files.exists(filePath)) {
    logger.log(Level.FINE, "ReportResultStage: " + outputFile + " does not exist...");
    return;
  }

  // Directories declared as output *files* are a client-side specification error.
  if (Files.isDirectory(filePath)) {
    logger.log(Level.FINE, "ReportResultStage: " + outputFile + " is a directory");
    preconditionFailure
        .addViolationsBuilder()
        .setType(VIOLATION_TYPE_INVALID)
        .setSubject(outputFile)
        .setDescription("An output file was a directory");
    return;
  }

  long fileSize = Files.size(filePath);
  long entrySizeLimit = execFileSystem.getStorage().maxEntrySize();
  if (entrySizeLimit != UNLIMITED_ENTRY_SIZE_MAX && fileSize > entrySizeLimit) {
    preconditionFailure
        .addViolationsBuilder()
        .setType(VIOLATION_TYPE_MISSING)
        .setSubject(outputFile + ": " + fileSize)
        .setDescription(
            "An output could not be uploaded because it exceeded the maximum size of an entry");
    return;
  }

  // will run into issues if we end up blocking on the cache insertion, might
  // want to decrement input references *before* this to ensure that we cannot
  // cause an internal deadlock
  Digest contentDigest;
  try {
    contentDigest = getDigestUtil().compute(filePath);
  } catch (NoSuchFileException e) {
    // File vanished between the existence check and digesting — treat like a missing output.
    return;
  }

  resultBuilder
      .addOutputFilesBuilder()
      .setPath(outputFile)
      .setDigest(contentDigest)
      .setIsExecutable(Files.isExecutable(filePath));

  try {
    insertFile(contentDigest, filePath);
  } catch (EntryLimitException e) {
    // The storage rejected the blob despite the earlier size screen; surface as a violation.
    preconditionFailure
        .addViolationsBuilder()
        .setType(VIOLATION_TYPE_MISSING)
        .setSubject("blobs/" + DigestUtil.toString(contentDigest))
        .setDescription(
            "An output could not be uploaded because it exceeded the maximum size of an entry");
  }
}
Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild:
class GrpcCASTest, method writeIsResumable.
@Test
public void writeIsResumable() throws Exception {
  // Content split across two independent Write instances for the same digest/uuid;
  // the server-side writer should observe the concatenation.
  ByteString writeContent = ByteString.copyFromUtf8("written");
  Digest digest = DIGEST_UTIL.compute(writeContent);
  UUID uuid = UUID.randomUUID();
  String instanceName = "test";
  HashCode hash = HashCode.fromString(digest.getHash());
  String resourceName =
      ByteStreamUploader.uploadResourceName(instanceName, uuid, hash, digest.getSizeBytes());

  // A fuller test would stand up a complete gRPC CAS in-process and validate its state.
  SettableFuture<ByteString> content = SettableFuture.create();
  serviceRegistry.addService(
      new ByteStreamServiceWriter(resourceName, content, (int) digest.getSizeBytes()));

  Channel channel = InProcessChannelBuilder.forName(fakeServerName).directExecutor().build();
  GrpcCAS cas = new GrpcCAS(instanceName, channel, /* uploader=*/ null, onExpirations);
  RequestMetadata requestMetadata = RequestMetadata.getDefaultInstance();

  // First write sends only a prefix of the content.
  Write initialWrite = cas.getWrite(digest, uuid, requestMetadata);
  try (OutputStream writeOut = initialWrite.getOutput(1, SECONDS, () -> {})) {
    writeContent.substring(0, 4).writeTo(writeOut);
  }

  // A second Write for the same resource resumes where the first left off.
  Write finalWrite = cas.getWrite(digest, uuid, requestMetadata);
  try (OutputStream writeOut = finalWrite.getOutput(1, SECONDS, () -> {})) {
    writeContent.substring(4).writeTo(writeOut);
  }

  assertThat(content.get(1, TimeUnit.SECONDS)).isEqualTo(writeContent);
}
Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild:
class GrpcCASTest, method putAddsExpiration.
@Test
public void putAddsExpiration() throws IOException, InterruptedException {
  // Putting a blob should both hand it to the uploader and register the
  // caller's expiration callback under the blob's digest.
  ByteString uploadContent = ByteString.copyFromUtf8("uploaded");
  Digest digest = DIGEST_UTIL.compute(uploadContent);

  ListMultimap<Digest, Runnable> onExpirations =
      MultimapBuilder.hashKeys().arrayListValues().build();
  ByteStreamUploader uploader = mock(ByteStreamUploader.class);
  Channel channel = InProcessChannelBuilder.forName(fakeServerName).directExecutor().build();
  GrpcCAS cas = new GrpcCAS("test", channel, uploader, onExpirations);

  Runnable onExpiration = mock(Runnable.class);
  cas.put(new Blob(uploadContent, digest), onExpiration);

  verify(uploader, times(1))
      .uploadBlob(eq(HashCode.fromString(digest.getHash())), any(Chunker.class));
  assertThat(onExpirations.get(digest)).containsExactly(onExpiration);
  // The callback itself must not fire as a side effect of put.
  verifyZeroInteractions(onExpiration);
}
Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild:
class MemoryWriteOutputStreamTest, method asyncWriteCompletionIsComplete.
@Test
public void asyncWriteCompletionIsComplete() throws IOException {
  // If the backing future completes out-of-band (e.g. another writer finished
  // the blob), the Write must report completion even though this stream only
  // ever received a prefix of the content.
  ByteString content = ByteString.copyFromUtf8("Hello, World!");
  Digest digest = DIGEST_UTIL.compute(content);
  ContentAddressableStorage cas = mock(ContentAddressableStorage.class);
  SettableFuture<ByteString> writtenFuture = SettableFuture.create();
  Write write = new MemoryWriteOutputStream(cas, digest, writtenFuture);

  // Write only the first 6 bytes through this stream.
  content.substring(0, 6).writeTo(write.getOutput(1, TimeUnit.SECONDS, () -> {}));
  // Complete the blob externally.
  writtenFuture.set(content);

  assertThat(write.isComplete()).isTrue();
  assertThat(write.getCommittedSize()).isEqualTo(digest.getSizeBytes());
  // Completion via the future must not have touched the CAS.
  verifyZeroInteractions(cas);
}
Use of com.google.cloud.kms.v1.Digest in project bazel-buildfarm by bazelbuild:
class CASFileCacheTest, method putDirectoryIOExceptionRollsBack.
@Test
public void putDirectoryIOExceptionRollsBack() throws IOException, InterruptedException {
  // Build a directory tree whose file content is deliberately absent from blobs,
  // so materializing it must fail with an IOException mid-way.
  ByteString file = ByteString.copyFromUtf8("Peanut Butter");
  Digest fileDigest = DIGEST_UTIL.compute(file);
  // omitting blobs.put to incur IOException
  Directory subDirectory = Directory.getDefaultInstance();
  Digest subdirDigest = DIGEST_UTIL.compute(subDirectory);
  Directory directory =
      Directory.newBuilder()
          .addFiles(FileNode.newBuilder().setName("file").setDigest(fileDigest).build())
          .addDirectories(
              DirectoryNode.newBuilder().setName("subdir").setDigest(subdirDigest).build())
          .build();
  Digest dirDigest = DIGEST_UTIL.compute(directory);
  Map<Digest, Directory> directoriesIndex =
      ImmutableMap.of(
          dirDigest, directory,
          subdirDigest, subDirectory);

  boolean sawPutDirectoryException = false;
  try {
    getInterruptiblyOrIOException(fileCache.putDirectory(dirDigest, directoriesIndex, putService));
  } catch (PutDirectoryException e) {
    sawPutDirectoryException = true;
  }

  assertThat(sawPutDirectoryException).isTrue();
  // The failed insertion must leave no partial directory behind.
  assertThat(Files.exists(fileCache.getDirectoryPath(dirDigest))).isFalse();
}
Aggregations