Use of com.google.devtools.build.lib.remote.RemoteProtocol.BlobChunk in project bazel by bazelbuild.
In class GrpcActionCache, the method createFileFromStream:
private ContentDigest createFileFromStream(
    Map<ContentDigest, Pair<Path, FileMetadata>> metadataMap, Iterator<CasDownloadReply> replies)
    throws IOException, CacheNotFoundException {
  Preconditions.checkArgument(replies.hasNext());
  CasDownloadReply reply = replies.next();
  if (reply.hasStatus()) {
    handleDownloadStatus(reply.getStatus());
  }
  BlobChunk chunk = reply.getData();
  ContentDigest digest = chunk.getDigest();
  Preconditions.checkArgument(metadataMap.containsKey(digest));
  Pair<Path, FileMetadata> metadata = metadataMap.get(digest);
  Path path = metadata.first;
  FileSystemUtils.createDirectoryAndParents(path.getParentDirectory());
  try (OutputStream stream = path.getOutputStream()) {
    ByteString data = chunk.getData();
    data.writeTo(stream);
    long bytesLeft = digest.getSizeBytes() - data.size();
    while (bytesLeft > 0) {
      Preconditions.checkArgument(replies.hasNext());
      reply = replies.next();
      if (reply.hasStatus()) {
        handleDownloadStatus(reply.getStatus());
      }
      chunk = reply.getData();
      data = chunk.getData();
      Preconditions.checkArgument(!chunk.hasDigest());
      Preconditions.checkArgument(chunk.getOffset() == digest.getSizeBytes() - bytesLeft);
      data.writeTo(stream);
      bytesLeft -= data.size();
    }
    path.setExecutable(metadata.second.getExecutable());
  }
  return digest;
}
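A minimal calling sketch for createFileFromStream, assuming the same CasDownloadBlobRequest and getBlockingStub() plumbing that downloadBlobs uses below. The method downloadFiles and the way its metadata map is populated are illustrative placeholders, not the actual Bazel call site.

// Illustrative only: downloadFiles is a hypothetical caller. It relies on
// createFileFromStream consuming exactly the replies that belong to one file
// per invocation and returning that file's digest.
private void downloadFiles(Map<ContentDigest, Pair<Path, FileMetadata>> metadataMap)
    throws IOException, CacheNotFoundException {
  CasDownloadBlobRequest.Builder request = CasDownloadBlobRequest.newBuilder();
  for (ContentDigest digest : metadataMap.keySet()) {
    request.addDigest(digest);
  }
  Iterator<CasDownloadReply> replies = getBlockingStub().downloadBlob(request.build());
  while (replies.hasNext()) {
    createFileFromStream(metadataMap, replies); // writes one complete file to disk
  }
}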
Use of com.google.devtools.build.lib.remote.RemoteProtocol.BlobChunk in project bazel by bazelbuild.
In class GrpcActionCache, the method downloadBlobs:
@Override
public ImmutableList<byte[]> downloadBlobs(Iterable<ContentDigest> digests)
    throws CacheNotFoundException {
  // Send all the file requests in a single synchronous batch.
  // TODO(olaola): profile to maybe replace with separate concurrent requests.
  CasDownloadBlobRequest.Builder request = CasDownloadBlobRequest.newBuilder();
  for (ContentDigest digest : digests) {
    if (digest.getSizeBytes() > 0) {
      // We handle empty blobs locally.
      request.addDigest(digest);
    }
  }
  Iterator<CasDownloadReply> replies = null;
  Map<ContentDigest, byte[]> results = new HashMap<>();
  int digestCount = request.getDigestCount();
  if (digestCount > 0) {
    replies = getBlockingStub().downloadBlob(request.build());
    while (digestCount-- > 0) {
      Preconditions.checkArgument(replies.hasNext());
      CasDownloadReply reply = replies.next();
      if (reply.hasStatus()) {
        handleDownloadStatus(reply.getStatus());
      }
      BlobChunk chunk = reply.getData();
      ContentDigest digest = chunk.getDigest();
      // This is not enough, but better than nothing.
      Preconditions.checkArgument(digest.getSizeBytes() / 1000.0 < MAX_MEMORY_KBYTES);
      byte[] result = new byte[(int) digest.getSizeBytes()];
      ByteString data = chunk.getData();
      data.copyTo(result, 0);
      int offset = data.size();
      while (offset < result.length) {
        Preconditions.checkArgument(replies.hasNext());
        reply = replies.next();
        if (reply.hasStatus()) {
          handleDownloadStatus(reply.getStatus());
        }
        chunk = reply.getData();
        Preconditions.checkArgument(!chunk.hasDigest());
        Preconditions.checkArgument(chunk.getOffset() == offset);
        data = chunk.getData();
        data.copyTo(result, offset);
        offset += data.size();
      }
      results.put(digest, result);
    }
  }
  ArrayList<byte[]> result = new ArrayList<>();
  for (ContentDigest digest : digests) {
    if (digest.getSizeBytes() == 0) {
      result.add(new byte[0]);
      continue;
    }
    if (!results.containsKey(digest)) {
      throw new CacheNotFoundException(digest);
    }
    result.add(results.get(digest));
  }
  return ImmutableList.copyOf(result);
}
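A brief usage sketch for downloadBlobs. The helper readBlobs and its digest source are hypothetical; the only behaviors relied on are visible in the method above: results come back in input order, and zero-size digests are materialized locally as empty arrays rather than being requested from the server.

// Illustrative only: readBlobs is a hypothetical helper; the digests are assumed
// to be ones the caller already holds (e.g. outputs listed in an action result).
private void readBlobs(Iterable<ContentDigest> digests) throws CacheNotFoundException {
  ImmutableList<byte[]> blobs = downloadBlobs(digests);
  // The returned list is parallel to the input iterable.
  int i = 0;
  for (ContentDigest digest : digests) {
    byte[] blob = blobs.get(i++);
    Preconditions.checkState(blob.length == digest.getSizeBytes());
  }
}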
Use of com.google.devtools.build.lib.remote.RemoteProtocol.BlobChunk in project bazel by bazelbuild.
In class GrpcActionCache, the method uploadChunks:
private void uploadChunks(int numItems, BlobChunkIterator blobs)
    throws InterruptedException, IOException {
  CountDownLatch finishLatch = new CountDownLatch(numItems); // Maximal number of batches.
  AtomicReference<RuntimeException> exception = new AtomicReference<>(null);
  UploadBlobReplyStreamObserver responseObserver = null;
  StreamObserver<CasUploadBlobRequest> requestObserver = null;
  int currentBatchBytes = 0;
  int batchedInputs = 0;
  int batches = 0;
  CasServiceStub stub = getStub();
  try {
    while (blobs.hasNext()) {
      BlobChunk chunk = blobs.next();
      if (chunk.hasDigest()) {
        // Determine whether to start the next batch.
        final long batchSize = chunk.getDigest().getSizeBytes() + currentBatchBytes;
        if (batchedInputs % options.grpcMaxBatchInputs == 0
            || batchSize > options.grpcMaxBatchSizeBytes) {
          // The batches execute simultaneously.
          if (requestObserver != null) {
            batchedInputs = 0;
            currentBatchBytes = 0;
            requestObserver.onCompleted();
          }
          batches++;
          responseObserver = new UploadBlobReplyStreamObserver(finishLatch, exception);
          requestObserver = stub.uploadBlob(responseObserver);
        }
        batchedInputs++;
      }
      currentBatchBytes += chunk.getData().size();
      requestObserver.onNext(CasUploadBlobRequest.newBuilder().setData(chunk).build());
      if (finishLatch.getCount() == 0) {
        // The RPC completed or errored before we finished sending.
        throw new RuntimeException(
            "gRPC terminated prematurely: "
                + (exception.get() != null ? exception.get() : "unknown cause"));
      }
    }
  } catch (RuntimeException e) {
    // Cancel the RPC.
    if (requestObserver != null) {
      requestObserver.onError(e);
    }
    throw e;
  }
  if (requestObserver != null) {
    // Finish the last batch.
    requestObserver.onCompleted();
  }
  while (batches++ < numItems) {
    // Count down the latch slots of batches that were never opened.
    finishLatch.countDown();
  }
  finishLatch.await(options.grpcTimeoutSeconds, TimeUnit.SECONDS);
  if (exception.get() != null) {
    // Re-throw the first encountered exception.
    throw exception.get();
  }
}
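For clarity, the batch-boundary condition from the loop above can be read as a standalone predicate. The helper below is an illustrative restatement only; it does not exist in GrpcActionCache.

// Illustrative restatement of the batch-start rule in uploadChunks: a new
// uploadBlob stream is opened for the first blob of every batch, i.e. when the
// count of blobs already batched wraps around grpcMaxBatchInputs (the counter is
// reset to 0 at each boundary) or when the next blob's declared size would push
// the batch past grpcMaxBatchSizeBytes.
private boolean shouldStartNewBatch(int batchedInputs, long batchSizeWithNextBlob) {
  return batchedInputs % options.grpcMaxBatchInputs == 0
      || batchSizeWithNextBlob > options.grpcMaxBatchSizeBytes;
}

Note also that numItems bounds the number of batches: the CountDownLatch is created with one count per item, and the trailing while loop counts down the slots of batches that were never opened, so the final await only waits on the streams that were actually started.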