Use of build.bazel.remote.execution.v2.BatchUpdateBlobsResponse in project bazel-buildfarm by bazelbuild.
From the class Executor, method loadFilesIntoCAS.
private static void loadFilesIntoCAS(String instanceName, Channel channel, Path blobsDir) throws Exception {
  ContentAddressableStorageBlockingStub casStub = ContentAddressableStorageGrpc.newBlockingStub(channel);
  List<Digest> missingDigests = findMissingBlobs(instanceName, blobsDir, casStub);
  UUID uploadId = UUID.randomUUID();
  // Pack small blobs into up to 128 pending batch requests, each kept under 2MB.
  int[] bucketSizes = new int[128];
  BatchUpdateBlobsRequest.Builder[] buckets = new BatchUpdateBlobsRequest.Builder[128];
  for (int i = 0; i < 128; i++) {
    bucketSizes[i] = 0;
    buckets[i] = BatchUpdateBlobsRequest.newBuilder().setInstanceName(instanceName);
  }
  ByteStreamStub bsStub = ByteStreamGrpc.newStub(channel);
  for (Digest missingDigest : missingDigests) {
    Path path = blobsDir.resolve(missingDigest.getHash() + "_" + missingDigest.getSizeBytes());
    if (missingDigest.getSizeBytes() < Size.mbToBytes(1)) {
      // Small blob: add it to the least-full bucket, flushing the fullest bucket if none has room.
      Request request =
          Request.newBuilder()
              .setDigest(missingDigest)
              .setData(ByteString.copyFrom(Files.readAllBytes(path)))
              .build();
      int maxBucketSize = 0;
      long minBucketSize = Size.mbToBytes(2) + 1;
      int maxBucketIndex = 0;
      int minBucketIndex = -1;
      int size = (int) missingDigest.getSizeBytes() + 48;
      for (int i = 0; i < 128; i++) {
        int newBucketSize = bucketSizes[i] + size;
        if (newBucketSize < Size.mbToBytes(2) && bucketSizes[i] < minBucketSize) {
          minBucketSize = bucketSizes[i];
          minBucketIndex = i;
        }
        if (bucketSizes[i] > maxBucketSize) {
          maxBucketSize = bucketSizes[i];
          maxBucketIndex = i;
        }
      }
      if (minBucketIndex < 0) {
        // No bucket can take this request: send the fullest one now and start it over with this request.
        bucketSizes[maxBucketIndex] = size;
        BatchUpdateBlobsRequest batchRequest = buckets[maxBucketIndex].build();
        Stopwatch stopwatch = Stopwatch.createStarted();
        BatchUpdateBlobsResponse batchResponse = casStub.batchUpdateBlobs(batchRequest);
        long usecs = stopwatch.elapsed(MICROSECONDS);
        checkState(
            batchResponse.getResponsesList().stream()
                .allMatch(response -> Code.forNumber(response.getStatus().getCode()) == Code.OK));
        System.out.println(
            "Updated " + batchRequest.getRequestsCount() + " blobs in " + (usecs / 1000.0) + "ms");
        buckets[maxBucketIndex] =
            BatchUpdateBlobsRequest.newBuilder().setInstanceName(instanceName).addRequests(request);
      } else {
        bucketSizes[minBucketIndex] += size;
        buckets[minBucketIndex].addRequests(request);
      }
    } else {
      // Large blob: upload it through the ByteStream Write API instead of batching.
      Stopwatch stopwatch = Stopwatch.createStarted();
      SettableFuture<WriteResponse> writtenFuture = SettableFuture.create();
      StreamObserver<WriteRequest> requestObserver =
          bsStub.write(
              new StreamObserver<WriteResponse>() {
                @Override
                public void onNext(WriteResponse response) {
                  writtenFuture.set(response);
                }

                @Override
                public void onCompleted() {}

                @Override
                public void onError(Throwable t) {
                  writtenFuture.setException(t);
                }
              });
      HashCode hash = HashCode.fromString(missingDigest.getHash());
      String resourceName = uploadResourceName(instanceName, uploadId, hash, missingDigest.getSizeBytes());
      try (InputStream in = Files.newInputStream(path)) {
        boolean first = true;
        long writtenBytes = 0;
        byte[] buf = new byte[64 * 1024];
        while (writtenBytes != missingDigest.getSizeBytes()) {
          int len = in.read(buf);
          WriteRequest.Builder request = WriteRequest.newBuilder();
          if (first) {
            // Only the first chunk of a ByteStream write carries the resource name.
            request.setResourceName(resourceName);
          }
          request.setData(ByteString.copyFrom(buf, 0, len)).setWriteOffset(writtenBytes);
          if (writtenBytes + len == missingDigest.getSizeBytes()) {
            request.setFinishWrite(true);
          }
          requestObserver.onNext(request.build());
          writtenBytes += len;
          first = false;
        }
        writtenFuture.get();
        System.out.println(
            "Wrote long "
                + DigestUtil.toString(missingDigest)
                + " in "
                + (stopwatch.elapsed(MICROSECONDS) / 1000.0)
                + "ms");
      }
    }
  }
  // Flush any buckets that still hold queued small blobs.
  for (int i = 0; i < 128; i++) {
    if (bucketSizes[i] > 0) {
      BatchUpdateBlobsRequest batchRequest = buckets[i].build();
      Stopwatch stopwatch = Stopwatch.createStarted();
      BatchUpdateBlobsResponse batchResponse = casStub.batchUpdateBlobs(batchRequest);
      long usecs = stopwatch.elapsed(MICROSECONDS);
      checkState(
          batchResponse.getResponsesList().stream()
              .allMatch(response -> Code.forNumber(response.getStatus().getCode()) == Code.OK));
      System.out.println(
          "Updated " + batchRequest.getRequestsCount() + " blobs in " + (usecs / 1000.0) + "ms");
    }
  }
}
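The large-blob branch above relies on an uploadResourceName helper to address the ByteStream write. Below is a minimal sketch of such a helper, assuming the Remote Execution API convention for upload resource names, {instance_name}/uploads/{uuid}/blobs/{hash}/{size}; the actual helper in bazel-buildfarm may be structured differently.

// Illustrative sketch only: composes a ByteStream upload resource name following the
// Remote Execution API convention {instance_name}/uploads/{uuid}/blobs/{hash}/{size}.
// The real uploadResourceName in buildfarm may differ in signature or formatting.
private static String uploadResourceName(String instanceName, UUID uploadId, HashCode hash, long size) {
  return String.format("%s/uploads/%s/blobs/%s/%d", instanceName, uploadId, hash, size);
}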
Use of build.bazel.remote.execution.v2.BatchUpdateBlobsResponse in project bazel-buildfarm by bazelbuild.
From the class StubInstanceTest, method putAllBlobsUploadsBlobs.
@Test
public void putAllBlobsUploadsBlobs() throws Exception {
  String instanceName = "putAllBlobs-test";
  serviceRegistry.addService(
      new ContentAddressableStorageImplBase() {
        @Override
        public void batchUpdateBlobs(
            BatchUpdateBlobsRequest batchRequest,
            StreamObserver<BatchUpdateBlobsResponse> responseObserver) {
          checkState(batchRequest.getInstanceName().equals(instanceName));
          responseObserver.onNext(
              BatchUpdateBlobsResponse.newBuilder()
                  .addAllResponses(
                      batchRequest.getRequestsList().stream()
                          .map(request -> Response.newBuilder().setDigest(request.getDigest()).build())
                          .collect(Collectors.toList()))
                  .build());
          responseObserver.onCompleted();
        }
      });
  Instance instance = newStubInstance("putAllBlobs-test");
  ByteString first = ByteString.copyFromUtf8("first");
  ByteString last = ByteString.copyFromUtf8("last");
  ImmutableList<ByteString> blobs = ImmutableList.of(first, last);
  ImmutableList<Digest> digests = ImmutableList.of(DIGEST_UTIL.compute(first), DIGEST_UTIL.compute(last));
  assertThat(instance.putAllBlobs(blobs, RequestMetadata.getDefaultInstance())).containsAllIn(digests);
}
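A natural companion to this test would exercise the failure path: putAllBlobs (shown further below in StubInstance) throws when any per-blob status is not OK. The sketch below is illustrative only and assumes the same serviceRegistry, newStubInstance, and related fixtures as above; the exception type and assertion style in the real test class may differ.

// Hypothetical companion test (not from the original class): a non-OK per-blob status
// returned by the fake CAS should surface as an exception from putAllBlobs.
@Test
public void putAllBlobsThrowsOnFailedResponse() throws Exception {
  serviceRegistry.addService(
      new ContentAddressableStorageImplBase() {
        @Override
        public void batchUpdateBlobs(
            BatchUpdateBlobsRequest batchRequest,
            StreamObserver<BatchUpdateBlobsResponse> responseObserver) {
          responseObserver.onNext(
              BatchUpdateBlobsResponse.newBuilder()
                  .addResponses(
                      Response.newBuilder()
                          .setDigest(batchRequest.getRequests(0).getDigest())
                          .setStatus(
                              com.google.rpc.Status.newBuilder()
                                  .setCode(com.google.rpc.Code.UNAVAILABLE.getNumber())
                                  .build()))
                  .build());
          responseObserver.onCompleted();
        }
      });
  Instance instance = newStubInstance("putAllBlobs-failure-test");
  ByteString blob = ByteString.copyFromUtf8("content");
  try {
    instance.putAllBlobs(ImmutableList.of(blob), RequestMetadata.getDefaultInstance());
    throw new AssertionError("expected putAllBlobs to throw for a failed response");
  } catch (RuntimeException expected) {
    // Expected: putAllBlobs raises once it has collected the failed responses.
  }
}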
Use of build.bazel.remote.execution.v2.BatchUpdateBlobsResponse in project bazel-buildfarm by bazelbuild.
From the class BuildFarmServerTest, method batchUpdateBlobs.
@Test
public void batchUpdateBlobs() {
  DigestUtil digestUtil = new DigestUtil(HashFunction.SHA256);
  ByteString content = ByteString.copyFromUtf8("Hello, World!");
  Digest digest = digestUtil.compute(content);
  BatchUpdateBlobsRequest request =
      BatchUpdateBlobsRequest.newBuilder()
          .setInstanceName(INSTANCE_NAME)
          .addRequests(Request.newBuilder().setDigest(digest).setData(content).build())
          .build();
  ContentAddressableStorageGrpc.ContentAddressableStorageBlockingStub stub =
      ContentAddressableStorageGrpc.newBlockingStub(inProcessChannel);
  BatchUpdateBlobsResponse response = stub.batchUpdateBlobs(request);
  Response expected =
      Response.newBuilder()
          .setDigest(digest)
          .setStatus(com.google.rpc.Status.newBuilder().setCode(Code.OK.getNumber()).build())
          .build();
  assertThat(response.getResponsesList()).containsExactlyElementsIn(Collections.singleton(expected));
}
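After a successful batch update, the same stub can confirm the blob is present: per the Remote Execution API, an uploaded digest should no longer appear in FindMissingBlobs results. The follow-up below is illustrative, continuing from the variables in the test above, and is not necessarily part of the original test.

// Illustrative follow-up using the stub, INSTANCE_NAME, and digest from the test above:
// a digest that was just uploaded should not be reported as missing.
FindMissingBlobsResponse missing =
    stub.findMissingBlobs(
        FindMissingBlobsRequest.newBuilder()
            .setInstanceName(INSTANCE_NAME)
            .addBlobDigests(digest)
            .build());
assertThat(missing.getMissingBlobDigestsList()).isEmpty();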
Use of build.bazel.remote.execution.v2.BatchUpdateBlobsResponse in project bazel-buildfarm by bazelbuild.
From the class StubInstance, method putAllBlobs.
@Override
public Iterable<Digest> putAllBlobs(Iterable<ByteString> blobs, RequestMetadata requestMetadata) {
  long totalSize = 0;
  ImmutableList.Builder<Request> requests = ImmutableList.builder();
  for (ByteString blob : blobs) {
    checkState(totalSize + blob.size() <= maxBatchUpdateBlobsSize);
    requests.add(Request.newBuilder().setDigest(digestUtil.compute(blob)).setData(blob).build());
    totalSize += blob.size();
  }
  BatchUpdateBlobsRequest batchRequest =
      BatchUpdateBlobsRequest.newBuilder()
          .setInstanceName(getName())
          .addAllRequests(requests.build())
          .build();
  BatchUpdateBlobsResponse batchResponse =
      deadlined(casBlockingStub)
          .withInterceptors(attachMetadataInterceptor(requestMetadata))
          .batchUpdateBlobs(batchRequest);
  PutAllBlobsException exception = null;
  for (BatchUpdateBlobsResponse.Response response : batchResponse.getResponsesList()) {
    com.google.rpc.Status status = response.getStatus();
    if (Code.forNumber(status.getCode()) != Code.OK) {
      if (exception == null) {
        exception = new PutAllBlobsException();
      }
      exception.addFailedResponse(response);
    }
  }
  if (exception != null) {
    throw exception;
  }
  return Iterables.transform(batchResponse.getResponsesList(), BatchUpdateBlobsResponse.Response::getDigest);
}
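putAllBlobs collects every failed per-blob response before throwing, rather than failing on the first error, and the exception must be unchecked since the method declares no checked exceptions. The following is a minimal sketch of what an exception type shaped like PutAllBlobsException could look like; the actual class in bazel-buildfarm may differ, so the names below are assumptions.

// Minimal sketch, not the actual buildfarm class: an unchecked exception that
// accumulates the per-blob responses whose status was not OK, so callers can
// inspect exactly which uploads failed. Field and method names are assumed.
public class PutAllBlobsException extends RuntimeException {
  private final java.util.List<BatchUpdateBlobsResponse.Response> failedResponses =
      new java.util.ArrayList<>();

  public void addFailedResponse(BatchUpdateBlobsResponse.Response response) {
    failedResponses.add(response);
  }

  public java.util.List<BatchUpdateBlobsResponse.Response> getFailedResponses() {
    return failedResponses;
  }
}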
Use of build.bazel.remote.execution.v2.BatchUpdateBlobsResponse in project bazel-buildfarm by bazelbuild.
From the class ContentAddressableStorageService, method batchUpdateBlobs.
@Override
public void batchUpdateBlobs(
    BatchUpdateBlobsRequest batchRequest, StreamObserver<BatchUpdateBlobsResponse> responseObserver) {
  ImmutableList.Builder<BatchUpdateBlobsResponse.Response> responses = new ImmutableList.Builder<>();
  Function<com.google.rpc.Code, com.google.rpc.Status> statusForCode =
      (code) -> com.google.rpc.Status.newBuilder().setCode(code.getNumber()).build();
  for (Request request : batchRequest.getRequestsList()) {
    Digest digest = request.getDigest();
    try {
      simpleBlobStore.put(digest.getHash(), digest.getSizeBytes(), request.getData().newInput());
      responses.add(
          BatchUpdateBlobsResponse.Response.newBuilder()
              .setDigest(digest)
              .setStatus(statusForCode.apply(com.google.rpc.Code.OK))
              .build());
    } catch (IOException e) {
      StatusException statusException = Status.fromThrowable(e).asException();
      responses.add(
          BatchUpdateBlobsResponse.Response.newBuilder()
              .setDigest(digest)
              .setStatus(StatusProto.fromThrowable(statusException))
              .build());
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      return;
    }
  }
  responseObserver.onNext(BatchUpdateBlobsResponse.newBuilder().addAllResponses(responses.build()).build());
  responseObserver.onCompleted();
}
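The handler delegates storage to simpleBlobStore and maps any IOException into a non-OK per-blob status, while an InterruptedException causes it to return without completing the response. A sketch of the put signature this loop relies on, inferred from the call site; the real SimpleBlobStore interface in buildfarm is an assumption here and likely declares more methods.

// Inferred from the call site above; assumed shape only, not the actual buildfarm interface.
interface SimpleBlobStore {
  // Stores the blob keyed by its content hash, reading size bytes from in.
  // An IOException becomes a failed per-blob status in the response above.
  void put(String key, long size, InputStream in) throws IOException, InterruptedException;
}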