use of com.google.api.client.googleapis.batch.BatchRequest in project beam by apache.
In the class GcsUtil, the method makeGetBatches:
/**
* Makes get {@link BatchRequest BatchRequests}.
*
* @param paths {@link GcsPath GcsPaths}.
* @param results mutable {@link List} for return values.
* @return {@link BatchRequest BatchRequests} to execute.
* @throws IOException
*/
@VisibleForTesting
List<BatchRequest> makeGetBatches(
    Collection<GcsPath> paths, List<StorageObjectOrIOException[]> results) throws IOException {
  List<BatchRequest> batches = new LinkedList<>();
  for (List<GcsPath> filesToGet :
      Lists.partition(Lists.newArrayList(paths), MAX_REQUESTS_PER_BATCH)) {
    BatchRequest batch = createBatchRequest();
    for (GcsPath path : filesToGet) {
      results.add(enqueueGetFileSize(path, batch));
    }
    batches.add(batch);
  }
  return batches;
}
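For reference, the snippet below is a minimal sketch of what queuing a single metadata GET into one of these batches can look like with the plain google-api-client API. It is not Beam's enqueueGetFileSize; the storage client, the bucket and object names, and the callback bodies are illustrative assumptions.

import com.google.api.client.googleapis.batch.BatchRequest;
import com.google.api.client.googleapis.batch.json.JsonBatchCallback;
import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.http.HttpHeaders;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.model.StorageObject;

import java.io.IOException;

class GetBatchSketch {
  /** Queues one objects().get request into an existing batch; nothing is sent until batch.execute(). */
  static void queueGet(Storage storage, BatchRequest batch, String bucket, String object)
      throws IOException {
    storage.objects().get(bucket, object).queue(batch, new JsonBatchCallback<StorageObject>() {
      @Override
      public void onSuccess(StorageObject obj, HttpHeaders responseHeaders) {
        // Metadata for the object (including its size) arrives here once the batch runs.
        System.out.println(obj.getName() + " -> " + obj.getSize() + " bytes");
      }

      @Override
      public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) {
        System.err.println("GET of " + object + " failed: " + e.getMessage());
      }
    });
  }
}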
use of com.google.api.client.googleapis.batch.BatchRequest in project beam by apache.
In the class GcsUtilTest, the method sumBatchSizes:
private static int sumBatchSizes(List<BatchRequest> batches) {
  int ret = 0;
  for (BatchRequest b : batches) {
    ret += b.size();
    assertThat(b.size(), greaterThan(0));
  }
  return ret;
}
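A sketch of how this helper could back an assertion in GcsUtilTest. The gcsUtil instance, the makeGcsPaths(prefix, n) path-building helper, and the value 100 for MAX_REQUESTS_PER_BATCH are all assumptions here; the point is that the per-batch sizes must sum back to the number of requested paths.

@Test
public void makeGetBatchesPartitionsRequests() throws IOException {
  List<StorageObjectOrIOException[]> results = new ArrayList<>();
  // makeGcsPaths is a hypothetical helper returning 501 distinct GcsPaths under the prefix.
  List<BatchRequest> batches = gcsUtil.makeGetBatches(makeGcsPaths("gs://bucket/file", 501), results);

  // 501 paths at (an assumed) 100 requests per batch -> 6 non-empty batches.
  assertEquals(6, batches.size());
  // No request is lost or duplicated by the partitioning, and each path got a result slot.
  assertEquals(501, sumBatchSizes(batches));
  assertEquals(501, results.size());
}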
use of com.google.api.client.googleapis.batch.BatchRequest in project elasticsearch by elastic.
In the class GoogleCloudStorageBlobStore, the method deleteBlobs:
/**
 * Deletes multiple blobs in the given bucket (uses a batch request to perform this)
 *
 * @param blobNames names of the blobs to delete
 */
void deleteBlobs(Collection<String> blobNames) throws IOException {
    if (blobNames == null || blobNames.isEmpty()) {
        return;
    }
    if (blobNames.size() == 1) {
        deleteBlob(blobNames.iterator().next());
        return;
    }
    final List<Storage.Objects.Delete> deletions = new ArrayList<>();
    final Iterator<String> blobs = blobNames.iterator();
    SocketAccess.doPrivilegedVoidIOException(() -> {
        while (blobs.hasNext()) {
            // Create a delete request for each blob to delete
            deletions.add(client.objects().delete(bucket, blobs.next()));
            if (blobs.hasNext() == false || deletions.size() == MAX_BATCHING_REQUESTS) {
                try {
                    // Deletions are executed using a batch request
                    BatchRequest batch = client.batch();
                    // Used to track successful deletions
                    CountDown countDown = new CountDown(deletions.size());
                    for (Storage.Objects.Delete delete : deletions) {
                        // Queue the delete request in batch
                        delete.queue(batch, new JsonBatchCallback<Void>() {
                            @Override
                            public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException {
                                logger.error("failed to delete blob [{}] in bucket [{}]: {}",
                                    delete.getObject(), delete.getBucket(), e.getMessage());
                            }

                            @Override
                            public void onSuccess(Void aVoid, HttpHeaders responseHeaders) throws IOException {
                                countDown.countDown();
                            }
                        });
                    }
                    batch.execute();
                    if (countDown.isCountedDown() == false) {
                        throw new IOException("Failed to delete all [" + deletions.size() + "] blobs");
                    }
                } finally {
                    deletions.clear();
                }
            }
        }
    });
}
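Stripped of the Elasticsearch-specific helpers (SocketAccess, CountDown), the underlying pattern is: queue every objects().delete into one BatchRequest with a JsonBatchCallback<Void>, execute once, and count failures. A self-contained sketch, with the failure count kept in a plain AtomicInteger and the Storage client and names supplied by the caller:

import com.google.api.client.googleapis.batch.BatchRequest;
import com.google.api.client.googleapis.batch.json.JsonBatchCallback;
import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.http.HttpHeaders;
import com.google.api.services.storage.Storage;

import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicInteger;

class BatchDeleteSketch {
  static void deleteAll(Storage client, String bucket, Collection<String> blobNames)
      throws IOException {
    BatchRequest batch = client.batch();
    AtomicInteger failures = new AtomicInteger();
    for (String blobName : blobNames) {
      client.objects().delete(bucket, blobName).queue(batch, new JsonBatchCallback<Void>() {
        @Override
        public void onSuccess(Void aVoid, HttpHeaders responseHeaders) {
          // Nothing to do: the blob was deleted.
        }

        @Override
        public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) {
          failures.incrementAndGet();
        }
      });
    }
    if (batch.size() > 0) {
      batch.execute(); // one HTTP round trip for all queued deletes
    }
    if (failures.get() > 0) {
      throw new IOException(
          "Failed to delete " + failures.get() + " of " + blobNames.size() + " blobs");
    }
  }
}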
use of com.google.api.client.googleapis.batch.BatchRequest in project beam by apache.
In the class GcsUtil, the method makeCopyBatches:
List<BatchRequest> makeCopyBatches(Iterable<String> srcFilenames, Iterable<String> destFilenames)
    throws IOException {
  List<String> srcList = Lists.newArrayList(srcFilenames);
  List<String> destList = Lists.newArrayList(destFilenames);
  checkArgument(
      srcList.size() == destList.size(),
      "Number of source files %s must equal number of destination files %s",
      srcList.size(),
      destList.size());
  List<BatchRequest> batches = new LinkedList<>();
  BatchRequest batch = createBatchRequest();
  for (int i = 0; i < srcList.size(); i++) {
    final GcsPath sourcePath = GcsPath.fromUri(srcList.get(i));
    final GcsPath destPath = GcsPath.fromUri(destList.get(i));
    enqueueCopy(sourcePath, destPath, batch);
    if (batch.size() >= MAX_REQUESTS_PER_BATCH) {
      batches.add(batch);
      batch = createBatchRequest();
    }
  }
  if (batch.size() > 0) {
    batches.add(batch);
  }
  return batches;
}
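enqueueCopy itself is not shown above. A plausible sketch of what queuing one copy request into the batch could look like with the storage JSON API; the use of objects().copy with a null request body and the callback behaviour are assumptions, not Beam's confirmed implementation:

private static void enqueueCopySketch(Storage storage, GcsPath from, GcsPath to, BatchRequest batch)
    throws IOException {
  Storage.Objects.Copy copyRequest =
      storage.objects().copy(from.getBucket(), from.getObject(), to.getBucket(), to.getObject(), null);
  copyRequest.queue(batch, new JsonBatchCallback<StorageObject>() {
    @Override
    public void onSuccess(StorageObject obj, HttpHeaders responseHeaders) {
      // obj describes the newly created destination object.
    }

    @Override
    public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException {
      throw new IOException("Failed to copy " + from + " to " + to + ": " + e.getMessage());
    }
  });
}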
use of com.google.api.client.googleapis.batch.BatchRequest in project beam by apache.
In the class GcsUtil, the method executeBatches:
private static void executeBatches(List<BatchRequest> batches) throws IOException {
  ListeningExecutorService executor =
      MoreExecutors.listeningDecorator(
          MoreExecutors.getExitingExecutorService(
              new ThreadPoolExecutor(
                  MAX_CONCURRENT_BATCHES,
                  MAX_CONCURRENT_BATCHES,
                  0L,
                  TimeUnit.MILLISECONDS,
                  new LinkedBlockingQueue<Runnable>())));
  List<ListenableFuture<Void>> futures = new LinkedList<>();
  for (final BatchRequest batch : batches) {
    futures.add(
        executor.submit(
            new Callable<Void>() {
              public Void call() throws IOException {
                batch.execute();
                return null;
              }
            }));
  }
  try {
    Futures.allAsList(futures).get();
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new IOException("Interrupted while executing batch GCS request", e);
  } catch (ExecutionException e) {
    if (e.getCause() instanceof FileNotFoundException) {
      throw (FileNotFoundException) e.getCause();
    }
    throw new IOException("Error executing batch GCS request", e);
  } finally {
    executor.shutdown();
  }
}
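Taken together, the Beam helpers above suggest the shape of a bulk operation: build size-limited batches, then execute them on a bounded pool. A minimal composition sketch; this public wrapper is an assumption, not copied from GcsUtil's source:

public void copy(Iterable<String> srcFilenames, Iterable<String> destFilenames) throws IOException {
  executeBatches(makeCopyBatches(srcFilenames, destFilenames));
}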