Use of software.amazon.awssdk.services.dynamodb.model.ProvisionedThroughputExceededException in project para by Erudika.
From the class AWSDynamoUtils, method batchWrite:
/**
 * Writes multiple items to DynamoDB in a single batch.
 * @param items a map of table names to lists of write requests
 * @param backoff the initial backoff period, in seconds (doubled on each retry)
 */
protected static void batchWrite(Map<String, List<WriteRequest>> items, int backoff) {
	if (items == null || items.isEmpty()) {
		return;
	}
	try {
		logger.debug("batchWrite(): requests {}, backoff {}", items.values().iterator().next().size(), backoff);
		BatchWriteItemResponse result = getClient().batchWriteItem(b -> b.
				returnConsumedCapacity(ReturnConsumedCapacity.TOTAL).requestItems(items));
		if (result == null) {
			return;
		}
		logger.debug("batchWrite(): success - consumed capacity {}", result.consumedCapacity());
		if (result.unprocessedItems() != null && !result.unprocessedItems().isEmpty()) {
			// some requests were throttled - wait, then retry only the unprocessed ones
			Thread.sleep(backoff * 1000L);
			for (Map.Entry<String, List<WriteRequest>> entry : result.unprocessedItems().entrySet()) {
				logger.warn("UNPROCESSED DynamoDB write requests for keys {} in table {}!",
						entry.getValue().stream().map(r -> r.getValueForField(Config._KEY, String.class).
								orElse("")).collect(Collectors.joining(",")), entry.getKey());
			}
			batchWrite(result.unprocessedItems(), backoff * 2);
		}
	} catch (ProvisionedThroughputExceededException ex) {
		logger.warn("Write capacity exceeded for table '{}'. Retrying request in {} seconds.",
				items.keySet().iterator().next(), backoff);
		try {
			Thread.sleep(backoff * 1000L);
			// retry forever, doubling the backoff each time (exponential backoff)
			batchWrite(items, backoff * 2);
		} catch (InterruptedException ie) {
			logger.error(null, ie);
			Thread.currentThread().interrupt();
		}
	} catch (InterruptedException ie) {
		logger.error(null, ie);
		Thread.currentThread().interrupt();
	} catch (Exception e) {
		logger.error("Failed to execute batch write operation on table '{}'", items.keySet().iterator().next(), e);
		throwIfNecessary(e);
	}
}
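For context, here is a minimal sketch of how a tables-to-requests map could be assembled before calling batchWrite(). The table name "para-table" and the attribute values are hypothetical placeholders; in para these requests are built internally from ParaObject rows, with Config._KEY ("key") as the primary-key attribute.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.PutRequest;
import software.amazon.awssdk.services.dynamodb.model.WriteRequest;

// Hypothetical caller: wrap one row in a put request and group requests by table.
Map<String, AttributeValue> row = new HashMap<>();
row.put("key", AttributeValue.builder().s("myapp_object1").build());   // primary key
row.put("name", AttributeValue.builder().s("Example object").build()); // payload attribute

WriteRequest put = WriteRequest.builder()
		.putRequest(PutRequest.builder().item(row).build())
		.build();

Map<String, List<WriteRequest>> items = new HashMap<>();
items.put("para-table", List.of(put)); // "para-table" is a placeholder table name
batchWrite(items, 1); // start with a 1-second backoff; it doubles on every retry

DynamoDB caps BatchWriteItem at 25 write requests per call, which is why the method retries unprocessed items instead of assuming everything was written.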
Use of software.amazon.awssdk.services.dynamodb.model.ProvisionedThroughputExceededException in project para by Erudika.
From the class AWSDynamoUtils, method batchGet:
/**
 * Reads multiple items from DynamoDB in a single batch.
 * @param <P> type of object
 * @param kna a map of table names to keys and attributes (KeysAndAttributes)
 * @param results a map of ID to ParaObject, populated with the items that were read
 * @param backoff the initial backoff period, in seconds (doubled on each retry)
 */
protected static <P extends ParaObject> void batchGet(Map<String, KeysAndAttributes> kna, Map<String, P> results, int backoff) {
	if (kna == null || kna.isEmpty() || results == null) {
		return;
	}
	try {
		BatchGetItemResponse result = getClient().batchGetItem(b -> b.
				returnConsumedCapacity(ReturnConsumedCapacity.TOTAL).requestItems(kna));
		if (result == null) {
			return;
		}
		List<Map<String, AttributeValue>> res = result.responses().get(kna.keySet().iterator().next());
		// responses() may lack the table entirely if every key was throttled
		if (res != null) {
			for (Map<String, AttributeValue> item : res) {
				P obj = fromRow(item);
				if (obj != null) {
					results.put(obj.getId(), obj);
				}
			}
			logger.debug("batchGet(): total {}, consumed capacity {}", res.size(), result.consumedCapacity());
		}
		if (result.unprocessedKeys() != null && !result.unprocessedKeys().isEmpty()) {
			// some keys were throttled - wait, then retry only the unprocessed ones
			Thread.sleep(backoff * 1000L);
			for (Map.Entry<String, KeysAndAttributes> entry : result.unprocessedKeys().entrySet()) {
				logger.warn("UNPROCESSED DynamoDB read requests for keys {} in table {}!",
						entry.getValue().keys().stream().flatMap(r -> r.values().stream().
								map(v -> v.s())).collect(Collectors.joining(",")), entry.getKey());
			}
			batchGet(result.unprocessedKeys(), results, backoff * 2);
		}
	} catch (ProvisionedThroughputExceededException ex) {
		logger.warn("Read capacity exceeded for table '{}'. Retrying request in {} seconds.",
				kna.keySet().iterator().next(), backoff);
		try {
			Thread.sleep(backoff * 1000L);
			// retry forever, doubling the backoff each time (exponential backoff)
			batchGet(kna, results, backoff * 2);
		} catch (InterruptedException ie) {
			logger.error(null, ie);
			Thread.currentThread().interrupt();
		}
	} catch (InterruptedException ie) {
		logger.error(null, ie);
		Thread.currentThread().interrupt();
	} catch (Exception e) {
		logger.error("Failed to execute batch read operation on table '{}'", kna.keySet().iterator().next(), e);
	}
}
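A matching read-side sketch, again with a hypothetical "para-table" name and "key" assumed as the primary-key attribute. batchGet() fills the supplied results map with whatever objects it manages to read:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.KeysAndAttributes;

// Hypothetical caller: request two rows by primary key from a single table.
List<Map<String, AttributeValue>> keys = List.of(
		Map.of("key", AttributeValue.builder().s("myapp_object1").build()),
		Map.of("key", AttributeValue.builder().s("myapp_object2").build()));

Map<String, KeysAndAttributes> kna = new HashMap<>();
kna.put("para-table", KeysAndAttributes.builder().keys(keys).build());

Map<String, ParaObject> results = new HashMap<>();
batchGet(kna, results, 1); // populates results with ID->object mappings

BatchGetItem is limited to 100 keys per call, so any unprocessed keys are retried with the same doubling backoff as on the write path.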