Use of com.amazonaws.services.dynamodbv2.model.KeysAndAttributes in the project camel by apache:
the class BatchGetItemsCommandTest, method execute.
@Test
public void execute() {
    // Key sent with the batch request and the key we expect to be reported back
    // as unprocessed by the mocked client.
    Map<String, AttributeValue> requestKey = new HashMap<>();
    requestKey.put("1", new AttributeValue("Key_1"));
    Map<String, AttributeValue> expectedUnprocessedKey = new HashMap<>();
    expectedUnprocessedKey.put("1", new AttributeValue("UNPROCESSED_KEY"));

    // Build the per-table request map and hand it to the command via the header.
    Map<String, KeysAndAttributes> requestItems = new HashMap<>();
    requestItems.put("DOMAIN1", new KeysAndAttributes().withKeys(requestKey));
    exchange.getIn().setHeader(DdbConstants.BATCH_ITEMS, requestItems);

    command.execute();

    // The command must forward the request map unchanged to the client.
    assertEquals(requestItems, ddbClient.batchGetItemRequest.getRequestItems());

    // Verify the attribute returned for table DOMAIN1.
    List<Map<String, AttributeValue>> batchResponse = (List<Map<String, AttributeValue>>) exchange.getIn().getHeader(DdbConstants.BATCH_RESPONSE, Map.class).get("DOMAIN1");
    assertEquals(new AttributeValue("attrValue"), batchResponse.get(0).get("attrName"));

    // Verify the unprocessed keys reported for table DOMAIN1.
    KeysAndAttributes unprocessed = (KeysAndAttributes) exchange.getIn().getHeader(DdbConstants.UNPROCESSED_KEYS, Map.class).get("DOMAIN1");
    assertEquals(expectedUnprocessedKey, unprocessed.getKeys().iterator().next());
}
Use of com.amazonaws.services.dynamodbv2.model.KeysAndAttributes in the project nifi by apache:
the class GetDynamoDBTest, method testStringHashStringNoRangeGetUnprocessed.
// Incorporated test from James W
@Test
public void testStringHashStringNoRangeGetUnprocessed() {
    unprocessed.clear();

    // Register a single unprocessed hash-only key for the test table so the
    // processor routes the flow file to the "unprocessed" relationship.
    Map<String, AttributeValue> hashKey = new HashMap<>();
    hashKey.put("hashS", new AttributeValue("h1"));
    unprocessed.put(stringHashStringRangeTableName, new KeysAndAttributes().withKeys(hashKey));

    final TestRunner runner = TestRunners.newTestRunner(getDynamoDB);
    runner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    runner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    runner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    runner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    runner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    runner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    runner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
    runner.enqueue(new byte[] {});
    runner.run(1);

    // Exactly one flow file goes to REL_UNPROCESSED, carrying the error attribute.
    runner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_UNPROCESSED, 1);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_UNPROCESSED);
    for (MockFlowFile flowFile : flowFiles) {
        assertNotNull(flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_KEY_ERROR_UNPROCESSED));
    }
}
Use of com.amazonaws.services.dynamodbv2.model.KeysAndAttributes in the project aws-doc-sdk-examples by awsdocs:
the class DocumentAPIBatchGet, method retrieveMultipleItemsBatchGet.
/**
 * Retrieves items from the Forum and Thread tables with a single BatchGetItem
 * call, then re-issues any unprocessed keys until none remain.
 *
 * <p>Unprocessed keys can occur when the request exceeds provisioned
 * throughput or the response size limit; the loop retries them via
 * {@code batchGetItemUnprocessed} until the returned map is empty.
 */
private static void retrieveMultipleItemsBatchGet() {
    try {
        TableKeysAndAttributes forumTableKeysAndAttributes = new TableKeysAndAttributes(forumTableName);
        // Add a partition key
        forumTableKeysAndAttributes.addHashOnlyPrimaryKeys("Name", "Amazon S3", "Amazon DynamoDB");
        TableKeysAndAttributes threadTableKeysAndAttributes = new TableKeysAndAttributes(threadTableName);
        // Add a partition key and a sort key
        threadTableKeysAndAttributes.addHashAndRangePrimaryKeys("ForumName", "Subject", "Amazon DynamoDB", "DynamoDB Thread 1", "Amazon DynamoDB", "DynamoDB Thread 2", "Amazon S3", "S3 Thread 1");
        System.out.println("Making the request.");
        BatchGetItemOutcome outcome = dynamoDB.batchGetItem(forumTableKeysAndAttributes, threadTableKeysAndAttributes);
        Map<String, KeysAndAttributes> unprocessed = null;
        do {
            // Print every item returned for every table in this round.
            for (String tableName : outcome.getTableItems().keySet()) {
                System.out.println("Items in table " + tableName);
                List<Item> items = outcome.getTableItems().get(tableName);
                for (Item item : items) {
                    System.out.println(item.toJSONPretty());
                }
            }
            // Check for unprocessed keys which could happen if you exceed
            // provisioned throughput or reach the limit on response size.
            unprocessed = outcome.getUnprocessedKeys();
            if (unprocessed.isEmpty()) {
                System.out.println("No unprocessed keys found");
            } else {
                System.out.println("Retrieving the unprocessed keys");
                outcome = dynamoDB.batchGetItemUnprocessed(unprocessed);
            }
        } while (!unprocessed.isEmpty());
    } catch (Exception e) {
        // Preserve the full failure context (exception type + stack trace),
        // not just the message, so failures remain diagnosable.
        System.err.println("Failed to retrieve items.");
        e.printStackTrace();
    }
}
Use of com.amazonaws.services.dynamodbv2.model.KeysAndAttributes in the project aws-doc-sdk-examples by awsdocs:
the class LowLevelBatchGet, method retrieveMultipleItemsBatchGet.
/**
 * Retrieves items from two tables using the low-level BatchGetItem API and
 * retries any unprocessed keys until the service reports none remain.
 *
 * <p>Unprocessed keys occur when a request exceeds provisioned throughput or
 * the response size limit; the do/while loop re-submits them as the next
 * request's items.
 */
private static void retrieveMultipleItemsBatchGet() {
    try {
        Map<String, KeysAndAttributes> requestItems = new HashMap<>();

        // Hash-only keys for table 1.
        List<Map<String, AttributeValue>> table1Keys = new ArrayList<>();
        table1Keys.add(makeKey("Name", "Amazon S3"));
        table1Keys.add(makeKey("Name", "Amazon DynamoDB"));
        requestItems.put(table1Name, new KeysAndAttributes().withKeys(table1Keys));

        // Hash + range keys for table 2.
        List<Map<String, AttributeValue>> table2Keys = new ArrayList<>();
        Map<String, AttributeValue> key = makeKey("ForumName", "Amazon DynamoDB");
        key.put("Subject", new AttributeValue().withS("DynamoDB Thread 1"));
        table2Keys.add(key);
        key = makeKey("ForumName", "Amazon DynamoDB");
        key.put("Subject", new AttributeValue().withS("DynamoDB Thread 2"));
        table2Keys.add(key);
        key = makeKey("ForumName", "Amazon S3");
        key.put("Subject", new AttributeValue().withS("S3 Thread 1"));
        table2Keys.add(key);
        requestItems.put(table2Name, new KeysAndAttributes().withKeys(table2Keys));

        BatchGetItemResult result;
        BatchGetItemRequest batchGetItemRequest = new BatchGetItemRequest();
        do {
            System.out.println("Making the request.");
            batchGetItemRequest.withRequestItems(requestItems);
            result = client.batchGetItem(batchGetItemRequest);

            printTableResults(table1Name, result.getResponses().get(table1Name), "Items in table ");
            printTableResults(table2Name, result.getResponses().get(table2Name), "\nItems in table ");

            // Unprocessed keys can happen if we exceed provisioned throughput
            // or reach the limit on response size; retry them next iteration.
            for (Map.Entry<String, KeysAndAttributes> pair : result.getUnprocessedKeys().entrySet()) {
                System.out.println("Unprocessed key pair: " + pair.getKey() + ", " + pair.getValue());
            }
            requestItems = result.getUnprocessedKeys();
        } while (!result.getUnprocessedKeys().isEmpty());
    } catch (AmazonServiceException ase) {
        // Include the service error details instead of swallowing them.
        System.err.println("Failed to retrieve items.");
        ase.printStackTrace();
    }
}

/** Builds a single-attribute string key map ({@code name -> S(value)}). */
private static Map<String, AttributeValue> makeKey(String name, String value) {
    Map<String, AttributeValue> key = new HashMap<>();
    key.put(name, new AttributeValue().withS(value));
    return key;
}

/** Prints {@code header + tableName} and each item via {@code printItem}; no-op when {@code items} is null. */
private static void printTableResults(String tableName, List<Map<String, AttributeValue>> items, String header) {
    if (items != null) {
        System.out.println(header + tableName);
        for (Map<String, AttributeValue> item : items) {
            printItem(item);
        }
    }
}
Aggregations