Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
The class DeleteDynamoDB, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
    if (flowFiles == null || flowFiles.isEmpty()) {
        return;
    }
    Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();
    final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();
    final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String hashKeyValueType = context.getProperty(HASH_KEY_VALUE_TYPE).getValue();
    final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String rangeKeyValueType = context.getProperty(RANGE_KEY_VALUE_TYPE).getValue();
    TableWriteItems tableWriteItems = new TableWriteItems(table);
    for (FlowFile flowFile : flowFiles) {
        final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
        final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);
        if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
            continue;
        }
        if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
            continue;
        }
        if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
            tableWriteItems.addHashOnlyPrimaryKeysToDelete(hashKeyName, hashKeyValue);
        } else {
            tableWriteItems.addHashAndRangePrimaryKeyToDelete(hashKeyName, hashKeyValue, rangeKeyName, rangeKeyValue);
        }
        keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
    }
    if (keysToFlowFileMap.isEmpty()) {
        return;
    }
    final DynamoDB dynamoDB = getDynamoDB();
    try {
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);
        handleUnprocessedItems(session, keysToFlowFileMap, table, hashKeyName, hashKeyValueType, rangeKeyName, rangeKeyValueType, outcome);
        // Every item not reported back as unprocessed was deleted successfully
        for (FlowFile flowFile : keysToFlowFileMap.values()) {
            getLogger().debug("Successfully deleted item from DynamoDB table: " + table);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (AmazonServiceException exception) {
        getLogger().error("Could not process flowFiles due to service exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (AmazonClientException exception) {
        getLogger().error("Could not process flowFiles due to client exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (Exception exception) {
        getLogger().error("Could not process flowFiles due to exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    }
}
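Both the delete and get processors key their bookkeeping HashMap on ItemKeys, so that class must implement equals and hashCode over both key components, or the lookups used to reconcile unprocessed keys would silently fail. The real class lives in the NiFi AWS bundle and may differ in detail; a minimal sketch of the idea:

// Hypothetical sketch of an ItemKeys value object; the actual NiFi class may differ.
public class ItemKeys {

    private final Object hashKey;
    private final Object rangeKey;

    public ItemKeys(final Object hashKey, final Object rangeKey) {
        // Normalize nulls so hash-only and hash-and-range keys compare consistently
        this.hashKey = hashKey == null ? "" : hashKey;
        this.rangeKey = rangeKey == null ? "" : rangeKey;
    }

    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        if (!(other instanceof ItemKeys)) {
            return false;
        }
        final ItemKeys that = (ItemKeys) other;
        return hashKey.equals(that.hashKey) && rangeKey.equals(that.rangeKey);
    }

    @Override
    public int hashCode() {
        // Combine both components so map lookups match on the full primary key
        return 31 * hashKey.hashCode() + rangeKey.hashCode();
    }

    @Override
    public String toString() {
        return "ItemKeys[hashKey=" + hashKey + ", rangeKey=" + rangeKey + "]";
    }
}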
Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
The class GetDynamoDB, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
    if (flowFiles == null || flowFiles.isEmpty()) {
        return;
    }
    Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();
    final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();
    TableKeysAndAttributes tableKeysAndAttributes = new TableKeysAndAttributes(table);
    final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String jsonDocument = context.getProperty(JSON_DOCUMENT).evaluateAttributeExpressions().getValue();
    for (FlowFile flowFile : flowFiles) {
        final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
        final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);
        if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
            continue;
        }
        if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
            continue;
        }
        keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
        if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
            tableKeysAndAttributes.addHashOnlyPrimaryKey(hashKeyName, hashKeyValue);
        } else {
            tableKeysAndAttributes.addHashAndRangePrimaryKey(hashKeyName, hashKeyValue, rangeKeyName, rangeKeyValue);
        }
    }
    if (keysToFlowFileMap.isEmpty()) {
        return;
    }
    final DynamoDB dynamoDB = getDynamoDB();
    try {
        BatchGetItemOutcome result = dynamoDB.batchGetItem(tableKeysAndAttributes);
        // Handle processed items and write the JSON document into the flowfile content
        List<Item> items = result.getTableItems().get(table);
        for (Item item : items) {
            ItemKeys itemKeys = new ItemKeys(item.get(hashKeyName), item.get(rangeKeyName));
            FlowFile flowFile = keysToFlowFileMap.get(itemKeys);
            if (item.get(jsonDocument) != null) {
                ByteArrayInputStream bais = new ByteArrayInputStream(item.getJSON(jsonDocument).getBytes(StandardCharsets.UTF_8));
                flowFile = session.importFrom(bais, flowFile);
            }
            session.transfer(flowFile, REL_SUCCESS);
            keysToFlowFileMap.remove(itemKeys);
        }
        // Handle unprocessed keys
        Map<String, KeysAndAttributes> unprocessedKeys = result.getUnprocessedKeys();
        if (unprocessedKeys != null && !unprocessedKeys.isEmpty()) {
            KeysAndAttributes keysAndAttributes = unprocessedKeys.get(table);
            List<Map<String, AttributeValue>> keys = keysAndAttributes.getKeys();
            for (Map<String, AttributeValue> unprocessedKey : keys) {
                Object hashKeyValue = getAttributeValue(context, HASH_KEY_VALUE_TYPE, unprocessedKey.get(hashKeyName));
                Object rangeKeyValue = getAttributeValue(context, RANGE_KEY_VALUE_TYPE, unprocessedKey.get(rangeKeyName));
                sendUnprocessedToUnprocessedRelationship(session, keysToFlowFileMap, hashKeyValue, rangeKeyValue);
            }
        }
        // Handle any remaining items as not found; remove through the iterator to
        // avoid a ConcurrentModificationException while mutating the map in the loop
        Iterator<Map.Entry<ItemKeys, FlowFile>> iterator = keysToFlowFileMap.entrySet().iterator();
        while (iterator.hasNext()) {
            Map.Entry<ItemKeys, FlowFile> entry = iterator.next();
            FlowFile flowFile = entry.getValue();
            flowFile = session.putAttribute(flowFile, DYNAMODB_KEY_ERROR_NOT_FOUND, DYNAMODB_KEY_ERROR_NOT_FOUND_MESSAGE + entry.getKey().toString());
            session.transfer(flowFile, REL_NOT_FOUND);
            iterator.remove();
        }
    } catch (AmazonServiceException exception) {
        getLogger().error("Could not process flowFiles due to service exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (AmazonClientException exception) {
        getLogger().error("Could not process flowFiles due to client exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (Exception exception) {
        getLogger().error("Could not process flowFiles due to exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    }
}
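Stripped of the NiFi session plumbing, the document-API batch read used above comes down to a handful of calls, including a retry loop for unprocessed keys. A minimal standalone sketch against the AWS SDK v1; the table name, key names, and key values are placeholders, and credentials come from the default provider chain:

import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.document.BatchGetItemOutcome;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.TableKeysAndAttributes;
import com.amazonaws.services.dynamodbv2.model.KeysAndAttributes;
import java.util.Map;

public class BatchGetSketch {

    public static void main(final String[] args) {
        final DynamoDB dynamoDB = new DynamoDB(AmazonDynamoDBClientBuilder.defaultClient());
        // "myTable", "hashS"/"rangeS", and the key values are placeholders
        final TableKeysAndAttributes keys = new TableKeysAndAttributes("myTable")
                .addHashAndRangePrimaryKey("hashS", "h1", "rangeS", "r1")
                .addHashAndRangePrimaryKey("hashS", "h2", "rangeS", "r2");
        BatchGetItemOutcome outcome = dynamoDB.batchGetItem(keys);
        while (true) {
            for (Item item : outcome.getTableItems().get("myTable")) {
                System.out.println(item.toJSONPretty());
            }
            // DynamoDB may return unprocessed keys under throttling or size limits;
            // a production caller should also back off between retries
            final Map<String, KeysAndAttributes> unprocessed = outcome.getUnprocessedKeys();
            if (unprocessed == null || unprocessed.isEmpty()) {
                break;
            }
            outcome = dynamoDB.batchGetItemUnprocessed(unprocessed);
        }
    }
}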
Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
The class DeleteDynamoDBTest, method testStringHashStringRangeDeleteOnlyHashFailure.
@Test
public void testStringHashStringRangeDeleteOnlyHashFailure() {
    // Inject a mocked DynamoDB client to create the exception condition
    final DynamoDB mockDynamoDb = Mockito.mock(DynamoDB.class);
    // On write, throw a sample AWS service exception
    Mockito.when(mockDynamoDb.batchWriteItem(Matchers.<TableWriteItems>anyVararg())).thenThrow(getSampleAwsServiceException());
    deleteDynamoDB = new DeleteDynamoDB() {
        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDb;
        }
    };
    final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    deleteRunner.enqueue(new byte[] {});
    deleteRunner.run(1);
    deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_FAILURE, 1);
    List<MockFlowFile> flowFiles = deleteRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_FAILURE);
    for (MockFlowFile flowFile : flowFiles) {
        ITAbstractDynamoDBTest.validateServiceExceptionAttribute(flowFile);
    }
}
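The getSampleAwsServiceException helper referenced above is not part of this excerpt. A plausible version (hypothetical; the shared NiFi test utility may set different values) simply populates the fields that validateServiceExceptionAttribute later inspects:

// Hypothetical sketch of the helper used above; the real test utility may differ.
private AmazonServiceException getSampleAwsServiceException() {
    final AmazonServiceException exception = new AmazonServiceException("Test AWS Service Exception");
    exception.setErrorCode("8673509");                          // placeholder error code
    exception.setErrorType(AmazonServiceException.ErrorType.Client);
    exception.setRequestId("TestRequestId-1234567890");         // placeholder request id
    exception.setServiceName("Dynamo DB");
    exception.setStatusCode(403);
    return exception;
}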
Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
The class DeleteDynamoDBTest, method testStringHashStringRangeDeleteThrowsClientException.
@Test
public void testStringHashStringRangeDeleteThrowsClientException() {
    final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
        @Override
        public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {
            throw new AmazonClientException("clientException");
        }
    };
    deleteDynamoDB = new DeleteDynamoDB() {
        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDB;
        }
    };
    final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    deleteRunner.enqueue(new byte[] {});
    deleteRunner.run(1);
    deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_FAILURE, 1);
    List<MockFlowFile> flowFiles = deleteRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_FAILURE);
    for (MockFlowFile flowFile : flowFiles) {
        assertEquals("clientException", flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_ERROR_EXCEPTION_MESSAGE));
    }
}
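The assertion above only holds if the processor copies the exception message into the DYNAMODB_ERROR_EXCEPTION_MESSAGE attribute of every failed flowfile. A hypothetical sketch of what such a helper in AbstractDynamoDBProcessor could look like (the real NiFi implementation may record additional attributes):

// Hypothetical sketch; the actual AbstractDynamoDBProcessor helper may differ.
protected List<FlowFile> processClientException(final ProcessSession session,
        final List<FlowFile> flowFiles, final AmazonClientException exception) {
    final List<FlowFile> failedFlowFiles = new ArrayList<>();
    for (FlowFile flowFile : flowFiles) {
        // Record the failure cause where downstream processors can see it
        flowFile = session.putAttribute(flowFile, DYNAMODB_ERROR_EXCEPTION_MESSAGE, exception.getMessage());
        failedFlowFiles.add(flowFile);
    }
    return failedFlowFiles;
}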
Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
The class DeleteDynamoDBTest, method testStringHashStringRangeDeleteThrowsRuntimeException.
@Test
public void testStringHashStringRangeDeleteThrowsRuntimeException() {
    final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
        @Override
        public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {
            throw new RuntimeException("runtimeException");
        }
    };
    deleteDynamoDB = new DeleteDynamoDB() {
        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDB;
        }
    };
    final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    deleteRunner.enqueue(new byte[] {});
    deleteRunner.run(1);
    deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_FAILURE, 1);
    List<MockFlowFile> flowFiles = deleteRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_FAILURE);
    for (MockFlowFile flowFile : flowFiles) {
        assertEquals("runtimeException", flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_ERROR_EXCEPTION_MESSAGE));
    }
}
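Note that the client- and runtime-exception tests simulate the failure with an anonymous DynamoDB subclass rather than a Mockito mock; either style works, since the processor only depends on the overridable getDynamoDB() hook. The same failure expressed with Mockito, mirroring the service-exception test above (a sketch, assuming the Mockito 1.x Matchers API used there):

final DynamoDB mockDynamoDB = Mockito.mock(DynamoDB.class);
// Any batchWriteItem call on the mock fails with the runtime exception under test
Mockito.when(mockDynamoDB.batchWriteItem(Matchers.<TableWriteItems>anyVararg()))
        .thenThrow(new RuntimeException("runtimeException"));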