use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project cas by apereo.
the class DynamoDbServiceRegistryFacilitator method createServicesTable.
/**
 * Create the services table, optionally dropping any existing table first.
 *
 * @param deleteTables whether existing tables should be deleted before creation
 */
@SneakyThrows
public void createServicesTable(final boolean deleteTables) {
    LOGGER.debug("Attempting to create DynamoDb services table");
    final CreateTableRequest request = new CreateTableRequest()
        .withAttributeDefinitions(new AttributeDefinition(ColumnNames.ID.getColumnName(), ScalarAttributeType.S))
        .withKeySchema(new KeySchemaElement(ColumnNames.ID.getColumnName(), KeyType.HASH))
        .withProvisionedThroughput(new ProvisionedThroughput(dynamoDbProperties.getReadCapacity(), dynamoDbProperties.getWriteCapacity()))
        .withTableName(dynamoDbProperties.getTableName());
    if (deleteTables) {
        final DeleteTableRequest delete = new DeleteTableRequest(request.getTableName());
        LOGGER.debug("Sending delete request [{}] to remove table if necessary", delete);
        TableUtils.deleteTableIfExists(amazonDynamoDBClient, delete);
    }
    LOGGER.debug("Sending create request [{}] to create table", request);
    TableUtils.createTableIfNotExists(amazonDynamoDBClient, request);
    LOGGER.debug("Waiting until table [{}] becomes active...", request.getTableName());
    TableUtils.waitUntilActive(amazonDynamoDBClient, request.getTableName());
    final DescribeTableRequest describeTableRequest = new DescribeTableRequest().withTableName(request.getTableName());
    LOGGER.debug("Sending request [{}] to obtain table description...", describeTableRequest);
    final TableDescription tableDescription = amazonDynamoDBClient.describeTable(describeTableRequest).getTable();
    LOGGER.debug("Located newly created table with description: [{}]", tableDescription);
}
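For reference, the same create-and-wait flow can also be expressed through the com.amazonaws.services.dynamodbv2.document.DynamoDB wrapper this page is about, rather than TableUtils against the low-level client. A minimal sketch; the table name, key name, and throughput below are illustrative, not the CAS configuration:

import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.Table;
import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
import com.amazonaws.services.dynamodbv2.model.KeyType;
import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
import com.amazonaws.services.dynamodbv2.model.ScalarAttributeType;

public class CreateTableSketch {
    public static void main(String[] args) throws InterruptedException {
        AmazonDynamoDB client = AmazonDynamoDBClientBuilder.defaultClient();
        DynamoDB dynamoDB = new DynamoDB(client);
        // Table name and capacities are hypothetical placeholders.
        Table table = dynamoDB.createTable(new CreateTableRequest()
            .withTableName("servicesTable")
            .withAttributeDefinitions(new AttributeDefinition("id", ScalarAttributeType.S))
            .withKeySchema(new KeySchemaElement("id", KeyType.HASH))
            .withProvisionedThroughput(new ProvisionedThroughput(10L, 10L)));
        // Blocks until DynamoDB reports the table ACTIVE, like TableUtils.waitUntilActive.
        table.waitForActive();
        System.out.println(table.describe().getTableStatus());
    }
}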
use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
the class PutDynamoDB method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
    if (flowFiles == null || flowFiles.isEmpty()) {
        return;
    }
    Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();
    final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();
    final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String hashKeyValueType = context.getProperty(HASH_KEY_VALUE_TYPE).getValue();
    final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String rangeKeyValueType = context.getProperty(RANGE_KEY_VALUE_TYPE).getValue();
    final String jsonDocument = context.getProperty(JSON_DOCUMENT).evaluateAttributeExpressions().getValue();
    final String charset = context.getProperty(DOCUMENT_CHARSET).evaluateAttributeExpressions().getValue();
    TableWriteItems tableWriteItems = new TableWriteItems(table);
    for (FlowFile flowFile : flowFiles) {
        final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
        final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);
        if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
            continue;
        }
        if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
            continue;
        }
        if (!isDataValid(flowFile, jsonDocument)) {
            flowFile = session.putAttribute(flowFile, AWS_DYNAMO_DB_ITEM_SIZE_ERROR,
                "Max size of item + attribute should be 400kb but was " + (flowFile.getSize() + jsonDocument.length()));
            session.transfer(flowFile, REL_FAILURE);
            continue;
        }
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        session.exportTo(flowFile, baos);
        try {
            if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
                tableWriteItems.addItemToPut(new Item()
                    .withKeyComponent(hashKeyName, hashKeyValue)
                    .withJSON(jsonDocument, IOUtils.toString(baos.toByteArray(), charset)));
            } else {
                tableWriteItems.addItemToPut(new Item()
                    .withKeyComponent(hashKeyName, hashKeyValue)
                    .withKeyComponent(rangeKeyName, rangeKeyValue)
                    .withJSON(jsonDocument, IOUtils.toString(baos.toByteArray(), charset)));
            }
        } catch (IOException ioe) {
            getLogger().error("IOException while creating put item: " + ioe.getMessage());
            flowFile = session.putAttribute(flowFile, DYNAMODB_ITEM_IO_ERROR, ioe.getMessage());
            session.transfer(flowFile, REL_FAILURE);
            // Skip this flowfile: it has already been routed to failure and must
            // not be added to the map, or it would be transferred a second time.
            continue;
        }
        keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
    }
    if (keysToFlowFileMap.isEmpty()) {
        return;
    }
    final DynamoDB dynamoDB = getDynamoDB();
    try {
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);
        handleUnprocessedItems(session, keysToFlowFileMap, table, hashKeyName, hashKeyValueType, rangeKeyName, rangeKeyValueType, outcome);
        // Handle any remaining flowfiles
        for (FlowFile flowFile : keysToFlowFileMap.values()) {
            getLogger().debug("Successfully posted items to DynamoDB: " + table);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (AmazonServiceException exception) {
        getLogger().error("Could not process flowFiles due to service exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (AmazonClientException exception) {
        getLogger().error("Could not process flowFiles due to client exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (Exception exception) {
        getLogger().error("Could not process flowFiles due to exception: " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    }
}
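handleUnprocessedItems is defined elsewhere in the processor and is not shown on this page, but the retry idea behind DynamoDB batch writes can be sketched on its own. The table name, key, and payload below are hypothetical, and this is a sketch of the general pattern, not NiFi's actual helper:

import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.document.BatchWriteItemOutcome;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.TableWriteItems;

public class BatchWriteRetrySketch {
    public static void main(String[] args) {
        DynamoDB dynamoDB = new DynamoDB(AmazonDynamoDBClientBuilder.defaultClient());
        TableWriteItems writes = new TableWriteItems("myTable") // hypothetical table
            .addItemToPut(new Item().withKeyComponent("hashS", "h1").withString("payload", "hello"));
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(writes);
        // DynamoDB may accept only part of a batch; anything it rejected comes
        // back as unprocessed items and should be resubmitted until the map is empty.
        while (!outcome.getUnprocessedItems().isEmpty()) {
            outcome = dynamoDB.batchWriteItemUnprocessed(outcome.getUnprocessedItems());
        }
    }
}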
use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
the class DeleteDynamoDBTest method setUp.
@Before
public void setUp() {
    outcome = new BatchWriteItemOutcome(result);
    result.setUnprocessedItems(new HashMap<String, List<WriteRequest>>());
    final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
        @Override
        public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {
            return outcome;
        }
    };
    deleteDynamoDB = new DeleteDynamoDB() {
        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDB;
        }
    };
}
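The anonymous-subclass stub works because DeleteDynamoDB exposes its client through the protected getDynamoDB() hook. The same stubbing could also be done with Mockito, assuming it is on the test classpath; a minimal sketch of the equivalent setup:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Replaces the anonymous DynamoDB subclass above.
final DynamoDB mockDynamoDB = mock(DynamoDB.class);
when(mockDynamoDB.batchWriteItem(any(TableWriteItems.class))).thenReturn(outcome);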
use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
the class DeleteDynamoDBTest method testStringHashStringRangeDeleteThrowsServiceException.
@Test
public void testStringHashStringRangeDeleteThrowsServiceException() {
    final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
        @Override
        public BatchWriteItemOutcome batchWriteItem(TableWriteItems... tableWriteItems) {
            throw new AmazonServiceException("serviceException");
        }
    };
    deleteDynamoDB = new DeleteDynamoDB() {
        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDB;
        }
    };
    final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    deleteRunner.enqueue(new byte[] {});
    deleteRunner.run(1);
    deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_FAILURE, 1);
    List<MockFlowFile> flowFiles = deleteRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_FAILURE);
    for (MockFlowFile flowFile : flowFiles) {
        assertEquals("serviceException (Service: null; Status Code: 0; Error Code: null; Request ID: null)",
            flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_ERROR_EXCEPTION_MESSAGE));
    }
}
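The expected attribute value looks odd but follows from AmazonServiceException itself: its getMessage() appends the service metadata (all unset in this test) to the raw error message. A quick illustration:

AmazonServiceException e = new AmazonServiceException("serviceException");
// Prints: serviceException (Service: null; Status Code: 0; Error Code: null; Request ID: null)
System.out.println(e.getMessage());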
use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
the class GetDynamoDBTest method testStringHashStringRangeGetThrowsClientException.
@Test
public void testStringHashStringRangeGetThrowsClientException() {
    final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {
        @Override
        public BatchGetItemOutcome batchGetItem(TableKeysAndAttributes... tableKeysAndAttributes) {
            throw new AmazonClientException("clientException");
        }
    };
    final GetDynamoDB getDynamoDB = new GetDynamoDB() {
        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDB;
        }
    };
    final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);
    getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
    getRunner.enqueue(new byte[] {});
    getRunner.run(1);
    getRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_FAILURE, 1);
    List<MockFlowFile> flowFiles = getRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_FAILURE);
    for (MockFlowFile flowFile : flowFiles) {
        assertEquals("clientException", flowFile.getAttribute(AbstractDynamoDBProcessor.DYNAMODB_ERROR_EXCEPTION_MESSAGE));
    }
}
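A side note on the mocks in these tests: the DynamoDB(Regions) constructor they subclass is deprecated in recent releases of the v1 SDK, where building the wrapper from a configured client is the supported route. A sketch for the same region:

import com.amazonaws.regions.Regions;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;

// Preferred construction: configure the low-level client, then wrap it.
AmazonDynamoDB client = AmazonDynamoDBClientBuilder.standard()
    .withRegion(Regions.AP_NORTHEAST_1)
    .build();
DynamoDB dynamoDB = new DynamoDB(client);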