use of com.amazonaws.services.dynamodbv2.document.Table in project nifi by apache.
the class PutDynamoDB method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
    if (flowFiles == null || flowFiles.isEmpty()) {
        return;
    }
    Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();
    final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();
    final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String hashKeyValueType = context.getProperty(HASH_KEY_VALUE_TYPE).getValue();
    final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String rangeKeyValueType = context.getProperty(RANGE_KEY_VALUE_TYPE).getValue();
    final String jsonDocument = context.getProperty(JSON_DOCUMENT).evaluateAttributeExpressions().getValue();
    final String charset = context.getProperty(DOCUMENT_CHARSET).evaluateAttributeExpressions().getValue();
    TableWriteItems tableWriteItems = new TableWriteItems(table);
    for (FlowFile flowFile : flowFiles) {
        final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
        final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);
        if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
            continue;
        }
        if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
            continue;
        }
        if (!isDataValid(flowFile, jsonDocument)) {
            // Report the combined flowfile + document size that exceeded the 400 KB item limit
            flowFile = session.putAttribute(flowFile, AWS_DYNAMO_DB_ITEM_SIZE_ERROR,
                    "Max size of item + attribute should be 400kb but was " + (flowFile.getSize() + jsonDocument.length()));
            session.transfer(flowFile, REL_FAILURE);
            continue;
        }
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        session.exportTo(flowFile, baos);
        try {
            // Hash-key-only items omit the range key component
            if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
                tableWriteItems.addItemToPut(new Item().withKeyComponent(hashKeyName, hashKeyValue)
                        .withJSON(jsonDocument, IOUtils.toString(baos.toByteArray(), charset)));
            } else {
                tableWriteItems.addItemToPut(new Item().withKeyComponent(hashKeyName, hashKeyValue)
                        .withKeyComponent(rangeKeyName, rangeKeyValue)
                        .withJSON(jsonDocument, IOUtils.toString(baos.toByteArray(), charset)));
            }
        } catch (IOException ioe) {
            getLogger().error("IOException while creating put item : " + ioe.getMessage());
            flowFile = session.putAttribute(flowFile, DYNAMODB_ITEM_IO_ERROR, ioe.getMessage());
            session.transfer(flowFile, REL_FAILURE);
        }
        keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
    }
    if (keysToFlowFileMap.isEmpty()) {
        return;
    }
    final DynamoDB dynamoDB = getDynamoDB();
    try {
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);
        handleUnprocessedItems(session, keysToFlowFileMap, table, hashKeyName, hashKeyValueType, rangeKeyName, rangeKeyValueType, outcome);
        // Handle any remaining flowfiles
        for (FlowFile flowFile : keysToFlowFileMap.values()) {
            getLogger().debug("Successfully posted items to DynamoDB : " + table);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (AmazonServiceException exception) {
        getLogger().error("Could not process flowFiles due to service exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (AmazonClientException exception) {
        getLogger().error("Could not process flowFiles due to client exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (Exception exception) {
        getLogger().error("Could not process flowFiles due to exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    }
}
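The handleUnprocessedItems call above is NiFi-specific, but the DynamoDB behavior behind it is worth spelling out: batchWriteItem can succeed while leaving some write requests unprocessed (for example under throttling), and the caller is expected to resubmit them. A minimal standalone sketch of that retry loop using the same document API; the class and method names are illustrative, not part of the NiFi processor, and a production version should sleep with exponential backoff between rounds:

import com.amazonaws.services.dynamodbv2.document.BatchWriteItemOutcome;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.TableWriteItems;
import com.amazonaws.services.dynamodbv2.model.WriteRequest;
import java.util.List;
import java.util.Map;

public class BatchWriteHelper {
    /**
     * Writes a batch and resubmits whatever DynamoDB reports as unprocessed
     * until nothing is left. Backoff between rounds is omitted for brevity.
     */
    public static void writeFully(DynamoDB dynamoDB, TableWriteItems items) {
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(items);
        Map<String, List<WriteRequest>> unprocessed = outcome.getUnprocessedItems();
        while (!unprocessed.isEmpty()) {
            // Resubmit only the leftover write requests from the previous round
            outcome = dynamoDB.batchWriteItemUnprocessed(unprocessed);
            unprocessed = outcome.getUnprocessedItems();
        }
    }
}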
use of com.amazonaws.services.dynamodbv2.document.Table in project nifi by apache.
the class ITAbstractDynamoDBTest method beforeClass.
@BeforeClass
public static void beforeClass() throws Exception {
    final PropertiesCredentials credentials;
    try (FileInputStream fis = new FileInputStream(CREDENTIALS_FILE)) {
        credentials = new PropertiesCredentials(fis);
    }
    amazonDynamoDBClient = new AmazonDynamoDBClient(credentials);
    dynamoDB = new DynamoDB(amazonDynamoDBClient);
    amazonDynamoDBClient.setRegion(Region.getRegion(Regions.US_WEST_2));
    // Table 1: string hash key + string range key
    ArrayList<AttributeDefinition> attributeDefinitions = new ArrayList<>();
    attributeDefinitions.add(new AttributeDefinition().withAttributeName("hashS").withAttributeType("S"));
    attributeDefinitions.add(new AttributeDefinition().withAttributeName("rangeS").withAttributeType("S"));
    ArrayList<KeySchemaElement> keySchema = new ArrayList<>();
    keySchema.add(new KeySchemaElement().withAttributeName("hashS").withKeyType(KeyType.HASH));
    keySchema.add(new KeySchemaElement().withAttributeName("rangeS").withKeyType(KeyType.RANGE));
    CreateTableRequest request = new CreateTableRequest()
            .withTableName(stringHashStringRangeTableName)
            .withKeySchema(keySchema)
            .withAttributeDefinitions(attributeDefinitions)
            .withProvisionedThroughput(new ProvisionedThroughput().withReadCapacityUnits(5L).withWriteCapacityUnits(6L));
    Table stringHashStringRangeTable = dynamoDB.createTable(request);
    stringHashStringRangeTable.waitForActive();
    // Table 2: number hash key + number range key
    attributeDefinitions = new ArrayList<>();
    attributeDefinitions.add(new AttributeDefinition().withAttributeName("hashN").withAttributeType("N"));
    attributeDefinitions.add(new AttributeDefinition().withAttributeName("rangeN").withAttributeType("N"));
    keySchema = new ArrayList<>();
    keySchema.add(new KeySchemaElement().withAttributeName("hashN").withKeyType(KeyType.HASH));
    keySchema.add(new KeySchemaElement().withAttributeName("rangeN").withKeyType(KeyType.RANGE));
    request = new CreateTableRequest()
            .withTableName(numberHashNumberRangeTableName)
            .withKeySchema(keySchema)
            .withAttributeDefinitions(attributeDefinitions)
            .withProvisionedThroughput(new ProvisionedThroughput().withReadCapacityUnits(5L).withWriteCapacityUnits(6L));
    Table numberHashNumberRangeTable = dynamoDB.createTable(request);
    numberHashNumberRangeTable.waitForActive();
    // Table 3: number hash key only
    attributeDefinitions = new ArrayList<>();
    attributeDefinitions.add(new AttributeDefinition().withAttributeName("hashN").withAttributeType("N"));
    keySchema = new ArrayList<>();
    keySchema.add(new KeySchemaElement().withAttributeName("hashN").withKeyType(KeyType.HASH));
    request = new CreateTableRequest()
            .withTableName(numberHashOnlyTableName)
            .withKeySchema(keySchema)
            .withAttributeDefinitions(attributeDefinitions)
            .withProvisionedThroughput(new ProvisionedThroughput().withReadCapacityUnits(5L).withWriteCapacityUnits(6L));
    Table numberHashOnlyTable = dynamoDB.createTable(request);
    numberHashOnlyTable.waitForActive();
}
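The setup above leaves the three tables in place; a matching @AfterClass would normally remove them. A minimal teardown sketch, assuming the table-name fields and dynamoDB instance above are visible here (the real ITAbstractDynamoDBTest teardown may differ):

@AfterClass
public static void afterClass() throws Exception {
    // Drop each integration-test table and block until DynamoDB confirms deletion
    for (String tableName : new String[] { stringHashStringRangeTableName,
            numberHashNumberRangeTableName, numberHashOnlyTableName }) {
        Table table = dynamoDB.getTable(tableName);
        table.delete();
        table.waitForDelete();
    }
}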
use of com.amazonaws.services.dynamodbv2.document.Table in project amos-ss17-alexa by c-i-ber.
the class DynamoDbMapper method dropTable.
/**
 * Drops the table mapped by a POJO class.
 *
 * @param cl class which will be mapped to drop a table
 */
public void dropTable(Class cl) {
    DeleteTableRequest tableRequest = mapper.generateDeleteTableRequest(cl);
    Table table = dynamoDB.getTable(tableRequest.getTableName());
    try {
        dynamoDbClient.deleteTable(tableRequest);
        table.waitForDelete();
    } catch (ResourceNotFoundException e) {
        // Table does not exist; nothing to drop
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
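Calling this only requires a mapper-annotated POJO, since generateDeleteTableRequest derives the table name from the @DynamoDBTable annotation. A hedged usage sketch; AccountData and the dynamoDbMapper instance are hypothetical stand-ins, not taken from the project's code:

// Hypothetical POJO: the mapper derives table name and key schema from the annotations
@DynamoDBTable(tableName = "account_data")
public class AccountData {
    private String id;

    @DynamoDBHashKey(attributeName = "id")
    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
}

// Dropping the backing table; returns quietly if it does not exist
dynamoDbMapper.dropTable(AccountData.class);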
use of com.amazonaws.services.dynamodbv2.document.Table in project amos-ss17-alexa by c-i-ber.
the class DynamoDbMapper method createTable.
/**
 * Creates a new table mapped by a POJO class.
 *
 * @param cl class which will be mapped to the new table
 */
public void createTable(Class cl) throws InterruptedException {
    CreateTableRequest tableRequest = mapper.generateCreateTableRequest(cl);
    tableRequest.setProvisionedThroughput(new ProvisionedThroughput(1L, 1L));
    Table table = dynamoDB.createTable(tableRequest);
    table.waitForActive();
}
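Usage mirrors dropTable; with the hypothetical AccountData POJO sketched earlier, creating its table is a one-liner (note the method hard-codes a 1 read / 1 write provisioned throughput):

// Creates the "account_data" table and blocks until it reaches ACTIVE
dynamoDbMapper.createTable(AccountData.class);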
use of com.amazonaws.services.dynamodbv2.document.Table in project wildfly-camel by wildfly-extras.
the class DynamoDBUtils method createTable.
public static TableDescription createTable(AmazonDynamoDB client, String tableName) throws InterruptedException {
    CreateTableRequest tableReq = new CreateTableRequest()
            .withTableName(tableName)
            .withKeySchema(new KeySchemaElement("Id", KeyType.HASH))
            .withAttributeDefinitions(new AttributeDefinition("Id", ScalarAttributeType.N))
            .withProvisionedThroughput(new ProvisionedThroughput(10L, 10L))
            .withStreamSpecification(new StreamSpecification()
                    .withStreamEnabled(true)
                    .withStreamViewType(StreamViewType.NEW_AND_OLD_IMAGES));
    DynamoDB dynamoDB = new DynamoDB(client);
    Table table = dynamoDB.createTable(tableReq);
    return table.waitForActive();
}
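Because the request enables a stream with NEW_AND_OLD_IMAGES, the returned TableDescription carries the stream ARN that a DynamoDB Streams consumer would need. A hedged usage sketch; the builder-based client construction and the table name are assumptions, not taken from the test code:

AmazonDynamoDB client = AmazonDynamoDBClientBuilder.defaultClient();
TableDescription description = DynamoDBUtils.createTable(client, "camel-test");
// Available only because the stream was enabled at creation time
String streamArn = description.getLatestStreamArn();
System.out.println("status=" + description.getTableStatus() + " stream=" + streamArn);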