Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project openhab1-addons by openhab.
The class DynamoDBPersistenceService, method store.
/**
* {@inheritDoc}
*/
@Override
public void store(Item item, String alias) {
if (item.getState() instanceof UnDefType) {
logger.debug("Undefined item state received. Not storing item.");
return;
}
if (!isProperlyConfigured) {
logger.warn("Configuration for dynamodb not yet loaded or broken. Not storing item.");
return;
}
if (!maybeConnectAndCheckConnection()) {
logger.warn("DynamoDB not connected. Not storing item.");
return;
}
String realName = item.getName();
String name = (alias != null) ? alias : realName;
Date time = new Date(System.currentTimeMillis());
State state = item.getState();
logger.trace("Tried to get item from item class {}, state is {}", item.getClass(), state.toString());
DynamoDBItem<?> dynamoItem = AbstractDynamoDBItem.fromState(name, state, time);
DynamoDBMapper mapper = getDBMapper(tableNameResolver.fromItem(dynamoItem));
if (!createTable(mapper, dynamoItem.getClass())) {
logger.warn("Table creation failed. Not storing item");
return;
}
try {
logger.debug("storing {} in dynamo. Serialized value {}. Original Item: {}", name, state, item);
mapper.save(dynamoItem);
logger.debug("Sucessfully stored item {}", item);
} catch (AmazonClientException e) {
logger.error("Error storing object to dynamo: {}", e.getMessage());
}
}
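For context, store() relies on the AWS SDK v1 object-mapper pattern: AbstractDynamoDBItem.fromState produces a DTO whose annotations tell DynamoDBMapper how to serialize it into a table item. A minimal sketch of that pattern follows; the DTO class, attribute names, and table name are illustrative assumptions, not the actual openhab1-addons types.

import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBAttribute;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBHashKey;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBRangeKey;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTable;

// Hypothetical DTO standing in for the project's AbstractDynamoDBItem subclasses.
@DynamoDBTable(tableName = "openhab-item-states") // table name is an assumption
public class ItemStateDto {
    private String itemName;
    private String time;
    private String state;

    @DynamoDBHashKey(attributeName = "itemname")
    public String getItemName() { return itemName; }
    public void setItemName(String itemName) { this.itemName = itemName; }

    @DynamoDBRangeKey(attributeName = "time")
    public String getTime() { return time; }
    public void setTime(String time) { this.time = time; }

    @DynamoDBAttribute(attributeName = "state")
    public String getState() { return state; }
    public void setState(String state) { this.state = state; }

    public static void main(String[] args) {
        ItemStateDto dto = new ItemStateDto();
        dto.setItemName("LivingRoom_Temperature");
        dto.setTime("2018-01-01T12:00:00Z");
        dto.setState("21.5");
        // save() maps the annotated getters to item attributes and issues a PutItem.
        DynamoDBMapper mapper = new DynamoDBMapper(AmazonDynamoDBClientBuilder.defaultClient());
        mapper.save(dto);
    }
}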
Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project openhab1-addons by openhab.
The class DynamoDBPersistenceService, method createQueryExpression.
/**
* Constructs a DynamoDB query expression from the given filter.
*
* @param dtoClass the DTO class to query
* @param filter the filter criteria
* @return DynamoDBQueryExpression corresponding to the given FilterCriteria
*/
private DynamoDBQueryExpression<DynamoDBItem<?>> createQueryExpression(Class<? extends DynamoDBItem<?>> dtoClass, FilterCriteria filter) {
DynamoDBItem<?> item = getDynamoDBHashKey(dtoClass, filter.getItemName());
final DynamoDBQueryExpression<DynamoDBItem<?>> queryExpression = new DynamoDBQueryExpression<DynamoDBItem<?>>()
        .withHashKeyValues(item)
        .withScanIndexForward(filter.getOrdering() == Ordering.ASCENDING)
        .withLimit(filter.getPageSize());
Condition timeFilter = maybeAddTimeFilter(queryExpression, filter);
maybeAddStateFilter(filter, queryExpression);
logger.debug("Querying: {} with {}", filter.getItemName(), timeFilter);
return queryExpression;
}
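The time and state constraints are attached by the two maybe* helpers. As a rough sketch of what the time filter could look like with this API (the range-key attribute name "time" and the helper's shape are assumptions, not the project's actual implementation):

import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBQueryExpression;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
import com.amazonaws.services.dynamodbv2.model.Condition;

public final class TimeFilterSketch {
    // Restricts the query to range-key values between begin and end (inclusive).
    static <T> Condition addTimeBetween(DynamoDBQueryExpression<T> query, String begin, String end) {
        Condition timeCondition = new Condition()
                .withComparisonOperator(ComparisonOperator.BETWEEN)
                .withAttributeValueList(new AttributeValue().withS(begin),
                        new AttributeValue().withS(end));
        query.withRangeKeyCondition("time", timeCondition); // "time" is assumed
        return timeCondition;
    }
}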
Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project YCSB by brianfrankcooper.
The class DynamoDBClient, method init.
@Override
public void init() throws DBException {
String debug = getProperties().getProperty("dynamodb.debug", null);
if (null != debug && "true".equalsIgnoreCase(debug)) {
LOGGER.setLevel(Level.DEBUG);
}
String configuredEndpoint = getProperties().getProperty("dynamodb.endpoint", null);
String credentialsFile = getProperties().getProperty("dynamodb.awsCredentialsFile", null);
String primaryKey = getProperties().getProperty("dynamodb.primaryKey", null);
String primaryKeyTypeString = getProperties().getProperty("dynamodb.primaryKeyType", null);
String consistentReads = getProperties().getProperty("dynamodb.consistentReads", null);
String connectMax = getProperties().getProperty("dynamodb.connectMax", null);
if (null != connectMax) {
this.maxConnects = Integer.parseInt(connectMax);
}
if (null != consistentReads && "true".equalsIgnoreCase(consistentReads)) {
this.consistentRead = true;
}
if (null != configuredEndpoint) {
this.endpoint = configuredEndpoint;
}
if (null == primaryKey || primaryKey.length() < 1) {
throw new DBException("Missing primary key attribute name, cannot continue");
}
if (null != primaryKeyTypeString) {
try {
this.primaryKeyType = PrimaryKeyType.valueOf(primaryKeyTypeString.trim().toUpperCase());
} catch (IllegalArgumentException e) {
throw new DBException("Invalid primary key mode specified: " + primaryKeyTypeString + ". Expecting HASH or HASH_AND_RANGE.");
}
}
if (this.primaryKeyType == PrimaryKeyType.HASH_AND_RANGE) {
// When the primary key type is HASH_AND_RANGE, keys used by YCSB
// are range keys so we can benchmark performance of individual hash
// partitions. In this case, the user must specify the hash key's name
// and optionally can designate a value for the hash key.
String configuredHashKeyName = getProperties().getProperty("dynamodb.hashKeyName", null);
if (null == configuredHashKeyName || configuredHashKeyName.isEmpty()) {
throw new DBException("Must specify a non-empty hash key name when the primary key type is HASH_AND_RANGE.");
}
this.hashKeyName = configuredHashKeyName;
this.hashKeyValue = getProperties().getProperty("dynamodb.hashKeyValue", DEFAULT_HASH_KEY_VALUE);
}
try {
// Note: credentialsFile may be null if dynamodb.awsCredentialsFile was not set;
// the resulting exception is caught and logged below rather than rethrown.
AWSCredentials credentials = new PropertiesCredentials(new File(credentialsFile));
ClientConfiguration cconfig = new ClientConfiguration();
cconfig.setMaxConnections(maxConnects);
dynamoDB = new AmazonDynamoDBClient(credentials, cconfig);
dynamoDB.setEndpoint(this.endpoint);
primaryKeyName = primaryKey;
LOGGER.info("dynamodb connection created with " + this.endpoint);
} catch (Exception e1) {
LOGGER.error("DynamoDBClient.init(): Could not initialize DynamoDB client.", e1);
}
}
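Put together, init() is driven entirely by the dynamodb.* properties read above. A hedged example of a value set those lookups would accept (all values are illustrative, assuming DynamoDB Local on port 8000):

import java.util.Properties;

public final class DynamoDBClientConfigExample {
    public static Properties exampleProperties() {
        Properties props = new Properties();
        props.setProperty("dynamodb.endpoint", "http://localhost:8000"); // e.g. DynamoDB Local
        props.setProperty("dynamodb.awsCredentialsFile", "conf/dynamodb.credentials");
        props.setProperty("dynamodb.primaryKey", "id");
        props.setProperty("dynamodb.primaryKeyType", "HASH_AND_RANGE");
        props.setProperty("dynamodb.hashKeyName", "partition"); // required for HASH_AND_RANGE
        props.setProperty("dynamodb.consistentReads", "true");
        props.setProperty("dynamodb.connectMax", "50");
        return props;
    }
}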
Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
The class AbstractWriteDynamoDBProcessor, method handleUnprocessedItems.
/**
* Helper method to handle unprocessed items.
* @param session process session
* @param keysToFlowFileMap map of DynamoDB primary key to flow file
* @param table dynamodb table
* @param hashKeyName the hash key name
* @param hashKeyValueType the hash key value type
* @param rangeKeyName the range key name
* @param rangeKeyValueType the range key value type
* @param outcome the write outcome
*/
protected void handleUnprocessedItems(final ProcessSession session, Map<ItemKeys, FlowFile> keysToFlowFileMap,
        final String table, final String hashKeyName, final String hashKeyValueType,
        final String rangeKeyName, final String rangeKeyValueType, BatchWriteItemOutcome outcome) {
BatchWriteItemResult result = outcome.getBatchWriteItemResult();
// Handle unprocessed items
List<WriteRequest> unprocessedItems = result.getUnprocessedItems().get(table);
if (unprocessedItems != null && unprocessedItems.size() > 0) {
for (WriteRequest request : unprocessedItems) {
Map<String, AttributeValue> item = getRequestItem(request);
Object hashKeyValue = getValue(item, hashKeyName, hashKeyValueType);
Object rangeKeyValue = getValue(item, rangeKeyName, rangeKeyValueType);
sendUnprocessedToUnprocessedRelationship(session, keysToFlowFileMap, hashKeyValue, rangeKeyValue);
}
}
}
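getRequestItem is the abstract hook that lets this helper serve both put- and delete-style processors, since the AWS SDK's WriteRequest wraps either a PutRequest or a DeleteRequest. A plausible pair of implementations, written here as two named helpers for compilability (a sketch consistent with the SDK model classes, not necessarily NiFi's exact code):

import java.util.Map;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.WriteRequest;

public final class RequestItemSketch {
    // Put-style processors: the unprocessed WriteRequest wraps a PutRequest with the full item.
    static Map<String, AttributeValue> putRequestItem(WriteRequest writeRequest) {
        return writeRequest.getPutRequest().getItem();
    }

    // Delete-style processors: the WriteRequest wraps a DeleteRequest carrying only the key.
    static Map<String, AttributeValue> deleteRequestItem(WriteRequest writeRequest) {
        return writeRequest.getDeleteRequest().getKey();
    }
}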
Use of com.amazonaws.services.dynamodbv2.document.DynamoDB in project nifi by apache.
The class DeleteDynamoDB, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
if (flowFiles == null || flowFiles.size() == 0) {
return;
}
Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();
final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();
final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
final String hashKeyValueType = context.getProperty(HASH_KEY_VALUE_TYPE).getValue();
final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
final String rangeKeyValueType = context.getProperty(RANGE_KEY_VALUE_TYPE).getValue();
TableWriteItems tableWriteItems = new TableWriteItems(table);
for (FlowFile flowFile : flowFiles) {
final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);
if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
continue;
}
if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
continue;
}
if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
tableWriteItems.addHashOnlyPrimaryKeysToDelete(hashKeyName, hashKeyValue);
} else {
tableWriteItems.addHashAndRangePrimaryKeyToDelete(hashKeyName, hashKeyValue, rangeKeyName, rangeKeyValue);
}
keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
}
if (keysToFlowFileMap.isEmpty()) {
return;
}
final DynamoDB dynamoDB = getDynamoDB();
try {
BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);
handleUnprocessedItems(session, keysToFlowFileMap, table, hashKeyName, hashKeyValueType, rangeKeyName, rangeKeyValueType, outcome);
// All items not returned as unprocessed were deleted successfully
for (FlowFile flowFile : keysToFlowFileMap.values()) {
getLogger().debug("Successfully deleted item from dynamodb : " + table);
session.transfer(flowFile, REL_SUCCESS);
}
} catch (AmazonServiceException exception) {
getLogger().error("Could not process flowFiles due to service exception : " + exception.getMessage());
List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
session.transfer(failedFlowFiles, REL_FAILURE);
} catch (AmazonClientException exception) {
getLogger().error("Could not process flowFiles due to client exception : " + exception.getMessage());
List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
session.transfer(failedFlowFiles, REL_FAILURE);
} catch (Exception exception) {
getLogger().error("Could not process flowFiles due to exception : " + exception.getMessage());
List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
session.transfer(failedFlowFiles, REL_FAILURE);
}
}
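DeleteDynamoDB routes unprocessed items to a dedicated relationship; outside NiFi's relationship model, the usual pattern with the document API is to retry them via batchWriteItemUnprocessed. A minimal sketch of that loop (production code would bound the retries and back off between attempts):

import com.amazonaws.services.dynamodbv2.document.BatchWriteItemOutcome;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.TableWriteItems;

public final class BatchRetrySketch {
    // Resubmits whatever DynamoDB reports back as unprocessed until the map is empty.
    static void writeWithRetry(DynamoDB dynamoDB, TableWriteItems items) {
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(items);
        while (!outcome.getUnprocessedItems().isEmpty()) {
            outcome = dynamoDB.batchWriteItemUnprocessed(outcome.getUnprocessedItems());
        }
    }
}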