
Example 6 with Item

Use of com.amazonaws.services.dynamodbv2.document.Item in project NiFi by Apache.

The class AbstractWriteDynamoDBProcessor, method handleUnprocessedItems.

/**
 * Helper method to handle unprocessed items
 * @param session process session
 * @param keysToFlowFileMap map of the DynamoDB primary key to the flow file
 * @param table dynamodb table name
 * @param hashKeyName the hash key name
 * @param hashKeyValueType the hash key value type
 * @param rangeKeyName the range key name
 * @param rangeKeyValueType the range key value type
 * @param outcome the write outcome
 */
protected void handleUnprocessedItems(final ProcessSession session, Map<ItemKeys, FlowFile> keysToFlowFileMap, final String table, final String hashKeyName, final String hashKeyValueType, final String rangeKeyName, final String rangeKeyValueType, BatchWriteItemOutcome outcome) {
    BatchWriteItemResult result = outcome.getBatchWriteItemResult();
    // Handle unprocessed items
    List<WriteRequest> unprocessedItems = result.getUnprocessedItems().get(table);
    if (unprocessedItems != null && unprocessedItems.size() > 0) {
        for (WriteRequest request : unprocessedItems) {
            Map<String, AttributeValue> item = getRequestItem(request);
            Object hashKeyValue = getValue(item, hashKeyName, hashKeyValueType);
            Object rangeKeyValue = getValue(item, rangeKeyName, rangeKeyValueType);
            sendUnprocessedToUnprocessedRelationship(session, keysToFlowFileMap, hashKeyValue, rangeKeyValue);
        }
    }
}
Also used : BatchWriteItemResult(com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult) AttributeValue(com.amazonaws.services.dynamodbv2.model.AttributeValue) WriteRequest(com.amazonaws.services.dynamodbv2.model.WriteRequest)
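The key-extraction step above is delegated to the abstract getRequestItem(WriteRequest) hook, which each concrete write processor overrides. A minimal sketch of what a delete-oriented override could look like (the method name and types come from the snippet above; the body below is an illustrative assumption, not the project's verbatim code):

@Override
protected Map<String, AttributeValue> getRequestItem(final WriteRequest writeRequest) {
    // Assumption: for a delete-style batch write the primary key attributes live in the
    // nested DeleteRequest, so returning its key map gives handleUnprocessedItems what it
    // needs to look up the original flow file in keysToFlowFileMap.
    return writeRequest.getDeleteRequest().getKey();
}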

Example 7 with Item

Use of com.amazonaws.services.dynamodbv2.document.Item in project NiFi by Apache.

The class DeleteDynamoDB, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
    if (flowFiles == null || flowFiles.size() == 0) {
        return;
    }
    Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();
    final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();
    final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String hashKeyValueType = context.getProperty(HASH_KEY_VALUE_TYPE).getValue();
    final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String rangeKeyValueType = context.getProperty(RANGE_KEY_VALUE_TYPE).getValue();
    TableWriteItems tableWriteItems = new TableWriteItems(table);
    for (FlowFile flowFile : flowFiles) {
        final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
        final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);
        if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
            continue;
        }
        if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
            continue;
        }
        if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
            tableWriteItems.addHashOnlyPrimaryKeysToDelete(hashKeyName, hashKeyValue);
        } else {
            tableWriteItems.addHashAndRangePrimaryKeyToDelete(hashKeyName, hashKeyValue, rangeKeyName, rangeKeyValue);
        }
        keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
    }
    if (keysToFlowFileMap.isEmpty()) {
        return;
    }
    final DynamoDB dynamoDB = getDynamoDB();
    try {
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);
        handleUnprocessedItems(session, keysToFlowFileMap, table, hashKeyName, hashKeyValueType, rangeKeyName, rangeKeyValueType, outcome);
        // Every item not reported back as unprocessed was deleted successfully
        for (FlowFile flowFile : keysToFlowFileMap.values()) {
            getLogger().debug("Successfully deleted item from dynamodb : " + table);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (AmazonServiceException exception) {
        getLogger().error("Could not process flowFiles due to service exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (AmazonClientException exception) {
        getLogger().error("Could not process flowFiles due to client exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (Exception exception) {
        getLogger().error("Could not process flowFiles due to exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) TableWriteItems(com.amazonaws.services.dynamodbv2.document.TableWriteItems) BatchWriteItemOutcome(com.amazonaws.services.dynamodbv2.document.BatchWriteItemOutcome) HashMap(java.util.HashMap) AmazonClientException(com.amazonaws.AmazonClientException) DynamoDB(com.amazonaws.services.dynamodbv2.document.DynamoDB) AmazonServiceException(com.amazonaws.AmazonServiceException) List(java.util.List)
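For a quick sanity check of this processor outside a live flow, the same TestRunner pattern used by the GetDynamoDB tests further down applies. The configuration below is a hypothetical sketch (credentials, region, table and key values are placeholders, not values taken from the project):

// Hypothetical unit-test style setup; the property constants are the AbstractDynamoDBProcessor
// descriptors also used by the GetDynamoDB tests below.
final TestRunner deleteRunner = TestRunners.newTestRunner(new DeleteDynamoDB());
deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");   // placeholder credential
deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");   // placeholder credential
deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, "us-west-2");
deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, "StringHashStringRangeTable");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
deleteRunner.enqueue(new byte[] {});
deleteRunner.run(1);

With getDynamoDB() overridden to return a mocked client, as in the GetDynamoDB tests below, the flow file would be expected on REL_SUCCESS.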

Example 8 with Item

Use of com.amazonaws.services.dynamodbv2.document.Item in project NiFi by Apache.

The class GetDynamoDB, method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    List<FlowFile> flowFiles = session.get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
    if (flowFiles == null || flowFiles.size() == 0) {
        return;
    }
    Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();
    final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();
    TableKeysAndAttributes tableKeysAndAttributes = new TableKeysAndAttributes(table);
    final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String jsonDocument = context.getProperty(JSON_DOCUMENT).evaluateAttributeExpressions().getValue();
    for (FlowFile flowFile : flowFiles) {
        final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
        final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);
        if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
            continue;
        }
        if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
            continue;
        }
        keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
        if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
            tableKeysAndAttributes.addHashOnlyPrimaryKey(hashKeyName, hashKeyValue);
        } else {
            tableKeysAndAttributes.addHashAndRangePrimaryKey(hashKeyName, hashKeyValue, rangeKeyName, rangeKeyValue);
        }
    }
    if (keysToFlowFileMap.isEmpty()) {
        return;
    }
    final DynamoDB dynamoDB = getDynamoDB();
    try {
        BatchGetItemOutcome result = dynamoDB.batchGetItem(tableKeysAndAttributes);
        // Handle processed items and get the json document
        List<Item> items = result.getTableItems().get(table);
        for (Item item : items) {
            ItemKeys itemKeys = new ItemKeys(item.get(hashKeyName), item.get(rangeKeyName));
            FlowFile flowFile = keysToFlowFileMap.get(itemKeys);
            if (item.get(jsonDocument) != null) {
                ByteArrayInputStream bais = new ByteArrayInputStream(item.getJSON(jsonDocument).getBytes());
                flowFile = session.importFrom(bais, flowFile);
            }
            session.transfer(flowFile, REL_SUCCESS);
            keysToFlowFileMap.remove(itemKeys);
        }
        // Handle unprocessed keys
        Map<String, KeysAndAttributes> unprocessedKeys = result.getUnprocessedKeys();
        if (unprocessedKeys != null && unprocessedKeys.size() > 0) {
            KeysAndAttributes keysAndAttributes = unprocessedKeys.get(table);
            List<Map<String, AttributeValue>> keys = keysAndAttributes.getKeys();
            for (Map<String, AttributeValue> unprocessedKey : keys) {
                Object hashKeyValue = getAttributeValue(context, HASH_KEY_VALUE_TYPE, unprocessedKey.get(hashKeyName));
                Object rangeKeyValue = getAttributeValue(context, RANGE_KEY_VALUE_TYPE, unprocessedKey.get(rangeKeyName));
                sendUnprocessedToUnprocessedRelationship(session, keysToFlowFileMap, hashKeyValue, rangeKeyValue);
            }
        }
        // Handle any remaining items
        for (final Iterator<Map.Entry<ItemKeys, FlowFile>> iterator = keysToFlowFileMap.entrySet().iterator(); iterator.hasNext();) {
            final Map.Entry<ItemKeys, FlowFile> entry = iterator.next();
            FlowFile flowFile = entry.getValue();
            flowFile = session.putAttribute(flowFile, DYNAMODB_KEY_ERROR_NOT_FOUND, DYNAMODB_KEY_ERROR_NOT_FOUND_MESSAGE + entry.getKey().toString());
            session.transfer(flowFile, REL_NOT_FOUND);
            // Remove through the iterator so the map is not structurally modified mid-iteration
            // (calling keysToFlowFileMap.remove(key) inside a keySet() loop risks a ConcurrentModificationException).
            iterator.remove();
        }
    } catch (AmazonServiceException exception) {
        getLogger().error("Could not process flowFiles due to service exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (AmazonClientException exception) {
        getLogger().error("Could not process flowFiles due to client exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (Exception exception) {
        getLogger().error("Could not process flowFiles due to exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) AttributeValue(com.amazonaws.services.dynamodbv2.model.AttributeValue) HashMap(java.util.HashMap) AmazonClientException(com.amazonaws.AmazonClientException) DynamoDB(com.amazonaws.services.dynamodbv2.document.DynamoDB) KeysAndAttributes(com.amazonaws.services.dynamodbv2.model.KeysAndAttributes) TableKeysAndAttributes(com.amazonaws.services.dynamodbv2.document.TableKeysAndAttributes) BatchGetItemOutcome(com.amazonaws.services.dynamodbv2.document.BatchGetItemOutcome) AmazonServiceException(com.amazonaws.AmazonServiceException) Item(com.amazonaws.services.dynamodbv2.document.Item) ByteArrayInputStream(java.io.ByteArrayInputStream) List(java.util.List) Map(java.util.Map)
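GetDynamoDB relies on Item.getJSON(attributeName) to turn the stored document attribute back into a JSON string before importing it into the flow file content. As a standalone illustration of that document-API call (the attribute names here are made up and independent of NiFi):

// Build a document-API Item carrying a JSON attribute, then read it back as a string,
// the same call GetDynamoDB makes with the attribute named by the JSON_DOCUMENT property.
Item item = new Item()
        .withPrimaryKey("hashS", "h1", "rangeS", "r1")
        .withJSON("j1", "{\"name\": \"john\"}");
String json = item.getJSON("j1");
byte[] flowFileContent = json.getBytes();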

Example 9 with Item

Use of com.amazonaws.services.dynamodbv2.document.Item in project NiFi by Apache.

The class GetDynamoDBTest, method testStringHashStringRangeGetJsonObjectValid.

@Test
public void testStringHashStringRangeGetJsonObjectValid() throws IOException {
    outcome = new BatchGetItemOutcome(result);
    KeysAndAttributes kaa = new KeysAndAttributes();
    Map<String, AttributeValue> map = new HashMap<>();
    map.put("hashS", new AttributeValue("h1"));
    map.put("rangeS", new AttributeValue("r1"));
    kaa.withKeys(map);
    unprocessed = new HashMap<>();
    result.withUnprocessedKeys(unprocessed);
    Map<String, List<Map<String, AttributeValue>>> responses = new HashMap<>();
    List<Map<String, AttributeValue>> items = new ArrayList<>();
    Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
    String jsonDocument = "{\"name\": \"john\"}";
    item.put("j1", new AttributeValue(jsonDocument));
    item.put("hashS", new AttributeValue("h1"));
    item.put("rangeS", new AttributeValue("r1"));
    items.add(item);
    responses.put("StringHashStringRangeTable", items);
    result.withResponses(responses);
    final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {

        @Override
        public BatchGetItemOutcome batchGetItem(TableKeysAndAttributes... tableKeysAndAttributes) {
            return outcome;
        }
    };
    getDynamoDB = new GetDynamoDB() {

        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDB;
        }
    };
    final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);
    getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
    getRunner.enqueue(new byte[] {});
    getRunner.run(1);
    getRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_SUCCESS, 1);
}
Also used : AttributeValue(com.amazonaws.services.dynamodbv2.model.AttributeValue) HashMap(java.util.HashMap) TestRunner(org.apache.nifi.util.TestRunner) ArrayList(java.util.ArrayList) DynamoDB(com.amazonaws.services.dynamodbv2.document.DynamoDB) KeysAndAttributes(com.amazonaws.services.dynamodbv2.model.KeysAndAttributes) TableKeysAndAttributes(com.amazonaws.services.dynamodbv2.document.TableKeysAndAttributes) BatchGetItemOutcome(com.amazonaws.services.dynamodbv2.document.BatchGetItemOutcome) List(java.util.List) Map(java.util.Map) Test(org.junit.Test)
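The test refers to result, outcome and unprocessed fields that are presumably initialized elsewhere in the test class, along with getDynamoDB, REGION and stringHashStringRangeTableName. A minimal sketch of fixtures that would make the snippet compile (the field types follow from how they are used above; the setup body is an assumption, not the project's code):

// Presumed test fixtures: the low-level result the tests populate via withResponses(...) and
// withUnprocessedKeys(...), and the document-API outcome wrapped around it that the mocked
// DynamoDB returns from batchGetItem(...).
protected BatchGetItemResult result;
protected BatchGetItemOutcome outcome;
protected Map<String, KeysAndAttributes> unprocessed;

@Before
public void setUp() {
    result = new BatchGetItemResult();
    outcome = new BatchGetItemOutcome(result);
}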

Example 10 with Item

Use of com.amazonaws.services.dynamodbv2.document.Item in project NiFi by Apache.

The class GetDynamoDBTest, method testStringHashStringRangeGetJsonObjectNull.

@Test
public void testStringHashStringRangeGetJsonObjectNull() {
    outcome = new BatchGetItemOutcome(result);
    KeysAndAttributes kaa = new KeysAndAttributes();
    Map<String, AttributeValue> map = new HashMap<>();
    map.put("hashS", new AttributeValue("h1"));
    map.put("rangeS", new AttributeValue("r1"));
    kaa.withKeys(map);
    unprocessed = new HashMap<>();
    result.withUnprocessedKeys(unprocessed);
    Map<String, List<Map<String, AttributeValue>>> responses = new HashMap<>();
    List<Map<String, AttributeValue>> items = new ArrayList<>();
    Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
    item.put("j1", null);
    item.put("hashS", new AttributeValue("h1"));
    item.put("rangeS", new AttributeValue("r1"));
    items.add(item);
    responses.put("StringHashStringRangeTable", items);
    result.withResponses(responses);
    final DynamoDB mockDynamoDB = new DynamoDB(Regions.AP_NORTHEAST_1) {

        @Override
        public BatchGetItemOutcome batchGetItem(TableKeysAndAttributes... tableKeysAndAttributes) {
            return outcome;
        }
    };
    getDynamoDB = new GetDynamoDB() {

        @Override
        protected DynamoDB getDynamoDB() {
            return mockDynamoDB;
        }
    };
    final TestRunner getRunner = TestRunners.newTestRunner(getDynamoDB);
    getRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    getRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    getRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    getRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    getRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    getRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    getRunner.setProperty(AbstractDynamoDBProcessor.JSON_DOCUMENT, "j1");
    getRunner.enqueue(new byte[] {});
    getRunner.run(1);
    getRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_SUCCESS, 1);
    List<MockFlowFile> flowFiles = getRunner.getFlowFilesForRelationship(AbstractDynamoDBProcessor.REL_SUCCESS);
    for (MockFlowFile flowFile : flowFiles) {
        assertNull(flowFile.getContentClaim());
    }
}
Also used : AttributeValue(com.amazonaws.services.dynamodbv2.model.AttributeValue) HashMap(java.util.HashMap) TestRunner(org.apache.nifi.util.TestRunner) ArrayList(java.util.ArrayList) DynamoDB(com.amazonaws.services.dynamodbv2.document.DynamoDB) KeysAndAttributes(com.amazonaws.services.dynamodbv2.model.KeysAndAttributes) TableKeysAndAttributes(com.amazonaws.services.dynamodbv2.document.TableKeysAndAttributes) BatchGetItemOutcome(com.amazonaws.services.dynamodbv2.document.BatchGetItemOutcome) MockFlowFile(org.apache.nifi.util.MockFlowFile) List(java.util.List) Map(java.util.Map) Test(org.junit.Test)

Aggregations

AttributeValue (com.amazonaws.services.dynamodbv2.model.AttributeValue) 20
AmazonDynamoDB (com.amazonaws.services.dynamodbv2.AmazonDynamoDB) 17
AmazonClientException (com.amazonaws.AmazonClientException) 13
AmazonServiceException (com.amazonaws.AmazonServiceException) 13
HashMap (java.util.HashMap) 13
Test (org.junit.Test) 12
Item (com.amazonaws.services.dynamodbv2.document.Item) 11
IOException (java.io.IOException) 7
ArrayList (java.util.ArrayList) 6
DynamoDB (com.amazonaws.services.dynamodbv2.document.DynamoDB) 5
Expected (com.amazonaws.services.dynamodbv2.document.Expected) 5
AttributeValueUpdate (com.amazonaws.services.dynamodbv2.model.AttributeValueUpdate) 5
DeleteItemSpec (com.amazonaws.services.dynamodbv2.document.spec.DeleteItemSpec) 4
ConsumedCapacity (com.amazonaws.services.dynamodbv2.model.ConsumedCapacity) 4
ScanRequest (com.amazonaws.services.dynamodbv2.model.ScanRequest) 4
ScanResult (com.amazonaws.services.dynamodbv2.model.ScanResult) 4
List (java.util.List) 4
ToString (lombok.ToString) 4
DynamoDBMapper (com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper) 3
BatchGetItemOutcome (com.amazonaws.services.dynamodbv2.document.BatchGetItemOutcome) 3