Use of org.apache.nifi.util.MockFlowFile in project nifi by apache.
From the class TestGetSQS, method testGetMessageAndAutoDelete.
@Test
public void testGetMessageAndAutoDelete() {
    runner.setProperty(GetSQS.QUEUE_URL, "https://sqs.us-west-2.amazonaws.com/123456789012/test-queue-000000000");
    runner.setProperty(GetSQS.AUTO_DELETE, "true");

    // Stub the mocked SQS client to return two messages for any receive request.
    Message message1 = new Message();
    message1.setBody("TestMessage1");
    message1.setMessageId("test-message-id-1");
    message1.setReceiptHandle("test-receipt-handle-1");
    Message message2 = new Message();
    message2.setBody("TestMessage2");
    message2.setMessageId("test-message-id-2");
    message2.setReceiptHandle("test-receipt-handle-2");
    ReceiveMessageResult receiveMessageResult = new ReceiveMessageResult().withMessages(message1, message2);
    Mockito.when(mockSQSClient.receiveMessage(Mockito.any(ReceiveMessageRequest.class))).thenReturn(receiveMessageResult);

    runner.run(1);

    // The processor should issue exactly one receive request against the configured queue.
    ArgumentCaptor<ReceiveMessageRequest> captureReceiveRequest = ArgumentCaptor.forClass(ReceiveMessageRequest.class);
    Mockito.verify(mockSQSClient, Mockito.times(1)).receiveMessage(captureReceiveRequest.capture());
    ReceiveMessageRequest receiveRequest = captureReceiveRequest.getValue();
    assertEquals("https://sqs.us-west-2.amazonaws.com/123456789012/test-queue-000000000", receiveRequest.getQueueUrl());

    // With AUTO_DELETE enabled, both messages should be removed in a single batch delete.
    ArgumentCaptor<DeleteMessageBatchRequest> captureDeleteRequest = ArgumentCaptor.forClass(DeleteMessageBatchRequest.class);
    Mockito.verify(mockSQSClient, Mockito.times(1)).deleteMessageBatch(captureDeleteRequest.capture());
    DeleteMessageBatchRequest deleteRequest = captureDeleteRequest.getValue();
    assertEquals("https://sqs.us-west-2.amazonaws.com/123456789012/test-queue-000000000", deleteRequest.getQueueUrl());
    assertEquals("test-message-id-1", deleteRequest.getEntries().get(0).getId());
    assertEquals("test-message-id-2", deleteRequest.getEntries().get(1).getId());

    // Each message becomes a FlowFile on the success relationship carrying its SQS message id.
    runner.assertAllFlowFilesTransferred(GetSQS.REL_SUCCESS, 2);
    List<MockFlowFile> flowFiles = runner.getFlowFilesForRelationship(GetSQS.REL_SUCCESS);
    MockFlowFile ff0 = flowFiles.get(0);
    ff0.assertAttributeEquals("sqs.message.id", "test-message-id-1");
    MockFlowFile ff1 = flowFiles.get(1);
    ff1.assertAttributeEquals("sqs.message.id", "test-message-id-2");
}
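The method above relies on a runner and a mockSQSClient that are created elsewhere in the test class. Below is a minimal sketch of how that fixture could be wired, assuming the processor allows a test subclass to substitute the AWS client; the getClient() override used here is an assumption about the test harness, not necessarily the hook the NiFi test uses.

private TestRunner runner;
private AmazonSQSClient mockSQSClient;

@Before
public void setUp() {
    mockSQSClient = Mockito.mock(AmazonSQSClient.class);
    // Hypothetical wiring: subclass GetSQS so it hands back the mock instead of a real SQS client.
    final GetSQS getSQS = new GetSQS() {
        @Override
        protected AmazonSQSClient getClient() {
            return mockSQSClient;
        }
    };
    runner = TestRunners.newTestRunner(getSQS);
}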
Use of org.apache.nifi.util.MockFlowFile in project nifi by apache.
From the class TestConsumeAzureEventHub, method testCheckpointFailure.
@Test
public void testCheckpointFailure() throws Exception {
    final Iterable<EventData> eventDataList = Arrays.asList(
            new EventData("one".getBytes(StandardCharsets.UTF_8)),
            new EventData("two".getBytes(StandardCharsets.UTF_8)));
    doThrow(new RuntimeException("Failed to create a checkpoint.")).when(partitionContext).checkpoint();
    eventProcessor.onEvents(partitionContext, eventDataList);

    // Even if it fails to create a checkpoint, these FlowFiles are already committed.
    processSession.assertCommitted();
    final List<MockFlowFile> flowFiles = processSession.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_SUCCESS);
    assertEquals(2, flowFiles.size());
    final MockFlowFile msg1 = flowFiles.get(0);
    msg1.assertContentEquals("one");
    final MockFlowFile msg2 = flowFiles.get(1);
    msg2.assertContentEquals("two");
    final List<ProvenanceEventRecord> provenanceEvents = sharedState.getProvenanceEvents();
    assertEquals(2, provenanceEvents.size());
}
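This test and the two ConsumeAzureEventHub tests that follow share a fixture (eventProcessor, partitionContext, processSession, sharedState) built in the test class's setup, which is not shown here. A rough sketch of what such a fixture could look like follows; the setProcessSessionFactory hook and the EventProcessor inner-class constructor are assumptions about the test harness, while SharedSessionState, MockProcessSession, and the mocked PartitionContext getters are standard NiFi mock and Azure Event Processor Host APIs.

private ConsumeAzureEventHub processor;
private IEventProcessor eventProcessor;
private PartitionContext partitionContext;
private SharedSessionState sharedState;
private MockProcessSession processSession;

@Before
public void setup() throws Exception {
    processor = new ConsumeAzureEventHub();

    // NiFi mock-framework objects that back the session handed to the event processor.
    sharedState = new SharedSessionState(processor, new AtomicLong(0));
    processSession = new MockProcessSession(sharedState, processor);
    final ProcessSessionFactory sessionFactory = Mockito.mock(ProcessSessionFactory.class);
    Mockito.when(sessionFactory.createSession()).thenReturn(processSession);
    // Assumption: the processor offers a test hook for injecting the session factory.
    processor.setProcessSessionFactory(sessionFactory);

    // Mocked Event Hub partition context; the values match the transit URIs asserted in the tests.
    partitionContext = Mockito.mock(PartitionContext.class);
    Mockito.when(partitionContext.getEventHubPath()).thenReturn("eventhub-name");
    Mockito.when(partitionContext.getPartitionId()).thenReturn("partition-id");
    Mockito.when(partitionContext.getConsumerGroupName()).thenReturn("consumer-group");

    // Assumption: the IEventProcessor under test is an inner class of the processor.
    eventProcessor = processor.new EventProcessor();
}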
Use of org.apache.nifi.util.MockFlowFile in project nifi by apache.
From the class TestConsumeAzureEventHub, method testReceiveRecordReaderFailure.
@Test
public void testReceiveRecordReaderFailure() throws Exception {
    final List<EventData> eventDataList = Arrays.asList(
            new EventData("one".getBytes(StandardCharsets.UTF_8)),
            new EventData("two".getBytes(StandardCharsets.UTF_8)),
            new EventData("three".getBytes(StandardCharsets.UTF_8)),
            new EventData("four".getBytes(StandardCharsets.UTF_8)));
    // The record reader is set up to fail on the event at index 2 ("three").
    setupRecordReader(eventDataList, 2, null);
    setupRecordWriter();

    eventProcessor.onEvents(partitionContext, eventDataList);
    processSession.assertCommitted();

    // The three parsable events are written into a single FlowFile on the success relationship.
    final List<MockFlowFile> flowFiles = processSession.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_SUCCESS);
    assertEquals(1, flowFiles.size());
    final MockFlowFile ff1 = flowFiles.get(0);
    ff1.assertContentEquals("onetwofour");
    ff1.assertAttributeEquals("eventhub.name", "eventhub-name");
    ff1.assertAttributeEquals("eventhub.partition", "partition-id");

    // The unparsable event is routed to the parse-failure relationship with its raw content.
    final List<MockFlowFile> failedFFs = processSession.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_PARSE_FAILURE);
    assertEquals(1, failedFFs.size());
    final MockFlowFile failed1 = failedFFs.get(0);
    failed1.assertContentEquals("three");
    failed1.assertAttributeEquals("eventhub.name", "eventhub-name");
    failed1.assertAttributeEquals("eventhub.partition", "partition-id");

    // Both FlowFiles receive a RECEIVE provenance event pointing at the source partition.
    final List<ProvenanceEventRecord> provenanceEvents = sharedState.getProvenanceEvents();
    assertEquals(2, provenanceEvents.size());
    final ProvenanceEventRecord provenanceEvent1 = provenanceEvents.get(0);
    assertEquals(ProvenanceEventType.RECEIVE, provenanceEvent1.getEventType());
    assertEquals("amqps://namespace.servicebus.windows.net/eventhub-name/ConsumerGroups/consumer-group/Partitions/partition-id", provenanceEvent1.getTransitUri());
    final ProvenanceEventRecord provenanceEvent2 = provenanceEvents.get(1);
    assertEquals(ProvenanceEventType.RECEIVE, provenanceEvent2.getEventType());
    assertEquals("amqps://namespace.servicebus.windows.net/eventhub-name/ConsumerGroups/consumer-group/Partitions/partition-id", provenanceEvent2.getTransitUri());
}
Use of org.apache.nifi.util.MockFlowFile in project nifi by apache.
From the class TestConsumeAzureEventHub, method testReceiveTwo.
@Test
public void testReceiveTwo() throws Exception {
    final Iterable<EventData> eventDataList = Arrays.asList(
            new EventData("one".getBytes(StandardCharsets.UTF_8)),
            new EventData("two".getBytes(StandardCharsets.UTF_8)));
    eventProcessor.onEvents(partitionContext, eventDataList);
    processSession.assertCommitted();

    final List<MockFlowFile> flowFiles = processSession.getFlowFilesForRelationship(ConsumeAzureEventHub.REL_SUCCESS);
    assertEquals(2, flowFiles.size());
    final MockFlowFile msg1 = flowFiles.get(0);
    msg1.assertContentEquals("one");
    final MockFlowFile msg2 = flowFiles.get(1);
    msg2.assertContentEquals("two");

    final List<ProvenanceEventRecord> provenanceEvents = sharedState.getProvenanceEvents();
    assertEquals(2, provenanceEvents.size());
}
Use of org.apache.nifi.util.MockFlowFile in project nifi by apache.
From the class ITFetchAzureBlobStorage, method testFetchingBlob.
@Test
public void testFetchingBlob() throws InvalidKeyException, URISyntaxException, StorageException, IOException {
    // Create a uniquely named container and upload a 10-byte test blob to it.
    String containerName = String.format("%s-%s", AzureTestUtil.TEST_CONTAINER_NAME_PREFIX, UUID.randomUUID());
    CloudBlobContainer container = AzureTestUtil.getContainer(containerName);
    container.createIfNotExists();
    CloudBlob blob = container.getBlockBlobReference(AzureTestUtil.TEST_BLOB_NAME);
    byte[] buf = "0123456789".getBytes();
    InputStream in = new ByteArrayInputStream(buf);
    blob.upload(in, 10);

    final TestRunner runner = TestRunners.newTestRunner(new FetchAzureBlobStorage());
    try {
        runner.setValidateExpressionUsage(true);
        runner.setProperty(AzureStorageUtils.ACCOUNT_NAME, AzureTestUtil.getAccountName());
        runner.setProperty(AzureStorageUtils.ACCOUNT_KEY, AzureTestUtil.getAccountKey());
        runner.setProperty(AzureStorageUtils.CONTAINER, containerName);
        runner.setProperty(FetchAzureBlobStorage.BLOB, "${azure.blobname}");

        // The blob name is resolved from FlowFile attributes via Expression Language.
        final Map<String, String> attributes = new HashMap<>();
        attributes.put("azure.primaryUri", "https://" + AzureTestUtil.getAccountName() + ".blob.core.windows.net/" + containerName + "/" + AzureTestUtil.TEST_BLOB_NAME);
        attributes.put("azure.blobname", AzureTestUtil.TEST_BLOB_NAME);
        attributes.put("azure.blobtype", AzureStorageUtils.BLOCK);
        runner.enqueue(new byte[0], attributes);
        runner.run();

        // The fetched FlowFile should carry the blob content and its length in bytes.
        runner.assertAllFlowFilesTransferred(AbstractAzureBlobProcessor.REL_SUCCESS, 1);
        List<MockFlowFile> flowFilesForRelationship = runner.getFlowFilesForRelationship(FetchAzureBlobStorage.REL_SUCCESS);
        for (MockFlowFile flowFile : flowFilesForRelationship) {
            flowFile.assertContentEquals("0123456789".getBytes());
            flowFile.assertAttributeEquals("azure.length", "10");
        }
    } finally {
        container.deleteIfExists();
    }
}
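The integration test above depends on an AzureTestUtil helper that is not shown. A minimal sketch of what such a helper could look like is given below; the constant values and the use of system properties to supply credentials are assumptions, while the Azure Storage SDK calls (StorageCredentialsAccountAndKey, CloudStorageAccount, getContainerReference) are real APIs.

import java.net.URISyntaxException;
import java.security.InvalidKeyException;
import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.StorageCredentials;
import com.microsoft.azure.storage.StorageCredentialsAccountAndKey;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.blob.CloudBlobContainer;

public class AzureTestUtil {

    // Hypothetical constants; the real test utility defines its own values.
    public static final String TEST_CONTAINER_NAME_PREFIX = "nifitest";
    public static final String TEST_BLOB_NAME = "testblob";

    public static String getAccountName() {
        // Assumption: credentials are supplied via system properties for the integration test.
        return System.getProperty("azure.account.name");
    }

    public static String getAccountKey() {
        return System.getProperty("azure.account.key");
    }

    public static CloudBlobContainer getContainer(String containerName)
            throws InvalidKeyException, URISyntaxException, StorageException {
        // Build a container reference from the account credentials using the Azure Storage SDK.
        final StorageCredentials credentials =
                new StorageCredentialsAccountAndKey(getAccountName(), getAccountKey());
        final CloudStorageAccount storageAccount = new CloudStorageAccount(credentials, true);
        return storageAccount.createCloudBlobClient().getContainerReference(containerName);
    }
}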