Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
Class TestKafkaTopic, method testConsumeKafka.
@Test
public void testConsumeKafka() {
    final String processorName = "ConsumeKafka";
    final String transitUri = "PLAINTEXT://0.example.com:6667/topicA";

    // Simulate a RECEIVE provenance event emitted by a ConsumeKafka processor.
    final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class);
    when(record.getComponentType()).thenReturn(processorName);
    when(record.getTransitUri()).thenReturn(transitUri);
    when(record.getEventType()).thenReturn(ProvenanceEventType.RECEIVE);

    // The cluster resolver maps broker host names to a logical cluster name.
    final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class);
    when(clusterResolvers.fromHostNames(matches(".+\\.example\\.com"))).thenReturn("cluster1");

    final AnalysisContext context = Mockito.mock(AnalysisContext.class);
    when(context.getClusterResolver()).thenReturn(clusterResolvers);

    final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType());
    assertNotNull(analyzer);

    final DataSetRefs refs = analyzer.analyze(context, record);

    // The consumed topic should be reported as a single input dataset, qualified by the resolved cluster name.
    assertEquals(1, refs.getInputs().size());
    assertEquals(0, refs.getOutputs().size());
    Referenceable ref = refs.getInputs().iterator().next();
    assertEquals("kafka_topic", ref.getTypeName());
    assertEquals("topicA", ref.get(ATTR_NAME));
    assertEquals("topicA", ref.get("topic"));
    assertEquals("topicA@cluster1", ref.get(ATTR_QUALIFIED_NAME));
}
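For contrast with the consumer case above, the producer side can be sketched the same way. The sketch below is not copied from the project sources: it assumes the factory also resolves an analyzer for a PublishKafka SEND event and that the topic then shows up in getOutputs() rather than getInputs().

@Test
public void testPublishKafkaSketch() {
    // Sketch only: PublishKafka and the SEND-side expectations are assumptions,
    // not taken from the snippet above.
    final String processorName = "PublishKafka";
    final String transitUri = "PLAINTEXT://0.example.com:6667/topicA";
    final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class);
    when(record.getComponentType()).thenReturn(processorName);
    when(record.getTransitUri()).thenReturn(transitUri);
    when(record.getEventType()).thenReturn(ProvenanceEventType.SEND);

    final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class);
    when(clusterResolvers.fromHostNames(matches(".+\\.example\\.com"))).thenReturn("cluster1");
    final AnalysisContext context = Mockito.mock(AnalysisContext.class);
    when(context.getClusterResolver()).thenReturn(clusterResolvers);

    final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType());
    assertNotNull(analyzer);

    final DataSetRefs refs = analyzer.analyze(context, record);
    // Expected mirror image of the consumer test: the topic appears as an output dataset.
    assertEquals(0, refs.getInputs().size());
    assertEquals(1, refs.getOutputs().size());
    final Referenceable ref = refs.getOutputs().iterator().next();
    assertEquals("kafka_topic", ref.getTypeName());
    assertEquals("topicA@cluster1", ref.get(ATTR_QUALIFIED_NAME));
}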
Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
Class TestKafkaTopic, method testConsumeKafkaRecord_0_10.
@Test
public void testConsumeKafkaRecord_0_10() {
    final String processorName = "ConsumeKafkaRecord_0_10";
    final String transitUri = "PLAINTEXT://0.example.com:6667/topicA";
    final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class);
    when(record.getComponentType()).thenReturn(processorName);
    when(record.getTransitUri()).thenReturn(transitUri);
    when(record.getEventType()).thenReturn(ProvenanceEventType.RECEIVE);
    final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class);
    when(clusterResolvers.fromHostNames(matches(".+\\.example\\.com"))).thenReturn("cluster1");
    final AnalysisContext context = Mockito.mock(AnalysisContext.class);
    when(context.getClusterResolver()).thenReturn(clusterResolvers);
    final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType());
    assertNotNull(analyzer);
    final DataSetRefs refs = analyzer.analyze(context, record);
    assertEquals(1, refs.getInputs().size());
    assertEquals(0, refs.getOutputs().size());
    Referenceable ref = refs.getInputs().iterator().next();
    assertEquals("kafka_topic", ref.getTypeName());
    assertEquals("topicA", ref.get(ATTR_NAME));
    assertEquals("topicA", ref.get("topic"));
    assertEquals("topicA@cluster1", ref.get(ATTR_QUALIFIED_NAME));
}
Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
Class TestNiFiRemotePort, method testRemoteInputPort.
@Test
public void testRemoteInputPort() {
    final String componentType = "Remote Input Port";
    final String transitUri = "http://0.example.com:8080/nifi-api/data-transfer/input-ports/port-guid/transactions/tx-guid/flow-files";

    // Simulate a SEND provenance event emitted when FlowFiles are transferred to a remote input port.
    final ProvenanceEventRecord sendEvent = Mockito.mock(ProvenanceEventRecord.class);
    when(sendEvent.getEventId()).thenReturn(123L);
    when(sendEvent.getComponentId()).thenReturn("port-guid");
    when(sendEvent.getComponentType()).thenReturn(componentType);
    when(sendEvent.getTransitUri()).thenReturn(transitUri);
    when(sendEvent.getEventType()).thenReturn(ProvenanceEventType.SEND);

    final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class);
    when(clusterResolvers.fromHostNames(matches(".+\\.example\\.com"))).thenReturn("cluster1");

    // A connection whose destination is the remote port supplies the port name.
    final List<ConnectionStatus> connections = new ArrayList<>();
    final ConnectionStatus connection = new ConnectionStatus();
    connection.setDestinationId("port-guid");
    connection.setDestinationName("inputPortA");
    connections.add(connection);

    final AnalysisContext context = Mockito.mock(AnalysisContext.class);
    when(context.getClusterResolver()).thenReturn(clusterResolvers);
    when(context.findConnectionTo(matches("port-guid"))).thenReturn(connections);

    final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(componentType, transitUri, sendEvent.getEventType());
    assertNotNull(analyzer);

    final DataSetRefs refs = analyzer.analyze(context, sendEvent);
    assertEquals(0, refs.getInputs().size());
    assertEquals(1, refs.getOutputs().size());
    assertEquals(1, refs.getComponentIds().size());
    // Should report the connected componentId.
    assertTrue(refs.getComponentIds().contains("port-guid"));
    Referenceable ref = refs.getOutputs().iterator().next();
    assertEquals(TYPE_NIFI_INPUT_PORT, ref.getTypeName());
    assertEquals("inputPortA", ref.get(ATTR_NAME));
    assertEquals("port-guid@cluster1", ref.get(ATTR_QUALIFIED_NAME));
}
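The opposite direction can be sketched analogously. The sketch below is an assumption rather than project code: it supposes a Remote Output Port RECEIVE event resolves to an analyzer that reports the port as an input under a TYPE_NIFI_OUTPUT_PORT type name, and that the connection is looked up from the port via findConnectionFrom.

@Test
public void testRemoteOutputPortSketch() {
    // Sketch only: the RECEIVE-side expectations, TYPE_NIFI_OUTPUT_PORT, and
    // findConnectionFrom lookup are assumptions, not shown in the snippet above.
    final String componentType = "Remote Output Port";
    final String transitUri = "http://0.example.com:8080/nifi-api/data-transfer/output-ports/port-guid/transactions/tx-guid/flow-files";
    final ProvenanceEventRecord receiveEvent = Mockito.mock(ProvenanceEventRecord.class);
    when(receiveEvent.getComponentId()).thenReturn("port-guid");
    when(receiveEvent.getComponentType()).thenReturn(componentType);
    when(receiveEvent.getTransitUri()).thenReturn(transitUri);
    when(receiveEvent.getEventType()).thenReturn(ProvenanceEventType.RECEIVE);

    final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class);
    when(clusterResolvers.fromHostNames(matches(".+\\.example\\.com"))).thenReturn("cluster1");

    // A connection whose source is the remote port supplies the port name.
    final List<ConnectionStatus> connections = new ArrayList<>();
    final ConnectionStatus connection = new ConnectionStatus();
    connection.setSourceId("port-guid");
    connection.setSourceName("outputPortA");
    connections.add(connection);

    final AnalysisContext context = Mockito.mock(AnalysisContext.class);
    when(context.getClusterResolver()).thenReturn(clusterResolvers);
    when(context.findConnectionFrom(matches("port-guid"))).thenReturn(connections);

    final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(componentType, transitUri, receiveEvent.getEventType());
    assertNotNull(analyzer);

    final DataSetRefs refs = analyzer.analyze(context, receiveEvent);
    // Expected mirror image of the input-port test: the remote port appears as an input.
    assertEquals(1, refs.getInputs().size());
    assertEquals(0, refs.getOutputs().size());
    final Referenceable ref = refs.getInputs().iterator().next();
    assertEquals(TYPE_NIFI_OUTPUT_PORT, ref.getTypeName());
    assertEquals("outputPortA", ref.get(ATTR_NAME));
}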
Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
Class ITReportLineageToAtlas, method createEdge.
private EdgeNode createEdge(ProvenanceRecords prs, int srcIdx, int tgtIdx) {
    // Source event, e.g. the processor that created the FlowFile.
    final ProvenanceEventRecord srcR = prs.get(srcIdx);
    // Target event, e.g. the Remote Input Port that sent it onward.
    final ProvenanceEventRecord tgtR = prs.get(tgtIdx);
    final EventNode src = new EventNode(srcR);
    final EventNode tgt = new EventNode(tgtR);
    final EdgeNode edge = new EdgeNode(srcR.getComponentType() + " to " + tgtR.getEventType(), src, tgt);
    return edge;
}
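A hypothetical call site for this helper, assuming prs already holds the recorded events in emission order, might look like the line below; the indices and event roles are illustrative only.

// Hypothetical usage: link event 0 (e.g. the processor that created the FlowFile)
// to event 1 (e.g. the Remote Input Port that sent it) in the simulated lineage graph.
final EdgeNode edge = createEdge(prs, 0, 1);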
Use of org.apache.nifi.provenance.ProvenanceEventRecord in project nifi by apache.
Class TestPutElasticsearchHttpRecord, method testPutElasticSearchBasic.
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Integration test section below
//
// The tests below are meant to run against real ES instances and are therefore annotated with @Ignore during
// normal test execution. If you wish to execute them as part of a test phase, comment out the @Ignore line
// for each desired test.
// ///////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
 * Tests basic ES functionality against a local or test ES cluster.
 */
@Test
@Ignore("Comment this out if you want to run against local or test ES")
public void testPutElasticSearchBasic() {
    System.out.println("Starting test " + new Object() {
    }.getClass().getEnclosingMethod().getName());
    final TestRunner runner = TestRunners.newTestRunner(new PutElasticsearchHttpRecord());
    runner.setValidateExpressionUsage(false);
    runner.setProperty(AbstractElasticsearchHttpProcessor.ES_URL, "http://127.0.0.1:9200");
    runner.setProperty(PutElasticsearchHttpRecord.INDEX, "doc");
    runner.setProperty(PutElasticsearchHttpRecord.TYPE, "status");
    runner.setProperty(PutElasticsearchHttpRecord.ID_RECORD_PATH, "/id");
    runner.assertValid();
    runner.enqueue(new byte[0], new HashMap<String, String>() {
        {
            put("doc_id", "28039652140");
        }
    });
    runner.enqueue(new byte[0]);
    runner.run(1, true, true);
    runner.assertAllFlowFilesTransferred(PutElasticsearchHttpRecord.REL_SUCCESS, 1);
    List<ProvenanceEventRecord> provEvents = runner.getProvenanceEvents();
    assertNotNull(provEvents);
    assertEquals(1, provEvents.size());
    assertEquals(ProvenanceEventType.SEND, provEvents.get(0).getEventType());
}
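If the provenance transit URI should also be verified, an extra assertion along the following lines could be appended; it assumes (this is not shown in the snippet) that the processor records the configured ES URL in the SEND event's transit URI.

// Assumption: the SEND event's transit URI starts with the configured ES endpoint.
assertTrue(provEvents.get(0).getTransitUri().startsWith("http://127.0.0.1:9200"));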