Search in sources:

Example 1 with MockRecordParser

Use of org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser in the Apache NiFi project.

From the class TestConsumeKafkaRecord_1_0, method setup.

@Before
public void setup() throws InitializationException {
    // Mock out the Kafka consumer machinery so no real broker is required.
    mockLease = mock(ConsumerLease.class);
    mockConsumerPool = mock(ConsumerPool.class);

    // Processor subclass that hands back the mocked pool instead of building a real one.
    final ConsumeKafkaRecord_1_0 processor = new ConsumeKafkaRecord_1_0() {
        @Override
        protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
            return mockConsumerPool;
        }
    };

    runner = TestRunners.newTestRunner(processor);
    runner.setProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS, "okeydokey:1234");

    // Register a record reader controller service exposing a simple name/age schema.
    final MockRecordParser parser = new MockRecordParser();
    parser.addSchemaField("name", RecordFieldType.STRING);
    parser.addSchemaField("age", RecordFieldType.INT);
    runner.addControllerService("record-reader", parser);
    runner.enableControllerService(parser);

    // Register a matching record writer controller service.
    final RecordSetWriterFactory writerFactory = new MockRecordWriter("name, age");
    runner.addControllerService("record-writer", writerFactory);
    runner.enableControllerService(writerFactory);

    // Point the processor at the two controller services by id.
    runner.setProperty(ConsumeKafkaRecord_1_0.RECORD_READER, "record-reader");
    runner.setProperty(ConsumeKafkaRecord_1_0.RECORD_WRITER, "record-writer");
}
Also used : RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) MockRecordWriter(org.apache.nifi.processors.kafka.pubsub.util.MockRecordWriter) ComponentLog(org.apache.nifi.logging.ComponentLog) ProcessContext(org.apache.nifi.processor.ProcessContext) MockRecordParser(org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser) Before(org.junit.Before)

Example 2 with MockRecordParser

Use of org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser in the Apache NiFi project.

From the class TestPublishKafkaRecord_1_0, method setup.

@Before
public void setup() throws InitializationException, IOException {
    // Mock the publisher pool/lease; publish() runs its real implementation so
    // record serialization is exercised while the Kafka producer stays mocked.
    mockPool = mock(PublisherPool.class);
    mockLease = mock(PublisherLease.class);
    Mockito.doCallRealMethod().when(mockLease).publish(any(FlowFile.class), any(RecordSet.class), any(RecordSetWriterFactory.class), any(RecordSchema.class), any(String.class), any(String.class));
    when(mockPool.obtainPublisher()).thenReturn(mockLease);

    // Processor subclass that returns the mocked pool instead of creating one.
    runner = TestRunners.newTestRunner(new PublishKafkaRecord_1_0() {
        @Override
        protected PublisherPool createPublisherPool(final ProcessContext context) {
            return mockPool;
        }
    });
    runner.setProperty(PublishKafkaRecord_1_0.TOPIC, TOPIC_NAME);

    // Record reader controller service with a simple name/age schema.
    final MockRecordParser parser = new MockRecordParser();
    parser.addSchemaField("name", RecordFieldType.STRING);
    parser.addSchemaField("age", RecordFieldType.INT);
    runner.addControllerService("record-reader", parser);
    runner.enableControllerService(parser);

    // Matching record writer controller service.
    final RecordSetWriterFactory writerFactory = new MockRecordWriter("name, age");
    runner.addControllerService("record-writer", writerFactory);
    runner.enableControllerService(writerFactory);

    // Wire the services into the processor and require replicated delivery.
    runner.setProperty(PublishKafkaRecord_1_0.RECORD_READER, "record-reader");
    runner.setProperty(PublishKafkaRecord_1_0.RECORD_WRITER, "record-writer");
    runner.setProperty(PublishKafka_1_0.DELIVERY_GUARANTEE, PublishKafka_1_0.DELIVERY_REPLICATED);
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) MockFlowFile(org.apache.nifi.util.MockFlowFile) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) RecordSet(org.apache.nifi.serialization.record.RecordSet) MockRecordWriter(org.apache.nifi.processors.kafka.pubsub.util.MockRecordWriter) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) ProcessContext(org.apache.nifi.processor.ProcessContext) MockRecordParser(org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser) Before(org.junit.Before)

Example 3 with MockRecordParser

Use of org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser in the Apache NiFi project.

From the class TestPublisherLease, method testRecordsSentToRecordWriterAndThenToProducer.

@Test
public void testRecordsSentToRecordWriterAndThenToProducer() throws IOException, SchemaNotFoundException, MalformedRecordException {
    // Lease under test, backed by the (mocked) producer from the fixture.
    final PublisherLease lease = new PublisherLease(producer, 1024 * 1024, 10L, logger, true, null, StandardCharsets.UTF_8);
    final FlowFile flowFile = new MockFlowFile(1L);

    // Two CSV-style records to be parsed into a RecordSet.
    final byte[] content = "101, John Doe, 48\n102, Jane Doe, 47".getBytes(StandardCharsets.UTF_8);
    final MockRecordParser parser = new MockRecordParser();
    parser.addSchemaField("person_id", RecordFieldType.LONG);
    parser.addSchemaField("name", RecordFieldType.STRING);
    parser.addSchemaField("age", RecordFieldType.INT);
    final RecordReader recordReader = parser.createRecordReader(Collections.emptyMap(), new ByteArrayInputStream(content), logger);
    final RecordSet recordSet = recordReader.createRecordSet();
    final RecordSchema schema = recordReader.getSchema();

    // Writer factory whose writers report a single record written per call.
    final RecordSetWriterFactory writerFactory = Mockito.mock(RecordSetWriterFactory.class);
    final RecordSetWriter writer = Mockito.mock(RecordSetWriter.class);
    Mockito.when(writer.write(Mockito.any(Record.class))).thenReturn(WriteResult.of(1, Collections.emptyMap()));
    Mockito.when(writerFactory.createWriter(eq(logger), eq(schema), any())).thenReturn(writer);

    lease.publish(flowFile, recordSet, writerFactory, schema, "person_id", "unit-test");

    // Each of the two records should flow through a writer and on to the producer.
    verify(writerFactory, times(2)).createWriter(eq(logger), eq(schema), any());
    verify(writer, times(2)).write(any(Record.class));
    verify(producer, times(2)).send(any(), any());
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) MockFlowFile(org.apache.nifi.util.MockFlowFile) RecordReader(org.apache.nifi.serialization.RecordReader) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) MockFlowFile(org.apache.nifi.util.MockFlowFile) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) ByteArrayInputStream(java.io.ByteArrayInputStream) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Record(org.apache.nifi.serialization.record.Record) RecordSet(org.apache.nifi.serialization.record.RecordSet) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) MockRecordParser(org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser) Test(org.junit.Test)

Example 4 with MockRecordParser

Use of org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser in the Apache NiFi project.

From the class TestPublishKafkaRecord_0_10, method setup.

@Before
public void setup() throws InitializationException, IOException {
    // Mock the publisher pool/lease; publish() delegates to the real method so
    // the record path is exercised while Kafka itself stays mocked.
    mockPool = mock(PublisherPool.class);
    mockLease = mock(PublisherLease.class);
    Mockito.doCallRealMethod().when(mockLease).publish(any(FlowFile.class), any(RecordSet.class), any(RecordSetWriterFactory.class), any(RecordSchema.class), any(String.class), any(String.class));
    when(mockPool.obtainPublisher()).thenReturn(mockLease);

    // Processor subclass that returns the mocked pool instead of creating one.
    runner = TestRunners.newTestRunner(new PublishKafkaRecord_0_10() {
        @Override
        protected PublisherPool createPublisherPool(final ProcessContext context) {
            return mockPool;
        }
    });
    runner.setProperty(PublishKafkaRecord_0_10.TOPIC, TOPIC_NAME);

    // Record reader controller service with a simple name/age schema.
    final MockRecordParser parser = new MockRecordParser();
    parser.addSchemaField("name", RecordFieldType.STRING);
    parser.addSchemaField("age", RecordFieldType.INT);
    runner.addControllerService("record-reader", parser);
    runner.enableControllerService(parser);

    // Matching record writer controller service.
    final RecordSetWriterFactory writerFactory = new MockRecordWriter("name, age");
    runner.addControllerService("record-writer", writerFactory);
    runner.enableControllerService(writerFactory);

    // Wire the services into the processor by their controller-service ids.
    runner.setProperty(PublishKafkaRecord_0_10.RECORD_READER, "record-reader");
    runner.setProperty(PublishKafkaRecord_0_10.RECORD_WRITER, "record-writer");
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) MockFlowFile(org.apache.nifi.util.MockFlowFile) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) RecordSet(org.apache.nifi.serialization.record.RecordSet) MockRecordWriter(org.apache.nifi.processors.kafka.pubsub.util.MockRecordWriter) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) ProcessContext(org.apache.nifi.processor.ProcessContext) MockRecordParser(org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser) Before(org.junit.Before)

Example 5 with MockRecordParser

Use of org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser in the Apache NiFi project.

From the class TestConsumeKafkaRecord_0_10, method setup.

@Before
public void setup() throws InitializationException {
    // Mock out the Kafka consumer machinery so no real broker is required.
    mockLease = mock(ConsumerLease.class);
    mockConsumerPool = mock(ConsumerPool.class);

    // Processor subclass that hands back the mocked pool instead of building a real one.
    final ConsumeKafkaRecord_0_10 processor = new ConsumeKafkaRecord_0_10() {
        @Override
        protected ConsumerPool createConsumerPool(final ProcessContext context, final ComponentLog log) {
            return mockConsumerPool;
        }
    };

    runner = TestRunners.newTestRunner(processor);
    runner.setProperty(KafkaProcessorUtils.BOOTSTRAP_SERVERS, "okeydokey:1234");

    // Register a record reader controller service exposing a simple name/age schema.
    final MockRecordParser parser = new MockRecordParser();
    parser.addSchemaField("name", RecordFieldType.STRING);
    parser.addSchemaField("age", RecordFieldType.INT);
    runner.addControllerService("record-reader", parser);
    runner.enableControllerService(parser);

    // Register a matching record writer controller service.
    final RecordSetWriterFactory writerFactory = new MockRecordWriter("name, age");
    runner.addControllerService("record-writer", writerFactory);
    runner.enableControllerService(writerFactory);

    // Point the processor at the two controller services by id.
    runner.setProperty(ConsumeKafkaRecord_0_10.RECORD_READER, "record-reader");
    runner.setProperty(ConsumeKafkaRecord_0_10.RECORD_WRITER, "record-writer");
}
Also used : RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) MockRecordWriter(org.apache.nifi.processors.kafka.pubsub.util.MockRecordWriter) ComponentLog(org.apache.nifi.logging.ComponentLog) ProcessContext(org.apache.nifi.processor.ProcessContext) MockRecordParser(org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser) Before(org.junit.Before)

Aggregations

MockRecordParser (org.apache.nifi.processors.kafka.pubsub.util.MockRecordParser)7 RecordSetWriterFactory (org.apache.nifi.serialization.RecordSetWriterFactory)7 ProcessContext (org.apache.nifi.processor.ProcessContext)6 MockRecordWriter (org.apache.nifi.processors.kafka.pubsub.util.MockRecordWriter)6 Before (org.junit.Before)6 FlowFile (org.apache.nifi.flowfile.FlowFile)4 RecordSchema (org.apache.nifi.serialization.record.RecordSchema)4 RecordSet (org.apache.nifi.serialization.record.RecordSet)4 MockFlowFile (org.apache.nifi.util.MockFlowFile)4 ComponentLog (org.apache.nifi.logging.ComponentLog)3 ByteArrayInputStream (java.io.ByteArrayInputStream)1 ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord)1 RecordReader (org.apache.nifi.serialization.RecordReader)1 RecordSetWriter (org.apache.nifi.serialization.RecordSetWriter)1 Record (org.apache.nifi.serialization.record.Record)1 Test (org.junit.Test)1