Example 11 with Producer

Use of org.apache.kafka.clients.producer.Producer in project kafka by apache.

Class TopologyTestDriver, method captureOutputsAndReEnqueueInternalResults:

private void captureOutputsAndReEnqueueInternalResults() {
    // Capture all the records sent to the producer ...
    final List<ProducerRecord<byte[], byte[]>> output = producer.history();
    producer.clear();
    for (final ProducerRecord<byte[], byte[]> record : output) {
        outputRecordsByTopic.computeIfAbsent(record.topic(), k -> new LinkedList<>()).add(record);
        // Forward back into the topology if the produced record is to an internal or a source topic ...
        final String outputTopicName = record.topic();
        final TopicPartition inputTopicOrPatternPartition = getInputTopicOrPatternPartition(outputTopicName);
        final TopicPartition globalInputTopicPartition = globalPartitionsByInputTopic.get(outputTopicName);
        if (inputTopicOrPatternPartition != null) {
            enqueueTaskRecord(outputTopicName, inputTopicOrPatternPartition, record.timestamp(), record.key(), record.value(), record.headers());
        }
        if (globalInputTopicPartition != null) {
            processGlobalRecord(globalInputTopicPartition, record.timestamp(), record.key(), record.value(), record.headers());
        }
    }
}
Also used : ConsumerGroupMetadata(org.apache.kafka.clients.consumer.ConsumerGroupMetadata) OffsetResetStrategy(org.apache.kafka.clients.consumer.OffsetResetStrategy) StateDirectory(org.apache.kafka.streams.processor.internals.StateDirectory) ReadOnlyKeyValueStoreFacade(org.apache.kafka.streams.state.internals.ReadOnlyKeyValueStoreFacade) LogContext(org.apache.kafka.common.utils.LogContext) ProcessorStateManager(org.apache.kafka.streams.processor.internals.ProcessorStateManager) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) Duration(java.time.Duration) Map(java.util.Map) ValueAndTimestamp.getValueOrNull(org.apache.kafka.streams.state.ValueAndTimestamp.getValueOrNull) TimestampType(org.apache.kafka.common.record.TimestampType) Sensor(org.apache.kafka.common.metrics.Sensor) TimestampedWindowStore(org.apache.kafka.streams.state.TimestampedWindowStore) Position(org.apache.kafka.streams.query.Position) ThreadCache(org.apache.kafka.streams.state.internals.ThreadCache) Set(java.util.Set) PartitionInfo(org.apache.kafka.common.PartitionInfo) StateRestoreListener(org.apache.kafka.streams.processor.StateRestoreListener) EXACTLY_ONCE_V2(org.apache.kafka.streams.internals.StreamsConfigUtils.ProcessingMode.EXACTLY_ONCE_V2) ProcessorContext(org.apache.kafka.streams.processor.ProcessorContext) Metrics(org.apache.kafka.common.metrics.Metrics) ClientUtils(org.apache.kafka.streams.processor.internals.ClientUtils) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) ProcessorTopology(org.apache.kafka.streams.processor.internals.ProcessorTopology) ReadOnlyKeyValueStore(org.apache.kafka.streams.state.ReadOnlyKeyValueStore) GlobalProcessorContextImpl(org.apache.kafka.streams.processor.internals.GlobalProcessorContextImpl) TopologyException(org.apache.kafka.streams.errors.TopologyException) Supplier(java.util.function.Supplier) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer) ThreadLocalRandom(java.util.concurrent.ThreadLocalRandom) PunctuationType(org.apache.kafka.streams.processor.PunctuationType) Deserializer(org.apache.kafka.common.serialization.Deserializer) SessionStore(org.apache.kafka.streams.state.SessionStore) RecordCollectorImpl(org.apache.kafka.streams.processor.internals.RecordCollectorImpl) Properties(java.util.Properties) Producer(org.apache.kafka.clients.producer.Producer) LogAndContinueExceptionHandler(org.apache.kafka.streams.errors.LogAndContinueExceptionHandler) IOException(java.io.IOException) TimestampedKeyValueStore(org.apache.kafka.streams.state.TimestampedKeyValueStore) AtomicLong(java.util.concurrent.atomic.AtomicLong) StateStore(org.apache.kafka.streams.processor.StateStore) Serializer(org.apache.kafka.common.serialization.Serializer) InternalTopologyBuilder(org.apache.kafka.streams.processor.internals.InternalTopologyBuilder) ReadOnlyWindowStoreFacade(org.apache.kafka.streams.state.internals.ReadOnlyWindowStoreFacade) ReadOnlySessionStore(org.apache.kafka.streams.state.ReadOnlySessionStore) MockProducer(org.apache.kafka.clients.producer.MockProducer) TaskMetrics(org.apache.kafka.streams.processor.internals.metrics.TaskMetrics) GlobalStateManager(org.apache.kafka.streams.processor.internals.GlobalStateManager) MockConsumer(org.apache.kafka.clients.consumer.MockConsumer) ChangelogRegister(org.apache.kafka.streams.processor.internals.ChangelogRegister) LoggerFactory(org.slf4j.LoggerFactory) RecordCollector(org.apache.kafka.streams.processor.internals.RecordCollector) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) 
AT_LEAST_ONCE(org.apache.kafka.streams.internals.StreamsConfigUtils.ProcessingMode.AT_LEAST_ONCE) Metric(org.apache.kafka.common.Metric) MetricName(org.apache.kafka.common.MetricName) StreamsMetricsImpl(org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl) TestRecord(org.apache.kafka.streams.test.TestRecord) ProcessorContextImpl(org.apache.kafka.streams.processor.internals.ProcessorContextImpl) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) StreamsConfigUtils(org.apache.kafka.streams.internals.StreamsConfigUtils) Time(org.apache.kafka.common.utils.Time) InternalProcessorContext(org.apache.kafka.streams.processor.internals.InternalProcessorContext) Collection(java.util.Collection) MetricConfig(org.apache.kafka.common.metrics.MetricConfig) UUID(java.util.UUID) Instant(java.time.Instant) Objects(java.util.Objects) List(java.util.List) GlobalStateManagerImpl(org.apache.kafka.streams.processor.internals.GlobalStateManagerImpl) ProducerFencedException(org.apache.kafka.common.errors.ProducerFencedException) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) Optional(java.util.Optional) Queue(java.util.Queue) Pattern(java.util.regex.Pattern) ReadOnlyWindowStore(org.apache.kafka.streams.state.ReadOnlyWindowStore) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) TaskId(org.apache.kafka.streams.processor.TaskId) TaskConfig(org.apache.kafka.streams.processor.internals.namedtopology.TopologyConfig.TaskConfig) Headers(org.apache.kafka.common.header.Headers) GlobalStateUpdateTask(org.apache.kafka.streams.processor.internals.GlobalStateUpdateTask) HashMap(java.util.HashMap) WindowStore(org.apache.kafka.streams.state.WindowStore) HashSet(java.util.HashSet) StateStoreContext(org.apache.kafka.streams.processor.StateStoreContext) Windowed(org.apache.kafka.streams.kstream.Windowed) LinkedList(java.util.LinkedList) NoSuchElementException(java.util.NoSuchElementException) Logger(org.slf4j.Logger) Punctuator(org.apache.kafka.streams.processor.Punctuator) StreamsProducer(org.apache.kafka.streams.processor.internals.StreamsProducer) StreamTask(org.apache.kafka.streams.processor.internals.StreamTask) Task(org.apache.kafka.streams.processor.internals.Task) TimeUnit(java.util.concurrent.TimeUnit) EXACTLY_ONCE_ALPHA(org.apache.kafka.streams.internals.StreamsConfigUtils.ProcessingMode.EXACTLY_ONCE_ALPHA) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) WindowStoreIterator(org.apache.kafka.streams.state.WindowStoreIterator) Closeable(java.io.Closeable) Collections(java.util.Collections) TopicPartition(org.apache.kafka.common.TopicPartition) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) LinkedList(java.util.LinkedList)
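
The capture loop above works because the test driver wires the topology to a MockProducer, which keeps every sent record in memory until clear() is called. A minimal, self-contained sketch of that history()/clear() drain pattern follows; the topic name, keys, and values are illustrative, not taken from the driver.

import java.util.List;

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

public class MockProducerDrainSketch {
    public static void main(String[] args) {
        // autoComplete = true so each send() future completes immediately
        final MockProducer<byte[], byte[]> producer = new MockProducer<>(
            true, new ByteArraySerializer(), new ByteArraySerializer());

        // Something upstream (in the driver, the stream tasks) produces records ...
        producer.send(new ProducerRecord<>("some-internal-topic", "key".getBytes(), "value".getBytes()));

        // Drain everything captured so far, then reset the mock so the next pass
        // only sees newly produced records -- the same history()/clear() dance
        // performed by captureOutputsAndReEnqueueInternalResults().
        final List<ProducerRecord<byte[], byte[]>> output = producer.history();
        producer.clear();
        for (final ProducerRecord<byte[], byte[]> record : output) {
            System.out.println(record.topic() + " -> " + new String(record.value()));
        }
    }
}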

Example 12 with Producer

Use of org.apache.kafka.clients.producer.Producer in project incubator-atlas by apache.

Class KafkaNotificationMockTest, method shouldCollectAllFailedMessagesIfProducerFails:

@Test
@SuppressWarnings("unchecked")
public void shouldCollectAllFailedMessagesIfProducerFails() throws NotificationException, ExecutionException, InterruptedException {
    Properties configProperties = mock(Properties.class);
    KafkaNotification kafkaNotification = new KafkaNotification(configProperties);
    Producer producer = mock(Producer.class);
    String topicName = kafkaNotification.getTopicName(NotificationInterface.NotificationType.HOOK);
    String message1 = "This is a test message1";
    String message2 = "This is a test message2";
    Future returnValue1 = mock(Future.class);
    when(returnValue1.get()).thenThrow(new RuntimeException("Simulating exception"));
    Future returnValue2 = mock(Future.class);
    when(returnValue2.get()).thenThrow(new RuntimeException("Simulating exception"));
    ProducerRecord expectedRecord1 = new ProducerRecord(topicName, message1);
    when(producer.send(expectedRecord1)).thenReturn(returnValue1);
    ProducerRecord expectedRecord2 = new ProducerRecord(topicName, message2);
    when(producer.send(expectedRecord2)).thenReturn(returnValue2);
    try {
        kafkaNotification.sendInternalToProducer(producer, NotificationInterface.NotificationType.HOOK, new String[] { message1, message2 });
        fail("Should have thrown NotificationException");
    } catch (NotificationException e) {
        assertEquals(e.getFailedMessages().size(), 2);
        assertEquals(e.getFailedMessages().get(0), "This is a test message1");
        assertEquals(e.getFailedMessages().get(1), "This is a test message2");
    }
}
Also used : Producer(org.apache.kafka.clients.producer.Producer) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) NotificationException(org.apache.atlas.notification.NotificationException) Future(java.util.concurrent.Future) Properties(java.util.Properties) Test(org.testng.annotations.Test)
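
The method under test follows a send-all-then-check pattern: every message is handed to Producer.send(), and a message is only recorded as failed when its Future's get() throws, so one bad message does not stop the rest from being sent. Below is a rough sketch of that pattern, assuming a hypothetical sendAll helper; KafkaNotification's actual sendInternalToProducer differs in details.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class SendAllSketch {
    // Illustrative stand-in for sendInternalToProducer: send every message,
    // then surface the ones whose futures failed.
    static List<String> sendAll(Producer<String, String> producer, String topic, String... messages) {
        final List<Future<RecordMetadata>> futures = new ArrayList<>();
        for (String message : messages) {
            // Fire off all sends first; failures only surface on get()
            futures.add(producer.send(new ProducerRecord<>(topic, message)));
        }
        final List<String> failed = new ArrayList<>();
        for (int i = 0; i < futures.size(); i++) {
            try {
                futures.get(i).get();
            } catch (Exception e) {
                // collect the failed message instead of stopping at the first failure
                failed.add(messages[i]);
            }
        }
        // the caller wraps a non-empty list in a NotificationException
        return failed;
    }
}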

Example 13 with Producer

Use of org.apache.kafka.clients.producer.Producer in project incubator-atlas by apache.

Class KafkaNotificationMockTest, method shouldThrowExceptionIfProducerFails:

@Test
@SuppressWarnings("unchecked")
public void shouldThrowExceptionIfProducerFails() throws NotificationException, ExecutionException, InterruptedException {
    Properties configProperties = mock(Properties.class);
    KafkaNotification kafkaNotification = new KafkaNotification(configProperties);
    Producer producer = mock(Producer.class);
    String topicName = kafkaNotification.getTopicName(NotificationInterface.NotificationType.HOOK);
    String message = "This is a test message";
    Future returnValue = mock(Future.class);
    when(returnValue.get()).thenThrow(new RuntimeException("Simulating exception"));
    ProducerRecord expectedRecord = new ProducerRecord(topicName, message);
    when(producer.send(expectedRecord)).thenReturn(returnValue);
    try {
        kafkaNotification.sendInternalToProducer(producer, NotificationInterface.NotificationType.HOOK, new String[] { message });
        fail("Should have thrown NotificationException");
    } catch (NotificationException e) {
        assertEquals(e.getFailedMessages().size(), 1);
        assertEquals(e.getFailedMessages().get(0), "This is a test message");
    }
}
Also used : Producer(org.apache.kafka.clients.producer.Producer) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) NotificationException(org.apache.atlas.notification.NotificationException) Future(java.util.concurrent.Future) Properties(java.util.Properties) Test(org.testng.annotations.Test)
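
The when(producer.send(expectedRecord)) stubbing only matches because ProducerRecord overrides equals(), so Mockito's default equals-based argument matching recognises the equivalent record built inside sendInternalToProducer. A small stand-alone sketch of that matching behaviour; the topic name and message here are illustrative.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class StubbingSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) throws Exception {
        Producer<String, String> producer = mock(Producer.class);
        Future<RecordMetadata> failing = mock(Future.class);
        when(failing.get()).thenThrow(new RuntimeException("Simulating exception"));

        // Two equal ProducerRecord instances hit the same stub,
        // because ProducerRecord defines equals()/hashCode().
        when(producer.send(new ProducerRecord<>("some-hook-topic", "This is a test message")))
                .thenReturn(failing);

        Future<RecordMetadata> result =
                producer.send(new ProducerRecord<>("some-hook-topic", "This is a test message"));
        System.out.println(result == failing);  // true -- equals-based argument matching
    }
}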

Aggregations

Producer (org.apache.kafka.clients.producer.Producer): 13
Properties (java.util.Properties): 10
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 9
HashMap (java.util.HashMap): 6
Future (java.util.concurrent.Future): 6
TopicPartition (org.apache.kafka.common.TopicPartition): 6
Test (org.testng.annotations.Test): 6
Set (java.util.Set): 5
ArrayList (java.util.ArrayList): 4
Collection (java.util.Collection): 4
Collections (java.util.Collections): 4
List (java.util.List): 4
Map (java.util.Map): 4
NotificationException (org.apache.atlas.notification.NotificationException): 4
HashSet (java.util.HashSet): 3
Objects (java.util.Objects): 3
Optional (java.util.Optional): 3
TimeUnit (java.util.concurrent.TimeUnit): 3
Collectors (java.util.stream.Collectors): 3
Consumer (org.apache.kafka.clients.consumer.Consumer): 3