Example 11 with ConsumerRecord

use of org.apache.kafka.clients.consumer.ConsumerRecord in project flink by apache.

the class Kafka09FetcherTest method testCancellationWhenEmitBlocks.

@Test
public void testCancellationWhenEmitBlocks() throws Exception {
    // ----- some test data -----
    final String topic = "test-topic";
    final int partition = 3;
    final byte[] payload = new byte[] { 1, 2, 3, 4 };
    final List<ConsumerRecord<byte[], byte[]>> records = Arrays.asList(
            new ConsumerRecord<byte[], byte[]>(topic, partition, 15, payload, payload),
            new ConsumerRecord<byte[], byte[]>(topic, partition, 16, payload, payload),
            new ConsumerRecord<byte[], byte[]>(topic, partition, 17, payload, payload));
    final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> data = new HashMap<>();
    data.put(new TopicPartition(topic, partition), records);
    final ConsumerRecords<byte[], byte[]> consumerRecords = new ConsumerRecords<>(data);
    // ----- the test consumer -----
    final KafkaConsumer<?, ?> mockConsumer = mock(KafkaConsumer.class);
    when(mockConsumer.poll(anyLong())).thenAnswer(new Answer<ConsumerRecords<?, ?>>() {

        @Override
        public ConsumerRecords<?, ?> answer(InvocationOnMock invocation) {
            return consumerRecords;
        }
    });
    whenNew(KafkaConsumer.class).withAnyArguments().thenReturn(mockConsumer);
    // ----- build a fetcher -----
    BlockingSourceContext<String> sourceContext = new BlockingSourceContext<>();
    Map<KafkaTopicPartition, Long> partitionsWithInitialOffsets = Collections.singletonMap(
            new KafkaTopicPartition(topic, partition), KafkaTopicPartitionStateSentinel.GROUP_OFFSET);
    KeyedDeserializationSchema<String> schema = new KeyedDeserializationSchemaWrapper<>(new SimpleStringSchema());
    final Kafka09Fetcher<String> fetcher = new Kafka09Fetcher<>(
            sourceContext,
            partitionsWithInitialOffsets,
            null, /* periodic watermark extractor */
            null, /* punctuated watermark extractor */
            new TestProcessingTimeService(),
            10, /* watermark interval */
            this.getClass().getClassLoader(),
            "task_name",
            new UnregisteredMetricsGroup(),
            schema,
            new Properties(),
            0L,
            false);
    // ----- run the fetcher -----
    final AtomicReference<Throwable> error = new AtomicReference<>();
    final Thread fetcherRunner = new Thread("fetcher runner") {

        @Override
        public void run() {
            try {
                fetcher.runFetchLoop();
            } catch (Throwable t) {
                error.set(t);
            }
        }
    };
    fetcherRunner.start();
    // wait until the thread started to emit records to the source context
    sourceContext.waitTillHasBlocker();
    // now we try to cancel the fetcher, including the interruption usually done on the task thread
    // once it has finished, there must be no more thread blocked on the source context
    fetcher.cancel();
    fetcherRunner.interrupt();
    fetcherRunner.join();
    assertFalse("fetcher threads did not properly finish", sourceContext.isStillBlocking());
}
Also used : UnregisteredMetricsGroup(org.apache.flink.metrics.groups.UnregisteredMetricsGroup) HashMap(java.util.HashMap) KafkaTopicPartition(org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition) Properties(java.util.Properties) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) KeyedDeserializationSchemaWrapper(org.apache.flink.streaming.util.serialization.KeyedDeserializationSchemaWrapper) List(java.util.List) AtomicReference(java.util.concurrent.atomic.AtomicReference) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) KafkaConsumerThread(org.apache.flink.streaming.connectors.kafka.internal.KafkaConsumerThread) Kafka09Fetcher(org.apache.flink.streaming.connectors.kafka.internal.Kafka09Fetcher) TopicPartition(org.apache.kafka.common.TopicPartition) KafkaTopicPartition(org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Mockito.anyLong(org.mockito.Mockito.anyLong) SimpleStringSchema(org.apache.flink.streaming.util.serialization.SimpleStringSchema) TestProcessingTimeService(org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService) PrepareForTest(org.powermock.core.classloader.annotations.PrepareForTest) Test(org.junit.Test)
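
The BlockingSourceContext above is a test utility that parks the emitting thread inside collect() until it is interrupted, so the test can assert that cancellation actually unblocks it. A minimal sketch of the blocking mechanism such a context might build on (the class and method names below are illustrative, not the actual Flink test utility):

final class Blocker {
    private final Object lock = new Object();
    private volatile Thread blockedThread;

    // Called from collect(): parks the calling thread until it is interrupted.
    void block() {
        synchronized (lock) {
            blockedThread = Thread.currentThread();
            // wake up anyone waiting in waitTillHasBlocker()
            lock.notifyAll();
            try {
                while (true) {
                    lock.wait();
                }
            } catch (InterruptedException e) {
                blockedThread = null;
                Thread.currentThread().interrupt();
            }
        }
    }

    // Called from the test thread: blocks until some thread is parked in block().
    void waitTillHasBlocker() throws InterruptedException {
        synchronized (lock) {
            while (blockedThread == null) {
                lock.wait();
            }
        }
    }

    boolean isStillBlocking() {
        return blockedThread != null;
    }
}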

Example 12 with ConsumerRecord

use of org.apache.kafka.clients.consumer.ConsumerRecord in project camel by apache.

the class KafkaEndpointTest method testCreatingKafkaExchangeSetsHeaders.

@Test
public void testCreatingKafkaExchangeSetsHeaders() throws URISyntaxException {
    KafkaEndpoint endpoint = new KafkaEndpoint("kafka:mytopic?brokers=localhost", new KafkaComponent());
    ConsumerRecord<String, String> record = new ConsumerRecord<String, String>("topic", 4, 56, "somekey", "");
    Exchange exchange = endpoint.createKafkaExchange(record);
    assertEquals("somekey", exchange.getIn().getHeader(KafkaConstants.KEY));
    assertEquals("topic", exchange.getIn().getHeader(KafkaConstants.TOPIC));
    assertEquals(4, exchange.getIn().getHeader(KafkaConstants.PARTITION));
    assertEquals(56L, exchange.getIn().getHeader(KafkaConstants.OFFSET));
}
Also used : Exchange(org.apache.camel.Exchange) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Test(org.junit.Test)
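
The assertions pin down the mapping createKafkaExchange performs: key, topic, partition, and offset from the ConsumerRecord land in well-known Camel headers. A hedged sketch of that mapping, assumed from the assertions rather than copied from the camel-kafka source:

import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.component.kafka.KafkaConstants;
import org.apache.kafka.clients.consumer.ConsumerRecord;

// Assumed mapping mirroring the assertions above; not the actual Camel code.
static void populateHeaders(Exchange exchange, ConsumerRecord<String, String> record) {
    Message in = exchange.getIn();
    in.setHeader(KafkaConstants.KEY, record.key());             // "somekey"
    in.setHeader(KafkaConstants.TOPIC, record.topic());         // "topic"
    in.setHeader(KafkaConstants.PARTITION, record.partition()); // 4
    in.setHeader(KafkaConstants.OFFSET, record.offset());       // 56L
    in.setBody(record.value());
}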

Example 13 with ConsumerRecord

use of org.apache.kafka.clients.consumer.ConsumerRecord in project open-kilda by telstra.

the class Consumer method run.

@Override
public void run() {
    Properties kprops = new Properties();
    kprops.put("bootstrap.servers", kafka.url());
    kprops.put("group.id", groupId);
    // NB: only "true" is valid; no code to handle "false" yet.
    kprops.put("enable.auto.commit", "true");
    kprops.put("auto.commit.interval.ms", commitInterval);
    kprops.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    kprops.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(kprops);
    consumer.subscribe(Arrays.asList(topicName));
    Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("heartbeat.consumer").build()).execute(new Runnable() {

        @Override
        public void run() {
            try {
                while (true) {
                    logger.trace("==> Poll for records");
                    ConsumerRecords<String, String> records = consumer.poll(1000);
                    logger.trace("==> Number of records: " + records.count());
                    // consumer.seekToBeginning(records.partitions());
                    for (ConsumerRecord<String, String> record : records) {
                        logger.debug("==> ==> offset = {}, key = {}, value = {}",
                                record.offset(), record.key(), record.value());
                    }
                    Thread.sleep(sleepTime);
                }
            } catch (InterruptedException e) {
                logger.info("Heartbeat Consumer Interrupted");
            } finally {
                consumer.close();
            }
        }
    });
}
Also used : KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) Properties(java.util.Properties) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord)
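
The loop above only exits when Thread.sleep() is interrupted. Because KafkaConsumer is not thread-safe, the pattern the Kafka javadoc recommends for stopping a polling loop from another thread is consumer.wakeup(), which makes a blocked poll() throw WakeupException. A hedged sketch of that variant (the class name and wiring are illustrative):

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

final class StoppableConsumer implements Runnable {
    private final KafkaConsumer<String, String> consumer;
    private final AtomicBoolean closed = new AtomicBoolean(false);

    StoppableConsumer(Properties props, String topic) {
        this.consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(topic));
    }

    @Override
    public void run() {
        try {
            while (!closed.get()) {
                ConsumerRecords<String, String> records = consumer.poll(1000);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s%n",
                            record.offset(), record.key(), record.value());
                }
            }
        } catch (WakeupException e) {
            // expected on shutdown; rethrow only if nobody asked us to stop
            if (!closed.get()) {
                throw e;
            }
        } finally {
            consumer.close();
        }
    }

    // Safe to call from any thread.
    void shutdown() {
        closed.set(true);
        consumer.wakeup();
    }
}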

Example 14 with ConsumerRecord

use of org.apache.kafka.clients.consumer.ConsumerRecord in project nifi by apache.

the class ConsumerLease method writeData.

private void writeData(final ProcessSession session, ConsumerRecord<byte[], byte[]> record, final TopicPartition topicPartition) {
    FlowFile flowFile = session.create();
    final BundleTracker tracker = new BundleTracker(record, topicPartition, keyEncoding);
    tracker.incrementRecordCount(1);
    final byte[] value = record.value();
    if (value != null) {
        flowFile = session.write(flowFile, out -> {
            out.write(value);
        });
    }
    flowFile = session.putAllAttributes(flowFile, getAttributes(record));
    tracker.updateFlowFile(flowFile);
    populateAttributes(tracker);
    session.transfer(tracker.flowFile, REL_SUCCESS);
}
Also used : KafkaException(org.apache.kafka.common.KafkaException) HashMap(java.util.HashMap) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) ComponentLog(org.apache.nifi.logging.ComponentLog) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) UTF8_ENCODING(org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.UTF8_ENCODING) RecordSchema(org.apache.nifi.serialization.record.RecordSchema) ByteArrayInputStream(java.io.ByteArrayInputStream) Charset(java.nio.charset.Charset) RecordReader(org.apache.nifi.serialization.RecordReader) Map(java.util.Map) Record(org.apache.nifi.serialization.record.Record) OutputStream(java.io.OutputStream) Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) HEX_ENCODING(org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.HEX_ENCODING) FlowFile(org.apache.nifi.flowfile.FlowFile) Collection(java.util.Collection) WriteResult(org.apache.nifi.serialization.WriteResult) REL_PARSE_FAILURE(org.apache.nifi.processors.kafka.pubsub.ConsumeKafkaRecord_1_0.REL_PARSE_FAILURE) IOException(java.io.IOException) ProcessSession(org.apache.nifi.processor.ProcessSession) RecordSetWriterFactory(org.apache.nifi.serialization.RecordSetWriterFactory) Collectors(java.util.stream.Collectors) StandardCharsets(java.nio.charset.StandardCharsets) Objects(java.util.Objects) TimeUnit(java.util.concurrent.TimeUnit) ConsumerRebalanceListener(org.apache.kafka.clients.consumer.ConsumerRebalanceListener) List(java.util.List) RecordReaderFactory(org.apache.nifi.serialization.RecordReaderFactory) REL_SUCCESS(org.apache.nifi.processors.kafka.pubsub.ConsumeKafkaRecord_1_0.REL_SUCCESS) Header(org.apache.kafka.common.header.Header) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Closeable(java.io.Closeable) OffsetAndMetadata(org.apache.kafka.clients.consumer.OffsetAndMetadata) Pattern(java.util.regex.Pattern) CoreAttributes(org.apache.nifi.flowfile.attributes.CoreAttributes) DatatypeConverter(javax.xml.bind.DatatypeConverter) RecordSetWriter(org.apache.nifi.serialization.RecordSetWriter) InputStream(java.io.InputStream) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) FlowFile(org.apache.nifi.flowfile.FlowFile)
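
The getAttributes(record) helper is not shown above; a hedged sketch of the kind of attribute map it presumably builds, with assumed kafka.* attribute keys rather than the actual NiFi source:

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerRecord;

// Assumed helper shape; attribute names are illustrative.
static Map<String, String> getAttributes(ConsumerRecord<byte[], byte[]> record) {
    final Map<String, String> attributes = new HashMap<>();
    attributes.put("kafka.topic", record.topic());
    attributes.put("kafka.partition", String.valueOf(record.partition()));
    attributes.put("kafka.offset", String.valueOf(record.offset()));
    if (record.key() != null) {
        attributes.put("kafka.key", new String(record.key(), StandardCharsets.UTF_8));
    }
    return attributes;
}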

Example 15 with ConsumerRecord

use of org.apache.kafka.clients.consumer.ConsumerRecord in project ksql by confluentinc.

the class ConsumerCollector method addSensor.

private void addSensor(String key, String metricNameString, MeasurableStat stat,
        List<TopicSensors.SensorMetric<ConsumerRecord>> sensors, boolean isError,
        Function<ConsumerRecord, Double> recordValue) {
    String name = "cons-" + key + "-" + metricNameString + "-" + id;
    MetricName metricName = new MetricName(metricNameString, "consumer-metrics",
            "consumer-" + name, ImmutableMap.of("key", key, "id", id));
    Sensor existingSensor = metrics.getSensor(name);
    Sensor sensor = metrics.sensor(name);
    // re-use the existing measurable stats to share between consumers
    if (existingSensor == null || metrics.metrics().get(metricName) == null) {
        sensor.add(metricName, stat);
    }
    KafkaMetric metric = metrics.metrics().get(metricName);
    sensors.add(new TopicSensors.SensorMetric<ConsumerRecord>(sensor, metric, time, isError) {

        void record(ConsumerRecord record) {
            sensor.record(recordValue.apply(record));
            super.record(record);
        }
    });
}
Also used : MetricName(org.apache.kafka.common.MetricName) KafkaMetric(org.apache.kafka.common.metrics.KafkaMetric) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Sensor(org.apache.kafka.common.metrics.Sensor)
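
A hedged usage sketch of how addSensor might be invoked to register a per-record rate metric; the key, metric name, and Rate stat below are illustrative, not verbatim ksql code:

import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.metrics.stats.Rate;

List<TopicSensors.SensorMetric<ConsumerRecord>> sensors = new ArrayList<>();
// Records 1.0 per consumed record, which the Rate stat turns into messages/sec.
addSensor("my-topic", "consumer-messages-per-sec", new Rate(), sensors, false, record -> 1.0);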

Aggregations

ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord) 309
TopicPartition (org.apache.kafka.common.TopicPartition) 158
Test (org.junit.Test) 145
ArrayList (java.util.ArrayList) 120
List (java.util.List) 99
HashMap (java.util.HashMap) 97
Map (java.util.Map) 70
RecordHeaders (org.apache.kafka.common.header.internals.RecordHeaders) 61
ConsumerRecords (org.apache.kafka.clients.consumer.ConsumerRecords) 51
Test (org.junit.jupiter.api.Test) 35
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest) 33
OffsetAndMetadata (org.apache.kafka.clients.consumer.OffsetAndMetadata) 31
LinkedHashMap (java.util.LinkedHashMap) 30
Header (org.apache.kafka.common.header.Header) 29
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer) 28
RecordHeader (org.apache.kafka.common.header.internals.RecordHeader) 28
TimeUnit (java.util.concurrent.TimeUnit) 27
Set (java.util.Set) 24
Collectors (java.util.stream.Collectors) 24
ByteBuffer (java.nio.ByteBuffer) 22