
Example 86 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project kafka by apache.

The class TimeOrderedKeyValueBufferTest, method shouldFlush.

@Test
public void shouldFlush() {
    final TimeOrderedKeyValueBuffer<String, String> buffer = bufferSupplier.apply(testName);
    final MockInternalProcessorContext context = makeContext();
    buffer.init((StateStoreContext) context, buffer);
    putRecord(buffer, context, 2L, 0L, "asdf", "2093j");
    putRecord(buffer, context, 1L, 1L, "zxcv", "3gon4i");
    putRecord(buffer, context, 0L, 2L, "deleteme", "deadbeef");
    // replace "deleteme" with a tombstone
    buffer.evictWhile(() -> buffer.minTimestamp() < 1, kv -> {
    });
    // flush everything to the changelog
    buffer.flush();
    // The buffer serializes the buffer time and the value together as a byte[],
    // which we can't compare for equality through ProducerRecord.
    // As a workaround, deserialize them into a KeyValue for ease of testing.
    final List<ProducerRecord<String, KeyValue<Long, BufferValue>>> collected =
        ((MockRecordCollector) context.recordCollector()).collected().stream().map(pr -> {
        final KeyValue<Long, BufferValue> niceValue;
        if (pr.value() == null) {
            niceValue = null;
        } else {
            final byte[] serializedValue = (byte[]) pr.value();
            final ByteBuffer valueBuffer = ByteBuffer.wrap(serializedValue);
            final BufferValue contextualRecord = BufferValue.deserialize(valueBuffer);
            final long timestamp = valueBuffer.getLong();
            niceValue = new KeyValue<>(timestamp, contextualRecord);
        }
        return new ProducerRecord<>(pr.topic(), pr.partition(), pr.timestamp(), pr.key().toString(), niceValue, pr.headers());
    }).collect(Collectors.toList());
    assertThat(collected, is(asList(
        new ProducerRecord<>(APP_ID + "-" + testName + "-changelog",
            // Producer will assign
            0, null, "deleteme", null, new RecordHeaders()),
        new ProducerRecord<>(APP_ID + "-" + testName + "-changelog",
            0, null, "zxcv", new KeyValue<>(1L, getBufferValue("3gon4i", 1)), CHANGELOG_HEADERS),
        new ProducerRecord<>(APP_ID + "-" + testName + "-changelog",
            0, null, "asdf", new KeyValue<>(2L, getBufferValue("2093j", 0)), CHANGELOG_HEADERS))));
    cleanup(context, buffer);
}
Also used : StreamsConfig(org.apache.kafka.streams.StreamsConfig) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) TaskId(org.apache.kafka.streams.processor.TaskId) ProcessorRecordContext(org.apache.kafka.streams.processor.internals.ProcessorRecordContext) RunWith(org.junit.runner.RunWith) Random(java.util.Random) Eviction(org.apache.kafka.streams.state.internals.TimeOrderedKeyValueBuffer.Eviction) RecordHeader(org.apache.kafka.common.header.internals.RecordHeader) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) Function(java.util.function.Function) RecordBatchingStateRestoreCallback(org.apache.kafka.streams.processor.internals.RecordBatchingStateRestoreCallback) ByteBuffer(java.nio.ByteBuffer) Collections.singletonList(java.util.Collections.singletonList) RecordHeaders(org.apache.kafka.common.header.internals.RecordHeaders) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) StateStoreContext(org.apache.kafka.streams.processor.StateStoreContext) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Arrays.asList(java.util.Arrays.asList) Serdes(org.apache.kafka.common.serialization.Serdes) StringSerializer(org.apache.kafka.common.serialization.StringSerializer) Record(org.apache.kafka.streams.processor.api.Record) Assert.fail(org.junit.Assert.fail) MockRecordCollector(org.apache.kafka.test.MockRecordCollector) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) LinkedList(java.util.LinkedList) TimestampType(org.apache.kafka.common.record.TimestampType) Parameterized(org.junit.runners.Parameterized) Utils(org.apache.kafka.common.utils.Utils) Properties(java.util.Properties) TestUtils(org.apache.kafka.test.TestUtils) UTF_8(java.nio.charset.StandardCharsets.UTF_8) Collection(java.util.Collection) KeyValue(org.apache.kafka.streams.KeyValue) CHANGELOG_HEADERS(org.apache.kafka.streams.state.internals.InMemoryTimeOrderedKeyValueBuffer.CHANGELOG_HEADERS) Test(org.junit.Test) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) MockInternalProcessorContext(org.apache.kafka.test.MockInternalProcessorContext) List(java.util.List) Header(org.apache.kafka.common.header.Header) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Optional(java.util.Optional) Matchers.is(org.hamcrest.Matchers.is) Change(org.apache.kafka.streams.kstream.internals.Change)
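
For reference, the six-argument ProducerRecord constructor used in the assertion above takes topic, partition, timestamp, key, value, and headers, and a null value is how Kafka represents a tombstone for a key on a compacted changelog topic. A minimal sketch of the same constructor (the class name and topic here are illustrative, not from the test):

import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class TombstoneRecordSketch {
    public static void main(String[] args) {
        // Arguments: topic, partition, timestamp, key, value, headers.
        // A null timestamp lets the producer assign one at send time;
        // a null value marks a tombstone for the key on a compacted topic.
        ProducerRecord<String, byte[]> tombstone =
                new ProducerRecord<>("my-app-changelog", 0, null, "deleteme", null, new RecordHeaders());
        System.out.println(tombstone);
    }
}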

Example 87 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project pinpoint by naver.

The class ProducerSendInterceptor, method before.

@Override
public void before(Object target, Object[] args) {
    if (logger.isDebugEnabled()) {
        logger.beforeInterceptor(target, args);
    }
    ProducerRecord record = getProducerRecord(args);
    if (record == null) {
        return;
    }
    final Trace trace = traceContext.currentRawTraceObject();
    if (trace == null) {
        return;
    }
    if (trace.canSampled()) {
        SpanEventRecorder spanEventRecorder = trace.traceBlockBegin();
        spanEventRecorder.recordServiceType(KafkaConstants.KAFKA_CLIENT);
    }
}
Also used : Trace(com.navercorp.pinpoint.bootstrap.context.Trace) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) SpanEventRecorder(com.navercorp.pinpoint.bootstrap.context.SpanEventRecorder)

Example 88 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project pinpoint by naver.

The class ProducerSendInterceptor, method after.

@Override
public void after(Object target, Object[] args, Object result, Throwable throwable) {
    if (logger.isDebugEnabled()) {
        logger.afterInterceptor(target, args, result, throwable);
    }
    ProducerRecord record = getProducerRecord(args);
    if (record == null) {
        return;
    }
    final Trace trace = traceContext.currentTraceObject();
    if (trace == null) {
        return;
    }
    if (!trace.canSampled()) {
        return;
    }
    try {
        SpanEventRecorder recorder = trace.currentSpanEventRecorder();
        recorder.recordApi(descriptor);
        String remoteAddress = getRemoteAddress(target);
        recorder.recordEndPoint(remoteAddress);
        recorder.recordDestinationId(remoteAddress);
        String topic = record.topic();
        recorder.recordAttribute(KafkaConstants.KAFKA_TOPIC_ANNOTATION_KEY, topic);
        if (throwable != null) {
            recorder.recordException(throwable);
        }
        if (isHeaderRecorded) {
            headerRecorder.record(recorder, record);
        }
    } finally {
        trace.traceBlockEnd();
    }
}
Also used : Trace(com.navercorp.pinpoint.bootstrap.context.Trace) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) SpanEventRecorder(com.navercorp.pinpoint.bootstrap.context.SpanEventRecorder)
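
The before and after methods above bracket a producer send with trace bookkeeping via Pinpoint's bytecode instrumentation. For comparison only, Kafka's client library exposes a similar hook natively through org.apache.kafka.clients.producer.ProducerInterceptor, which sees every ProducerRecord before serialization and every acknowledgement; a minimal sketch (this is not Pinpoint's mechanism, and the class name is illustrative):

import java.util.Map;
import org.apache.kafka.clients.producer.ProducerInterceptor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

// Registered via the "interceptor.classes" producer config.
public class LoggingProducerInterceptor implements ProducerInterceptor<String, String> {

    @Override
    public ProducerRecord<String, String> onSend(ProducerRecord<String, String> record) {
        // Called on the sending thread before serialization;
        // roughly where ProducerSendInterceptor.before hooks in.
        System.out.println("sending to topic " + record.topic());
        return record; // may also return a modified record
    }

    @Override
    public void onAcknowledgement(RecordMetadata metadata, Exception exception) {
        // Called when the send is acknowledged or fails;
        // roughly the bookkeeping done in ProducerSendInterceptor.after.
        if (exception != null) {
            System.out.println("send failed: " + exception.getMessage());
        }
    }

    @Override
    public void close() { }

    @Override
    public void configure(Map<String, ?> configs) { }
}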

Example 89 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project beam by apache.

The class KafkaIOTest, method testSinkProducerRecordsWithCustomTS.

@Test
public void testSinkProducerRecordsWithCustomTS() throws Exception {
    int numElements = 1000;
    try (MockProducerWrapper producerWrapper = new MockProducerWrapper()) {
        ProducerSendCompletionThread completionThread = new ProducerSendCompletionThread(producerWrapper.mockProducer).start();
        final String defaultTopic = "test";
        final Long ts = System.currentTimeMillis();
        p.apply(mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata())
            .apply(ParDo.of(new KV2ProducerRecord(defaultTopic, ts)))
            .setCoder(ProducerRecordCoder.of(VarIntCoder.of(), VarLongCoder.of()))
            .apply(KafkaIO.<Integer, Long>writeRecords()
                .withBootstrapServers("none")
                .withKeySerializer(IntegerSerializer.class)
                .withValueSerializer(LongSerializer.class)
                .withProducerFactoryFn(new ProducerFactoryFn(producerWrapper.producerKey)));
        p.run();
        completionThread.shutdown();
        // Verify that messages are written with user-defined timestamp
        List<ProducerRecord<Integer, Long>> sent = producerWrapper.mockProducer.history();
        for (int i = 0; i < numElements; i++) {
            ProducerRecord<Integer, Long> record = sent.get(i);
            assertEquals(defaultTopic, record.topic());
            assertEquals(i, record.key().intValue());
            assertEquals(i, record.value().longValue());
            assertEquals(ts, record.timestamp());
        }
    }
}
Also used : Matchers.containsString(org.hamcrest.Matchers.containsString) IntegerSerializer(org.apache.kafka.common.serialization.IntegerSerializer) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Test(org.junit.Test)

Example 90 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project beam by apache.

The class KafkaIOTest, method testSinkToMultipleTopics.

@Test
public void testSinkToMultipleTopics() throws Exception {
    // Set different output topic names
    int numElements = 1000;
    try (MockProducerWrapper producerWrapper = new MockProducerWrapper()) {
        ProducerSendCompletionThread completionThread = new ProducerSendCompletionThread(producerWrapper.mockProducer).start();
        String defaultTopic = "test";
        p.apply(mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata())
            .apply(ParDo.of(new KV2ProducerRecord(defaultTopic, false)))
            .setCoder(ProducerRecordCoder.of(VarIntCoder.of(), VarLongCoder.of()))
            .apply(KafkaIO.<Integer, Long>writeRecords()
                .withBootstrapServers("none")
                .withKeySerializer(IntegerSerializer.class)
                .withValueSerializer(LongSerializer.class)
                .withInputTimestamp()
                .withProducerFactoryFn(new ProducerFactoryFn(producerWrapper.producerKey)));
        p.run();
        completionThread.shutdown();
        // Verify that appropriate messages are written to different Kafka topics
        List<ProducerRecord<Integer, Long>> sent = producerWrapper.mockProducer.history();
        for (int i = 0; i < numElements; i++) {
            ProducerRecord<Integer, Long> record = sent.get(i);
            if (i % 2 == 0) {
                assertEquals("test_2", record.topic());
            } else {
                assertEquals("test_1", record.topic());
            }
            assertEquals(i, record.key().intValue());
            assertEquals(i, record.value().longValue());
            assertEquals(i, record.timestamp().intValue());
            assertEquals(0, record.headers().toArray().length);
        }
    }
}
Also used : LongSerializer(org.apache.kafka.common.serialization.LongSerializer) Matchers.containsString(org.hamcrest.Matchers.containsString) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Test(org.junit.Test)
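
Both Beam tests above verify output by reading the mock producer's history() rather than hitting a real broker. Stripped of Beam's MockProducerWrapper plumbing, the underlying pattern with Kafka's own MockProducer looks roughly like this (topic and values are illustrative):

import java.util.List;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.LongSerializer;

public class MockProducerSketch {
    public static void main(String[] args) {
        // autoComplete=true completes each send future immediately.
        MockProducer<Integer, Long> producer =
                new MockProducer<>(true, new IntegerSerializer(), new LongSerializer());

        producer.send(new ProducerRecord<>("test", 1, 1L));
        producer.send(new ProducerRecord<>("test", 2, 2L));

        // history() returns every record handed to send(), in order,
        // which is what the verification loops above assert against.
        List<ProducerRecord<Integer, Long>> sent = producer.history();
        System.out.println(sent.size());         // 2
        System.out.println(sent.get(0).topic()); // test
    }
}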

Aggregations

ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 193
Test (org.junit.Test): 90
KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer): 57
Properties (java.util.Properties): 50
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata): 40
ArrayList (java.util.ArrayList): 39
Callback (org.apache.kafka.clients.producer.Callback): 30
Future (java.util.concurrent.Future): 26
TopicPartition (org.apache.kafka.common.TopicPartition): 24
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 21
HashMap (java.util.HashMap): 20
Random (java.util.Random): 19
IOException (java.io.IOException): 16
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 16
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 16
KafkaException (org.apache.kafka.common.KafkaException): 16
List (java.util.List): 13
MockProducer (org.apache.kafka.clients.producer.MockProducer): 13
DefaultPartitioner (org.apache.kafka.clients.producer.internals.DefaultPartitioner): 12
StreamsException (org.apache.kafka.streams.errors.StreamsException): 12
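
The aggregation above clusters around one core workflow: build a ProducerRecord, hand it to a KafkaProducer, and observe the result through a Future or Callback carrying RecordMetadata. A minimal end-to-end sketch, assuming a local broker at a placeholder address and an illustrative topic name:

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class BasicProducerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder address
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            ProducerRecord<String, String> record = new ProducerRecord<>("my-topic", "key", "value");
            // The Callback variant of send() surfaces per-record metadata or errors asynchronously.
            producer.send(record, (metadata, exception) -> {
                if (exception != null) {
                    exception.printStackTrace();
                } else {
                    System.out.printf("wrote to %s-%d@%d%n",
                            metadata.topic(), metadata.partition(), metadata.offset());
                }
            });
        } // close() flushes any pending records
    }
}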