Example use of org.apache.kafka.clients.producer.ProducerRecord in project kafka (by apache):
class TimeOrderedKeyValueBufferTest, method shouldFlush.
@Test
public void shouldFlush() {
    // Verify that flushing the buffer forwards its contents (including a
    // tombstone for an evicted key) to the changelog topic.
    final TimeOrderedKeyValueBuffer<String, String> buffer = bufferSupplier.apply(testName);
    final MockInternalProcessorContext context = makeContext();
    buffer.init((StateStoreContext) context, buffer);
    putRecord(buffer, context, 2L, 0L, "asdf", "2093j");
    putRecord(buffer, context, 1L, 1L, "zxcv", "3gon4i");
    putRecord(buffer, context, 0L, 2L, "deleteme", "deadbeef");
    // Evicting everything below timestamp 1 turns "deleteme" into a tombstone.
    buffer.evictWhile(() -> buffer.minTimestamp() < 1, kv -> {
    });
    // Push the buffered state out to the changelog.
    buffer.flush();
    // The buffer serializes the buffer time and the value into a byte[],
    // which ProducerRecord equality cannot compare meaningfully. Decode each
    // payload back into a KeyValue<timestamp, BufferValue> so the records can
    // be asserted with plain equality.
    final List<ProducerRecord<String, KeyValue<Long, BufferValue>>> decodedRecords =
        ((MockRecordCollector) context.recordCollector()).collected().stream().map(sent -> {
            final KeyValue<Long, BufferValue> decoded;
            if (sent.value() != null) {
                final ByteBuffer payload = ByteBuffer.wrap((byte[]) sent.value());
                final BufferValue bufferValue = BufferValue.deserialize(payload);
                // The buffer time is written after the BufferValue bytes.
                decoded = new KeyValue<>(payload.getLong(), bufferValue);
            } else {
                decoded = null;
            }
            return new ProducerRecord<>(sent.topic(), sent.partition(), sent.timestamp(), sent.key().toString(), decoded, sent.headers());
        }).collect(Collectors.toList());
    assertThat(
        decodedRecords,
        is(asList(
            // The producer assigns the timestamp, hence null here.
            new ProducerRecord<>(APP_ID + "-" + testName + "-changelog", 0, null, "deleteme", null, new RecordHeaders()),
            new ProducerRecord<>(APP_ID + "-" + testName + "-changelog", 0, null, "zxcv", new KeyValue<>(1L, getBufferValue("3gon4i", 1)), CHANGELOG_HEADERS),
            new ProducerRecord<>(APP_ID + "-" + testName + "-changelog", 0, null, "asdf", new KeyValue<>(2L, getBufferValue("2093j", 0)), CHANGELOG_HEADERS)
        ))
    );
    cleanup(context, buffer);
}
Example use of org.apache.kafka.clients.producer.ProducerRecord in project pinpoint (by naver):
class ProducerSendInterceptor, method before.
/**
 * Invoked before the intercepted producer send. If the call carries a
 * {@code ProducerRecord} and the current raw trace is sampled, opens a span
 * event block and tags it with the Kafka client service type.
 *
 * @param target the intercepted producer instance
 * @param args   the intercepted method arguments; the record is extracted from these
 */
@Override
public void before(Object target, Object[] args) {
    if (logger.isDebugEnabled()) {
        logger.beforeInterceptor(target, args);
    }
    // Wildcard type instead of the raw ProducerRecord type: the key/value
    // types are irrelevant here, and raw types defeat generic type checking.
    final ProducerRecord<?, ?> record = getProducerRecord(args);
    if (record == null) {
        // Not a send we can trace — nothing to do.
        return;
    }
    final Trace trace = traceContext.currentRawTraceObject();
    if (trace == null) {
        return;
    }
    if (trace.canSampled()) {
        // Open the span event here; the matching traceBlockEnd() happens in after().
        final SpanEventRecorder spanEventRecorder = trace.traceBlockBegin();
        spanEventRecorder.recordServiceType(KafkaConstants.KAFKA_CLIENT);
    }
}
Example use of org.apache.kafka.clients.producer.ProducerRecord in project pinpoint (by naver):
class ProducerSendInterceptor, method after.
/**
 * Invoked after the intercepted producer send. Records the API descriptor,
 * endpoint/destination, topic, and any thrown exception on the current span
 * event, then closes the block opened in {@code before}.
 *
 * @param target    the intercepted producer instance
 * @param args      the intercepted method arguments; the record is extracted from these
 * @param result    the return value of the intercepted call (unused here)
 * @param throwable the exception thrown by the intercepted call, or null
 */
@Override
public void after(Object target, Object[] args, Object result, Throwable throwable) {
    if (logger.isDebugEnabled()) {
        logger.afterInterceptor(target, args, result, throwable);
    }
    // Wildcard type instead of the raw ProducerRecord type: the key/value
    // types are irrelevant here, and raw types defeat generic type checking.
    final ProducerRecord<?, ?> record = getProducerRecord(args);
    if (record == null) {
        return;
    }
    final Trace trace = traceContext.currentTraceObject();
    if (trace == null) {
        return;
    }
    if (!trace.canSampled()) {
        // before() only opened a block for sampled traces; nothing to close.
        return;
    }
    try {
        final SpanEventRecorder recorder = trace.currentSpanEventRecorder();
        recorder.recordApi(descriptor);
        // presumably the broker address of the target producer — used as both
        // endpoint and destination; confirm against getRemoteAddress().
        final String remoteAddress = getRemoteAddress(target);
        recorder.recordEndPoint(remoteAddress);
        recorder.recordDestinationId(remoteAddress);
        final String topic = record.topic();
        recorder.recordAttribute(KafkaConstants.KAFKA_TOPIC_ANNOTATION_KEY, topic);
        if (throwable != null) {
            recorder.recordException(throwable);
        }
        if (isHeaderRecorded) {
            headerRecorder.record(recorder, record);
        }
    } finally {
        // Always close the span event block opened in before().
        trace.traceBlockEnd();
    }
}
Example use of org.apache.kafka.clients.producer.ProducerRecord in project beam (by apache):
class KafkaIOTest, method testSinkProducerRecordsWithCustomTS.
@Test
public void testSinkProducerRecordsWithCustomTS() throws Exception {
    // Write records carrying an explicit user-defined timestamp and verify
    // that the mock producer received that timestamp on every record.
    int numElements = 1000;
    try (MockProducerWrapper wrapper = new MockProducerWrapper()) {
        ProducerSendCompletionThread sendThread =
            new ProducerSendCompletionThread(wrapper.mockProducer).start();
        final String defaultTopic = "test";
        final Long ts = System.currentTimeMillis();
        p.apply(mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata())
            .apply(ParDo.of(new KV2ProducerRecord(defaultTopic, ts)))
            .setCoder(ProducerRecordCoder.of(VarIntCoder.of(), VarLongCoder.of()))
            .apply(KafkaIO.<Integer, Long>writeRecords()
                .withBootstrapServers("none")
                .withKeySerializer(IntegerSerializer.class)
                .withValueSerializer(LongSerializer.class)
                .withProducerFactoryFn(new ProducerFactoryFn(wrapper.producerKey)));
        p.run();
        sendThread.shutdown();
        // Every sent record must carry the user-defined timestamp.
        List<ProducerRecord<Integer, Long>> history = wrapper.mockProducer.history();
        for (int idx = 0; idx < numElements; idx++) {
            ProducerRecord<Integer, Long> sent = history.get(idx);
            assertEquals(defaultTopic, sent.topic());
            assertEquals(idx, sent.key().intValue());
            assertEquals(idx, sent.value().longValue());
            assertEquals(ts, sent.timestamp());
        }
    }
}
Example use of org.apache.kafka.clients.producer.ProducerRecord in project beam (by apache):
class KafkaIOTest, method testSinkToMultipleTopics.
@Test
public void testSinkToMultipleTopics() throws Exception {
    // The KV2ProducerRecord fn routes records to different output topics;
    // verify each record landed on the expected topic with input timestamps.
    int numElements = 1000;
    try (MockProducerWrapper wrapper = new MockProducerWrapper()) {
        ProducerSendCompletionThread sendThread =
            new ProducerSendCompletionThread(wrapper.mockProducer).start();
        String defaultTopic = "test";
        p.apply(mkKafkaReadTransform(numElements, new ValueAsTimestampFn()).withoutMetadata())
            .apply(ParDo.of(new KV2ProducerRecord(defaultTopic, false)))
            .setCoder(ProducerRecordCoder.of(VarIntCoder.of(), VarLongCoder.of()))
            .apply(KafkaIO.<Integer, Long>writeRecords()
                .withBootstrapServers("none")
                .withKeySerializer(IntegerSerializer.class)
                .withValueSerializer(LongSerializer.class)
                .withInputTimestamp()
                .withProducerFactoryFn(new ProducerFactoryFn(wrapper.producerKey)));
        p.run();
        sendThread.shutdown();
        // Even-indexed records go to "test_2", odd-indexed ones to "test_1".
        List<ProducerRecord<Integer, Long>> history = wrapper.mockProducer.history();
        for (int idx = 0; idx < numElements; idx++) {
            ProducerRecord<Integer, Long> sent = history.get(idx);
            String expectedTopic = idx % 2 == 0 ? "test_2" : "test_1";
            assertEquals(expectedTopic, sent.topic());
            assertEquals(idx, sent.key().intValue());
            assertEquals(idx, sent.value().longValue());
            assertEquals(idx, sent.timestamp().intValue());
            assertEquals(0, sent.headers().toArray().length);
        }
    }
}
Aggregations