Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.
The following example is from the class ProcessorContextImplTest, method shouldSendRecordHeadersToChangelogTopicWhenConsistencyEnabled.
@Test
public void shouldSendRecordHeadersToChangelogTopicWhenConsistencyEnabled() {
    final Position position = Position.emptyPosition();
    final Headers headers = new RecordHeaders();
    headers.add(ChangelogRecordDeserializationHelper.CHANGELOG_VERSION_HEADER_RECORD_CONSISTENCY);
    headers.add(new RecordHeader(
        ChangelogRecordDeserializationHelper.CHANGELOG_POSITION_HEADER_KEY,
        PositionSerde.serialize(position).array()));
    recordCollector.send(
        CHANGELOG_PARTITION.topic(),
        KEY_BYTES,
        VALUE_BYTES,
        headers,
        CHANGELOG_PARTITION.partition(),
        TIMESTAMP,
        BYTES_KEY_SERIALIZER,
        BYTEARRAY_VALUE_SERIALIZER);

    final StreamTask task = EasyMock.createNiceMock(StreamTask.class);
    replay(recordCollector, task);

    context = new ProcessorContextImpl(
        mock(TaskId.class),
        streamsConfigWithConsistencyMock(),
        stateManager,
        mock(StreamsMetricsImpl.class),
        mock(ThreadCache.class));
    context.transitionToActive(task, recordCollector, null);
    context.logChange(REGISTERED_STORE_NAME, KEY_BYTES, VALUE_BYTES, TIMESTAMP, position);

    verify(recordCollector);
}
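For reference, RecordHeaders is the mutable Headers implementation used throughout these tests: headers can be appended either from a key and byte array or from a pre-built RecordHeader, and read back with lastHeader. A minimal, self-contained sketch of that round trip (the header names here are illustrative, not the constants Kafka Streams uses):

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class HeadersRoundTrip {
    public static void main(String[] args) {
        final Headers headers = new RecordHeaders();
        // Append from a key/value pair ...
        headers.add("version", new byte[] { 0 });
        // ... or from a pre-built Header, as the test above does.
        headers.add(new RecordHeader("position", "42".getBytes(StandardCharsets.UTF_8)));
        // lastHeader returns the most recently added header for a key, or null if absent.
        final Header position = headers.lastHeader("position");
        System.out.println(new String(position.value(), StandardCharsets.UTF_8)); // prints: 42
    }
}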
Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.
The following example is from the class RecordCollectorTest, method shouldThrowInformativeStreamsExceptionOnKeyClassCastException.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void shouldThrowInformativeStreamsExceptionOnKeyClassCastException() {
    final StreamsException expected = assertThrows(
        StreamsException.class,
        () -> this.collector.send(
            "topic",
            "key",
            "value",
            new RecordHeaders(),
            0,
            0L,
            // need to add cast to trigger `ClassCastException`
            (Serializer) new LongSerializer(),
            new StringSerializer()));
    assertThat(expected.getCause(), instanceOf(ClassCastException.class));
    assertThat(expected.getMessage(), equalTo(
        "ClassCastException while producing data to topic topic. " +
        "A serializer (key: org.apache.kafka.common.serialization.LongSerializer / value: org.apache.kafka.common.serialization.StringSerializer) " +
        "is not compatible to the actual key or value type (key type: java.lang.String / value type: java.lang.String). " +
        "Change the default Serdes in StreamConfig or provide correct Serdes via method parameters " +
        "(for example if using the DSL, `#to(String topic, Produced<K, V> produced)` with `Produced.keySerde(WindowedSerdes.timeWindowedSerdeFrom(String.class))`)."));
}
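The raw (Serializer) cast is what lets this compile: generics are erased at runtime, so the mismatch only surfaces when the serializer casts the incoming object. A standalone sketch of the same failure, independent of RecordCollector (class and variable names are illustrative):

import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.Serializer;

public class ErasureDemo {
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public static void main(String[] args) {
        // The raw cast defeats compile-time type checking ...
        final Serializer<String> badSerializer = (Serializer) new LongSerializer();
        // ... so the error only appears at runtime, when LongSerializer's bridge
        // method casts the String argument to Long and throws ClassCastException.
        badSerializer.serialize("topic", "key");
    }
}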
Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.
The following example is from the class RecordCollectorTest, method shouldThrowInformativeStreamsExceptionOnKeyAndNullValueClassCastException.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void shouldThrowInformativeStreamsExceptionOnKeyAndNullValueClassCastException() {
    final StreamsException expected = assertThrows(
        StreamsException.class,
        () -> this.collector.send(
            "topic",
            "key",
            null,
            new RecordHeaders(),
            0,
            0L,
            // need to add cast to trigger `ClassCastException`
            (Serializer) new LongSerializer(),
            new StringSerializer()));
    assertThat(expected.getCause(), instanceOf(ClassCastException.class));
    assertThat(expected.getMessage(), equalTo(
        "ClassCastException while producing data to topic topic. " +
        "A serializer (key: org.apache.kafka.common.serialization.LongSerializer / value: org.apache.kafka.common.serialization.StringSerializer) " +
        "is not compatible to the actual key or value type (key type: java.lang.String / value type: unknown because value is null). " +
        "Change the default Serdes in StreamConfig or provide correct Serdes via method parameters " +
        "(for example if using the DSL, `#to(String topic, Produced<K, V> produced)` with `Produced.keySerde(WindowedSerdes.timeWindowedSerdeFrom(String.class))`)."));
}
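The message can only report "unknown because value is null" because Kafka's stock serializers pass null through (they return null rather than throwing), so the null side never reaches the failing cast. A quick sketch of that behavior:

import org.apache.kafka.common.serialization.StringSerializer;

public class NullPassThroughDemo {
    public static void main(String[] args) {
        // Stock serializers return null for null input instead of throwing,
        // so a null key or value never triggers the ClassCastException path.
        final byte[] bytes = new StringSerializer().serialize("topic", null);
        System.out.println(bytes); // prints: null
    }
}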
Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.
The following example is from the class RecordCollectorTest, method shouldThrowInformativeStreamsExceptionOnValueAndNullKeyClassCastException.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void shouldThrowInformativeStreamsExceptionOnValueAndNullKeyClassCastException() {
    final StreamsException expected = assertThrows(
        StreamsException.class,
        () -> this.collector.send(
            "topic",
            null,
            "value",
            new RecordHeaders(),
            0,
            0L,
            new StringSerializer(),
            // need to add cast to trigger `ClassCastException`
            (Serializer) new LongSerializer()));
    assertThat(expected.getCause(), instanceOf(ClassCastException.class));
    assertThat(expected.getMessage(), equalTo(
        "ClassCastException while producing data to topic topic. " +
        "A serializer (key: org.apache.kafka.common.serialization.StringSerializer / value: org.apache.kafka.common.serialization.LongSerializer) " +
        "is not compatible to the actual key or value type (key type: unknown because key is null / value type: java.lang.String). " +
        "Change the default Serdes in StreamConfig or provide correct Serdes via method parameters " +
        "(for example if using the DSL, `#to(String topic, Produced<K, V> produced)` with `Produced.keySerde(WindowedSerdes.timeWindowedSerdeFrom(String.class))`)."));
}
Use of org.apache.kafka.common.header.internals.RecordHeaders in the Apache Kafka project.
The following example is from the class RecordCollectorTest, method shouldSendWithPartitioner.
@Test
public void shouldSendWithPartitioner() {
    final Headers headers = new RecordHeaders(new Header[] { new RecordHeader("key", "value".getBytes()) });
    collector.send(topic, "3", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.send(topic, "9", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.send(topic, "27", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.send(topic, "81", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.send(topic, "243", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.send(topic, "28", "0", headers, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.send(topic, "82", "0", headers, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.send(topic, "244", "0", headers, null, stringSerializer, stringSerializer, streamPartitioner);
    collector.send(topic, "245", "0", null, null, stringSerializer, stringSerializer, streamPartitioner);

    final Map<TopicPartition, Long> offsets = collector.offsets();
    assertEquals(4L, (long) offsets.get(new TopicPartition(topic, 0)));
    assertEquals(2L, (long) offsets.get(new TopicPartition(topic, 1)));
    assertEquals(0L, (long) offsets.get(new TopicPartition(topic, 2)));
    assertEquals(9, mockProducer.history().size());

    // returned offsets should not be modified
    final TopicPartition topicPartition = new TopicPartition(topic, 0);
    assertThrows(UnsupportedOperationException.class, () -> offsets.put(topicPartition, 50L));
}
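The snippet does not show streamPartitioner itself, but the assertions pin down its behavior: keys 3, 9, 27, 81, and 243 land on partition 0 (five records, last offset 4), keys 28, 82, and 244 on partition 1 (three records, last offset 2), and key 245 on partition 2 (one record, offset 0). With three partitions, that is consistent with parsing the key as an integer and taking it modulo the partition count. A hedged sketch of such a partitioner (the actual fixture in the test may differ):

import org.apache.kafka.streams.processor.StreamPartitioner;

// Route each record by the numeric value of its key, modulo the number of
// partitions; with three partitions this reproduces the offsets asserted above.
final StreamPartitioner<String, String> streamPartitioner =
    (topic, key, value, numPartitions) -> Integer.parseInt(key) % numPartitions;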