use of org.apache.kafka.common.header.internals.RecordHeaders in project kafka by apache.
the class RecordQueueTest method shouldPassPartitionTimeToTimestampExtractor.
@Test
public void shouldPassPartitionTimeToTimestampExtractor() {
final PartitionTimeTrackingTimestampExtractor timestampExtractor = new PartitionTimeTrackingTimestampExtractor();
final RecordQueue queue = new RecordQueue(
    new TopicPartition("topic", 1),
    mockSourceNodeWithMetrics,
    timestampExtractor,
    new LogAndFailExceptionHandler(),
    context,
    new LogContext());
assertTrue(queue.isEmpty());
assertEquals(0, queue.size());
assertEquals(RecordQueue.UNKNOWN, queue.headRecordTimestamp());
// add four out-of-order records with timestamps 2, 1, 3, 4
final List<ConsumerRecord<byte[], byte[]>> list1 = Arrays.asList(
    new ConsumerRecord<>("topic", 1, 2, 0L, TimestampType.CREATE_TIME, 0, 0, recordKey, recordValue, new RecordHeaders(), Optional.empty()),
    new ConsumerRecord<>("topic", 1, 1, 0L, TimestampType.CREATE_TIME, 0, 0, recordKey, recordValue, new RecordHeaders(), Optional.empty()),
    new ConsumerRecord<>("topic", 1, 3, 0L, TimestampType.CREATE_TIME, 0, 0, recordKey, recordValue, new RecordHeaders(), Optional.empty()),
    new ConsumerRecord<>("topic", 1, 4, 0L, TimestampType.CREATE_TIME, 0, 0, recordKey, recordValue, new RecordHeaders(), Optional.empty()));
assertEquals(RecordQueue.UNKNOWN, timestampExtractor.partitionTime);
queue.addRawRecords(list1);
// no (known) timestamp has yet been passed to the timestamp extractor
assertEquals(RecordQueue.UNKNOWN, timestampExtractor.partitionTime);
queue.poll();
assertEquals(2L, timestampExtractor.partitionTime);
queue.poll();
assertEquals(2L, timestampExtractor.partitionTime);
queue.poll();
assertEquals(3L, timestampExtractor.partitionTime);
}
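The test depends on a helper extractor defined elsewhere in RecordQueueTest. A minimal sketch of what it might look like, assuming it simply records the partitionTime argument it is handed and returns the record's offset as the extracted timestamp (which is why the records constructed with offsets 2, 1, 3, 4 are described as carrying those timestamps):
static class PartitionTimeTrackingTimestampExtractor implements TimestampExtractor {
    // starts out UNKNOWN, matching the assertion before the first poll
    long partitionTime = RecordQueue.UNKNOWN;

    @Override
    public long extract(final ConsumerRecord<Object, Object> record, final long partitionTime) {
        // remember the latest partition time the queue passed in,
        // so the test can assert on it after each poll
        this.partitionTime = partitionTime;
        // treat the offset as the timestamp: records 2, 1, 3, 4
        return record.offset();
    }
}
Under this sketch the observed sequence follows directly: the first poll sees the head timestamp 1 and a partition time of 2 (the highest timestamp observed so far was established when the records were sorted), the second poll still reports 2, and the third reports 3 once the record with timestamp 3 becomes the head.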
use of org.apache.kafka.common.header.internals.RecordHeaders in project kafka by apache.
the class SourceNodeTest method shouldProvideTopicHeadersAndDataToKeyDeserializer.
@Test
public void shouldProvideTopicHeadersAndDataToKeyDeserializer() {
final SourceNode<String, String> sourceNode = new MockSourceNode<>(new TheDeserializer(), new TheDeserializer());
final RecordHeaders headers = new RecordHeaders();
final String deserializedKey = sourceNode.deserializeKey("topic", headers, "data".getBytes(StandardCharsets.UTF_8));
assertThat(deserializedKey, is("topic" + headers + "data"));
}
use of org.apache.kafka.common.header.internals.RecordHeaders in project kafka by apache.
the class SourceNodeTest method shouldProvideTopicHeadersAndDataToValueDeserializer.
@Test
public void shouldProvideTopicHeadersAndDataToValueDeserializer() {
final SourceNode<String, String> sourceNode = new MockSourceNode<>(new TheDeserializer(), new TheDeserializer());
final RecordHeaders headers = new RecordHeaders();
final String deserializedValue = sourceNode.deserializeValue("topic", headers, "data".getBytes(StandardCharsets.UTF_8));
assertThat(deserializedValue, is("topic" + headers + "data"));
}
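Both tests rely on TheDeserializer, a test helper defined in SourceNodeTest but not shown here. A minimal sketch, assuming it concatenates the topic, the headers' toString(), and the decoded payload, which is exactly what the assertions compare against:
public static class TheDeserializer implements Deserializer<String> {
    @Override
    public String deserialize(final String topic, final Headers headers, final byte[] data) {
        // echo back everything the deserializer was handed, so the test can
        // verify that topic, headers, and data all reached it
        return topic + headers + new String(data, StandardCharsets.UTF_8);
    }

    @Override
    public String deserialize(final String topic, final byte[] data) {
        return deserialize(topic, null, data);
    }
}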
use of org.apache.kafka.common.header.internals.RecordHeaders in project kafka by apache.
the class ProcessorRecordContextTest method shouldEstimateNullValueInHeaderAsZero.
@Test
public void shouldEstimateNullValueInHeaderAsZero() {
final Headers headers = new RecordHeaders();
headers.add("header-key", null);
final ProcessorRecordContext context = new ProcessorRecordContext(42L, 73L, 0, null, headers);
assertEquals(MIN_SIZE + 10L, context.residentMemorySizeEstimate());
}
use of org.apache.kafka.common.header.internals.RecordHeaders in project kafka by apache.
the class ProcessorRecordContextTest method shouldEstimateTopicLength.
@Test
public void shouldEstimateTopicLength() {
final ProcessorRecordContext context = new ProcessorRecordContext(42L, 73L, 0, "topic", new RecordHeaders());
assertEquals(MIN_SIZE + 5L, context.residentMemorySizeEstimate());
}
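The two expected values follow from how the size estimate is accounted: MIN_SIZE covers the fixed fields, and on top of that each topic character, each header-key character, and each non-null header-value byte adds one. A sketch of that accounting, not the exact Kafka implementation, which explains why "header-key" with a null value adds 10 and "topic" adds 5:
public long residentMemorySizeEstimate() {
    long size = MIN_SIZE;
    if (topic != null) {
        size += topic.length(); // "topic" adds 5
    }
    if (headers != null) {
        for (final Header header : headers) {
            size += header.key().length(); // "header-key" adds 10
            if (header.value() != null) {
                size += header.value().length; // a null value adds nothing
            }
        }
    }
    return size;
}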