Example 96 with MemoryRecordsBuilder

Use of org.apache.kafka.common.record.MemoryRecordsBuilder in project kafka by apache, from the class ProduceRequestTest, method testV3AndAboveCannotUseMagicV0.

@Test
public void testV3AndAboveCannotUseMagicV0() {
    ByteBuffer buffer = ByteBuffer.allocate(256);
    MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V0, CompressionType.NONE, TimestampType.NO_TIMESTAMP_TYPE, 0L);
    builder.append(10L, null, "a".getBytes());
    ProduceRequest.Builder requestBuilder = ProduceRequest.forCurrentMagic(new ProduceRequestData()
            .setTopicData(new ProduceRequestData.TopicProduceDataCollection(Collections.singletonList(
                    new ProduceRequestData.TopicProduceData()
                            .setName("test")
                            .setPartitionData(Collections.singletonList(new ProduceRequestData.PartitionProduceData()
                                    .setIndex(0)
                                    .setRecords(builder.build()))))
                    .iterator()))
            .setAcks((short) 1)
            .setTimeoutMs(5000));
    assertThrowsForAllVersions(requestBuilder, InvalidRecordException.class);
}
Also used: ProduceRequestData (org.apache.kafka.common.message.ProduceRequestData), MemoryRecordsBuilder (org.apache.kafka.common.record.MemoryRecordsBuilder), ByteBuffer (java.nio.ByteBuffer), Test (org.junit.jupiter.api.Test)
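
For contrast, here is a minimal sketch of the accepted path (not one of the indexed examples): when the records are built with the current magic value, the same request shape is valid and forCurrentMagic can build it for the latest produce version without throwing. MemoryRecords.withRecords, SimpleRecord, and ApiKeys.PRODUCE are assumed to be importable in the same test scope.

// Minimal sketch (assumption, not from the indexed sources): the same request
// shape succeeds when the records use the current magic value.
MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE,
        new SimpleRecord(10L, null, "a".getBytes()));
ProduceRequest.Builder validBuilder = ProduceRequest.forCurrentMagic(new ProduceRequestData()
        .setTopicData(new ProduceRequestData.TopicProduceDataCollection(Collections.singletonList(
                new ProduceRequestData.TopicProduceData()
                        .setName("test")
                        .setPartitionData(Collections.singletonList(new ProduceRequestData.PartitionProduceData()
                                .setIndex(0)
                                .setRecords(records))))
                .iterator()))
        .setAcks((short) 1)
        .setTimeoutMs(5000));
// Building for the latest version does not throw, unlike the magic v0 case above.
ProduceRequest request = validBuilder.build(ApiKeys.PRODUCE.latestVersion());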

Example 97 with MemoryRecordsBuilder

Use of org.apache.kafka.common.record.MemoryRecordsBuilder in project kafka by apache, from the class ProduceRequestTest, method testV3AndAboveShouldContainOnlyOneRecordBatch.

@Test
public void testV3AndAboveShouldContainOnlyOneRecordBatch() {
    ByteBuffer buffer = ByteBuffer.allocate(256);
    MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 0L);
    builder.append(10L, null, "a".getBytes());
    builder.close();
    builder = MemoryRecords.builder(buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 1L);
    builder.append(11L, "1".getBytes(), "b".getBytes());
    builder.append(12L, null, "c".getBytes());
    builder.close();
    buffer.flip();
    ProduceRequest.Builder requestBuilder = ProduceRequest.forCurrentMagic(new ProduceRequestData()
            .setTopicData(new ProduceRequestData.TopicProduceDataCollection(Collections.singletonList(
                    new ProduceRequestData.TopicProduceData()
                            .setName("test")
                            .setPartitionData(Collections.singletonList(new ProduceRequestData.PartitionProduceData()
                                    .setIndex(0)
                                    .setRecords(MemoryRecords.readableRecords(buffer)))))
                    .iterator()))
            .setAcks((short) 1)
            .setTimeoutMs(5000));
    assertThrowsForAllVersions(requestBuilder, InvalidRecordException.class);
}
Also used: ProduceRequestData (org.apache.kafka.common.message.ProduceRequestData), MemoryRecordsBuilder (org.apache.kafka.common.record.MemoryRecordsBuilder), ByteBuffer (java.nio.ByteBuffer), Test (org.junit.jupiter.api.Test)
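
To make the failure mode concrete, here is a small sketch (an illustration, not one of the indexed sources) that counts the batches in the buffer assembled above: the buffer holds two record batches, while produce v3 and above allows exactly one batch per partition, which is why building the request throws InvalidRecordException.

// Illustrative only: iterate the batches in the buffer built by the test above.
MemoryRecords records = MemoryRecords.readableRecords(buffer.duplicate());
int batchCount = 0;
for (RecordBatch batch : records.batches()) {
    // each iteration sees one record batch; this buffer contains two of them
    batchCount++;
}
// batchCount == 2 here, which is what makes the request above invalid for v3 and later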

Aggregations

MemoryRecordsBuilder (org.apache.kafka.common.record.MemoryRecordsBuilder): 97
ByteBuffer (java.nio.ByteBuffer): 50
MemoryRecords (org.apache.kafka.common.record.MemoryRecords): 36
TopicPartition (org.apache.kafka.common.TopicPartition): 30
HashMap (java.util.HashMap): 26
SimpleRecord (org.apache.kafka.common.record.SimpleRecord): 25
Test (org.junit.jupiter.api.Test): 25
Test (org.junit.Test): 20
ArrayList (java.util.ArrayList): 17
List (java.util.List): 17
Record (org.apache.kafka.common.record.Record): 17
LinkedHashMap (java.util.LinkedHashMap): 16
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 14
MetricName (org.apache.kafka.common.MetricName): 14
KafkaMetric (org.apache.kafka.common.metrics.KafkaMetric): 14
Arrays.asList (java.util.Arrays.asList): 10
CompressionType (org.apache.kafka.common.record.CompressionType): 10
RecordBatch (org.apache.kafka.common.record.RecordBatch): 10
Header (org.apache.kafka.common.header.Header): 9
EndTransactionMarker (org.apache.kafka.common.record.EndTransactionMarker): 9