
Example 31 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project kafka by apache.

From class RequestResponseTest, method createFetchResponse.

private FetchResponse createFetchResponse(boolean includeAborted) {
    LinkedHashMap<TopicIdPartition, FetchResponseData.PartitionData> responseData = new LinkedHashMap<>();
    Uuid topicId = Uuid.randomUuid();
    MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
    responseData.put(new TopicIdPartition(topicId, new TopicPartition("test", 0)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(0)
            .setHighWatermark(1000000)
            .setLogStartOffset(0)
            .setRecords(records));
    List<FetchResponseData.AbortedTransaction> abortedTransactions = emptyList();
    if (includeAborted) {
        abortedTransactions = singletonList(
            new FetchResponseData.AbortedTransaction().setProducerId(234L).setFirstOffset(999L));
    }
    responseData.put(new TopicIdPartition(topicId, new TopicPartition("test", 1)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(1)
            .setHighWatermark(1000000)
            .setLogStartOffset(0)
            .setAbortedTransactions(abortedTransactions));
    return FetchResponse.parse(
        FetchResponse.of(Errors.NONE, 25, INVALID_SESSION_ID, responseData).serialize(FETCH.latestVersion()),
        FETCH.latestVersion());
}
Also used : FetchResponseData(org.apache.kafka.common.message.FetchResponseData) Uuid(org.apache.kafka.common.Uuid) TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) TopicIdPartition(org.apache.kafka.common.TopicIdPartition) LinkedHashMap(java.util.LinkedHashMap) MemoryRecords(org.apache.kafka.common.record.MemoryRecords)
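
As a complementary sketch (not taken from the project's sources), the snippet below shows one way to read back the single record that such a helper packs into a batch. It relies only on the MemoryRecords, Record, and SimpleRecord APIs already used above; the class name MemoryRecordsReadBackSketch is ours.

import java.nio.charset.StandardCharsets;

import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.record.SimpleRecord;

public class MemoryRecordsReadBackSketch {
    public static void main(String[] args) {
        // Build a single-record batch the same way the helper above does.
        MemoryRecords records = MemoryRecords.withRecords(
            CompressionType.NONE, new SimpleRecord("blah".getBytes(StandardCharsets.UTF_8)));

        // Iterate the individual records and decode their values.
        for (Record record : records.records()) {
            String value = StandardCharsets.UTF_8.decode(record.value()).toString();
            System.out.println("offset=" + record.offset() + " value=" + value);
        }
    }
}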

Example 32 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project kafka by apache.

From class RequestResponseTest, method createFetchResponse.

private FetchResponse createFetchResponse(int sessionId) {
    LinkedHashMap<TopicIdPartition, FetchResponseData.PartitionData> responseData = new LinkedHashMap<>();
    Map<String, Uuid> topicIds = new HashMap<>();
    topicIds.put("test", Uuid.randomUuid());
    MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
    responseData.put(new TopicIdPartition(topicIds.get("test"), new TopicPartition("test", 0)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(0)
            .setHighWatermark(1000000)
            .setLogStartOffset(0)
            .setRecords(records));
    List<FetchResponseData.AbortedTransaction> abortedTransactions = singletonList(
        new FetchResponseData.AbortedTransaction().setProducerId(234L).setFirstOffset(999L));
    responseData.put(new TopicIdPartition(topicIds.get("test"), new TopicPartition("test", 1)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(1)
            .setHighWatermark(1000000)
            .setLogStartOffset(0)
            .setAbortedTransactions(abortedTransactions));
    return FetchResponse.parse(
        FetchResponse.of(Errors.NONE, 25, sessionId, responseData).serialize(FETCH.latestVersion()),
        FETCH.latestVersion());
}
Also used : LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) TopicIdPartition(org.apache.kafka.common.TopicIdPartition) FetchResponseData(org.apache.kafka.common.message.FetchResponseData) Uuid(org.apache.kafka.common.Uuid) TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) MemoryRecords(org.apache.kafka.common.record.MemoryRecords)
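
MemoryRecordsBuilder, which appears in the Aggregations list at the end of this page, is the incremental way to assemble the same kind of batch that MemoryRecords.withRecords produces in one call. A minimal sketch, assuming the builder overload that takes a compression type, timestamp type, and base offset; the class name MemoryRecordsBuilderSketch is ours.

import java.nio.ByteBuffer;

import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MemoryRecordsBuilder;
import org.apache.kafka.common.record.TimestampType;

public class MemoryRecordsBuilderSketch {
    public static void main(String[] args) {
        // Backing buffer for the batch; the builder writes records into it.
        ByteBuffer buffer = ByteBuffer.allocate(1024);

        MemoryRecordsBuilder builder = MemoryRecords.builder(
            buffer, CompressionType.NONE, TimestampType.CREATE_TIME, 0L);
        builder.append(System.currentTimeMillis(), "key".getBytes(), "blah".getBytes());
        builder.append(System.currentTimeMillis(), null, "more".getBytes());

        // build() closes the batch and returns the finished MemoryRecords view.
        MemoryRecords records = builder.build();
        System.out.println("batch size in bytes: " + records.sizeInBytes());
    }
}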

Example 33 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project kafka by apache.

From class RequestResponseTest, method createProduceRequest.

private ProduceRequest createProduceRequest(short version) {
    if (version < 2) {
        MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
        ProduceRequestData data = new ProduceRequestData()
            .setAcks((short) -1)
            .setTimeoutMs(123)
            .setTopicData(new ProduceRequestData.TopicProduceDataCollection(singletonList(
                new ProduceRequestData.TopicProduceData()
                    .setName("topic1")
                    .setPartitionData(singletonList(
                        new ProduceRequestData.PartitionProduceData()
                            .setIndex(1)
                            .setRecords(records)))).iterator()));
        return new ProduceRequest.Builder(version, version, data).build(version);
    }
    byte magic = version == 2 ? RecordBatch.MAGIC_VALUE_V1 : RecordBatch.MAGIC_VALUE_V2;
    MemoryRecords records = MemoryRecords.withRecords(magic, CompressionType.NONE, new SimpleRecord("woot".getBytes()));
    return ProduceRequest.forMagic(magic, new ProduceRequestData()
        .setTopicData(new ProduceRequestData.TopicProduceDataCollection(singletonList(
            new ProduceRequestData.TopicProduceData()
                .setName("test")
                .setPartitionData(singletonList(
                    new ProduceRequestData.PartitionProduceData()
                        .setIndex(0)
                        .setRecords(records)))).iterator()))
        .setAcks((short) 1)
        .setTimeoutMs(5000)
        .setTransactionalId(version >= 3 ? "transactionalId" : null))
        .build(version);
}
Also used : ProduceRequestData(org.apache.kafka.common.message.ProduceRequestData) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) MemoryRecords(org.apache.kafka.common.record.MemoryRecords)
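
For context (our sketch, not the project's code), the snippet below builds a small ProduceRequest the same way and then walks request.data() back out to decode the record values, relying only on the generated ProduceRequestData accessors and the ProduceRequest.forMagic builder already used above; the class name ProduceRequestWalkSketch is ours.

import java.nio.charset.StandardCharsets;

import org.apache.kafka.common.message.ProduceRequestData;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.SimpleRecord;
import org.apache.kafka.common.requests.ProduceRequest;

import static java.util.Collections.singletonList;

public class ProduceRequestWalkSketch {
    public static void main(String[] args) {
        MemoryRecords records = MemoryRecords.withRecords(
            CompressionType.NONE, new SimpleRecord("woot".getBytes(StandardCharsets.UTF_8)));

        ProduceRequestData data = new ProduceRequestData()
            .setAcks((short) 1)
            .setTimeoutMs(5000)
            .setTopicData(new ProduceRequestData.TopicProduceDataCollection(singletonList(
                new ProduceRequestData.TopicProduceData()
                    .setName("test")
                    .setPartitionData(singletonList(
                        new ProduceRequestData.PartitionProduceData()
                            .setIndex(0)
                            .setRecords(records)))).iterator()));

        ProduceRequest request = ProduceRequest.forMagic(RecordBatch.MAGIC_VALUE_V2, data).build();

        // Walk the request payload back out and decode each record value.
        for (ProduceRequestData.TopicProduceData topic : request.data().topicData()) {
            for (ProduceRequestData.PartitionProduceData partition : topic.partitionData()) {
                MemoryRecords partitionRecords = (MemoryRecords) partition.records();
                for (Record record : partitionRecords.records()) {
                    System.out.println(topic.name() + "-" + partition.index() + ": "
                        + StandardCharsets.UTF_8.decode(record.value()));
                }
            }
        }
    }
}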

Example 34 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project kafka by apache.

From class RequestResponseTest, method testFetchResponseV4.

@Test
public void testFetchResponseV4() {
    LinkedHashMap<TopicIdPartition, FetchResponseData.PartitionData> responseData = new LinkedHashMap<>();
    Map<Uuid, String> topicNames = new HashMap<>();
    topicNames.put(Uuid.randomUuid(), "bar");
    topicNames.put(Uuid.randomUuid(), "foo");
    MemoryRecords records = MemoryRecords.readableRecords(ByteBuffer.allocate(10));
    List<FetchResponseData.AbortedTransaction> abortedTransactions = asList(
        new FetchResponseData.AbortedTransaction().setProducerId(10).setFirstOffset(100),
        new FetchResponseData.AbortedTransaction().setProducerId(15).setFirstOffset(50));
    // Use zero UUID since this is an old request version.
    responseData.put(new TopicIdPartition(Uuid.ZERO_UUID, new TopicPartition("bar", 0)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(0)
            .setHighWatermark(1000000)
            .setAbortedTransactions(abortedTransactions)
            .setRecords(records));
    responseData.put(new TopicIdPartition(Uuid.ZERO_UUID, new TopicPartition("bar", 1)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(1)
            .setHighWatermark(900000)
            .setLastStableOffset(5)
            .setRecords(records));
    responseData.put(new TopicIdPartition(Uuid.ZERO_UUID, new TopicPartition("foo", 0)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(0)
            .setHighWatermark(70000)
            .setLastStableOffset(6)
            .setRecords(records));
    FetchResponse response = FetchResponse.of(Errors.NONE, 10, INVALID_SESSION_ID, responseData);
    FetchResponse deserialized = FetchResponse.parse(response.serialize((short) 4), (short) 4);
    assertEquals(
        responseData.entrySet().stream().collect(Collectors.toMap(e -> e.getKey().topicPartition(), Map.Entry::getValue)),
        deserialized.responseData(topicNames, (short) 4));
}
Also used : LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) TopicIdPartition(org.apache.kafka.common.TopicIdPartition) FetchResponseData(org.apache.kafka.common.message.FetchResponseData) AccessControlEntry(org.apache.kafka.common.acl.AccessControlEntry) Uuid(org.apache.kafka.common.Uuid) TopicPartition(org.apache.kafka.common.TopicPartition) MemoryRecords(org.apache.kafka.common.record.MemoryRecords) Test(org.junit.jupiter.api.Test)
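
As a side note (our sketch, not from the project's sources), MemoryRecords.readableRecords, as used in this test, simply wraps an existing buffer without copying it, which is why the test can pass a plain 10-byte buffer; wrapping real serialized bytes works the same way. The class name ReadableRecordsSketch is ours.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.record.SimpleRecord;

public class ReadableRecordsSketch {
    public static void main(String[] args) {
        // Serialize a real batch, then re-wrap its bytes the way the test wraps a plain buffer.
        MemoryRecords original = MemoryRecords.withRecords(
            CompressionType.NONE, new SimpleRecord("bar".getBytes(StandardCharsets.UTF_8)));
        ByteBuffer bytes = original.buffer();

        // readableRecords is just a view over the given buffer; nothing is copied or parsed yet.
        MemoryRecords reread = MemoryRecords.readableRecords(bytes);
        for (Record record : reread.records()) {
            System.out.println(StandardCharsets.UTF_8.decode(record.value()));
        }
    }
}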

Example 35 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project kafka by apache.

From class RequestResponseTest, method createFetchResponse.

private FetchResponse createFetchResponse(short version) {
    FetchResponseData data = new FetchResponseData();
    if (version > 0) {
        data.setThrottleTimeMs(345);
    }
    if (version > 6) {
        data.setErrorCode(Errors.NONE.code()).setSessionId(123);
    }
    MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
    FetchResponseData.PartitionData partition = new FetchResponseData.PartitionData()
        .setPartitionIndex(0)
        .setErrorCode(Errors.NONE.code())
        .setHighWatermark(123L)
        .setRecords(records);
    if (version > 3) {
        partition.setLastStableOffset(234L);
    }
    if (version > 4) {
        partition.setLogStartOffset(456L);
    }
    if (version > 10) {
        partition.setPreferredReadReplica(1);
    }
    if (version > 11) {
        partition.setDivergingEpoch(new FetchResponseData.EpochEndOffset()
                .setEndOffset(1L)
                .setEpoch(2))
            .setSnapshotId(new FetchResponseData.SnapshotId()
                .setEndOffset(1L)
                .setEpoch(2))
            .setCurrentLeader(new FetchResponseData.LeaderIdAndEpoch()
                .setLeaderEpoch(1)
                .setLeaderId(2));
    }
    FetchResponseData.FetchableTopicResponse response = new FetchResponseData.FetchableTopicResponse()
        .setTopic("topic")
        .setPartitions(singletonList(partition));
    if (version > 12) {
        response.setTopicId(Uuid.randomUuid());
    }
    data.setResponses(singletonList(response));
    return new FetchResponse(data);
}
Also used : FetchResponseData(org.apache.kafka.common.message.FetchResponseData) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) MemoryRecords(org.apache.kafka.common.record.MemoryRecords)
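
Since this helper parameterizes the response by version, a natural companion (our sketch, using only the serialize/parse pattern already shown in the earlier examples; the class name FetchResponseRoundTripSketch is ours) is a round trip through the wire format at a chosen version.

import org.apache.kafka.common.message.FetchResponseData;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.requests.FetchResponse;

public class FetchResponseRoundTripSketch {
    // Serializes a FetchResponse at the given version and parses it back,
    // mirroring how the createFetchResponse helpers above are exercised.
    static FetchResponse roundTrip(FetchResponse response, short version) {
        return FetchResponse.parse(response.serialize(version), version);
    }

    public static void main(String[] args) {
        FetchResponseData data = new FetchResponseData()
            .setThrottleTimeMs(345)
            .setErrorCode(Errors.NONE.code())
            .setSessionId(123);
        FetchResponse response = new FetchResponse(data);

        FetchResponse deserialized = roundTrip(response, (short) 12);
        System.out.println("session id after round trip: " + deserialized.sessionId());
    }
}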

Aggregations

MemoryRecords (org.apache.kafka.common.record.MemoryRecords) 108
TopicPartition (org.apache.kafka.common.TopicPartition) 59
Test (org.junit.jupiter.api.Test) 43
SimpleRecord (org.apache.kafka.common.record.SimpleRecord) 40
ByteBuffer (java.nio.ByteBuffer) 34
ArrayList (java.util.ArrayList) 28
List (java.util.List) 27
Test (org.junit.Test) 27
HashMap (java.util.HashMap) 26
LinkedHashMap (java.util.LinkedHashMap) 23
MemoryRecordsBuilder (org.apache.kafka.common.record.MemoryRecordsBuilder) 23
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord) 18
FetchResponseData (org.apache.kafka.common.message.FetchResponseData) 16
Collections.singletonList (java.util.Collections.singletonList) 15
Record (org.apache.kafka.common.record.Record) 15
Arrays.asList (java.util.Arrays.asList) 14
Collections.emptyList (java.util.Collections.emptyList) 14
ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer) 14
Metrics (org.apache.kafka.common.metrics.Metrics) 12
MutableRecordBatch (org.apache.kafka.common.record.MutableRecordBatch) 11