
Example 11 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project apache-kafka-on-k8s by banzaicloud: class RequestResponseTest, method createFetchResponse.

private FetchResponse createFetchResponse(int sessionId) {
    LinkedHashMap<TopicPartition, FetchResponse.PartitionData> responseData = new LinkedHashMap<>();
    MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
    responseData.put(new TopicPartition("test", 0), new FetchResponse.PartitionData(Errors.NONE, 1000000, FetchResponse.INVALID_LAST_STABLE_OFFSET, 0L, null, records));
    List<FetchResponse.AbortedTransaction> abortedTransactions = Collections.singletonList(new FetchResponse.AbortedTransaction(234L, 999L));
    responseData.put(new TopicPartition("test", 1), new FetchResponse.PartitionData(Errors.NONE, 1000000, FetchResponse.INVALID_LAST_STABLE_OFFSET, 0L, abortedTransactions, MemoryRecords.EMPTY));
    return new FetchResponse(Errors.NONE, responseData, 25, sessionId);
}
Also used: org.apache.kafka.common.TopicPartition, org.apache.kafka.common.record.SimpleRecord, java.util.LinkedHashMap, org.apache.kafka.common.record.MemoryRecords
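
For context, here is a minimal, self-contained sketch of the MemoryRecords.withRecords factory used above and of reading the records back. It is illustrative only (class name and values are mine, not part of RequestResponseTest):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.record.SimpleRecord;

// Illustrative sketch, not taken from the Kafka test suite.
public class MemoryRecordsRoundTrip {
    public static void main(String[] args) {
        // Build an in-memory record set from a few SimpleRecords (offsets are assigned from 0).
        MemoryRecords records = MemoryRecords.withRecords(CompressionType.NONE,
                new SimpleRecord("k1".getBytes(), "v1".getBytes()),
                new SimpleRecord("k2".getBytes(), "v2".getBytes()));

        // Iterate the logical records that were just written.
        for (Record record : records.records())
            System.out.println(record.offset() + ": " + utf8(record.key()) + " -> " + utf8(record.value()));
    }

    // Decode a ByteBuffer's remaining bytes as UTF-8 without consuming it.
    private static String utf8(ByteBuffer buffer) {
        byte[] bytes = new byte[buffer.remaining()];
        buffer.duplicate().get(bytes);
        return new String(bytes, StandardCharsets.UTF_8);
    }
}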

Example 12 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project apache-kafka-on-k8s by banzaicloud: class RequestResponseTest, method createProduceRequest.

private ProduceRequest createProduceRequest(int version) {
    if (version < 2)
        throw new IllegalArgumentException("Produce request versions below 2 are not supported");
    byte magic = version == 2 ? RecordBatch.MAGIC_VALUE_V1 : RecordBatch.MAGIC_VALUE_V2;
    MemoryRecords records = MemoryRecords.withRecords(magic, CompressionType.NONE, new SimpleRecord("woot".getBytes()));
    Map<TopicPartition, MemoryRecords> produceData = Collections.singletonMap(new TopicPartition("test", 0), records);
    return ProduceRequest.Builder.forMagic(magic, (short) 1, 5000, produceData, "transactionalId").build((short) version);
}
Also used: org.apache.kafka.common.TopicPartition, org.apache.kafka.common.record.SimpleRecord, org.apache.kafka.common.record.MemoryRecords
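
The magic byte chosen above decides which batch format withRecords writes (produce request v2 maps to message format v1, v3 and later to v2). A short illustrative sketch (not part of the test) that makes the mapping visible on the resulting batch:

import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MutableRecordBatch;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.SimpleRecord;

// Illustrative sketch: inspect the magic byte of the batch that withRecords builds.
public class MagicValueDemo {
    public static void main(String[] args) {
        byte magic = RecordBatch.MAGIC_VALUE_V2;  // the value used above for produce request versions >= 3
        MemoryRecords records = MemoryRecords.withRecords(magic, CompressionType.NONE,
                new SimpleRecord("woot".getBytes()));
        for (MutableRecordBatch batch : records.batches())
            System.out.println("batch magic = " + batch.magic());  // prints 2
    }
}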

Example 13 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project apache-kafka-on-k8s by banzaicloud: class TransactionManagerTest, method produceRequestMatcher.

private MockClient.RequestMatcher produceRequestMatcher(final long pid, final short epoch) {
    return new MockClient.RequestMatcher() {

        @Override
        public boolean matches(AbstractRequest body) {
            ProduceRequest produceRequest = (ProduceRequest) body;
            MemoryRecords records = produceRequest.partitionRecordsOrFail().get(tp0);
            assertNotNull(records);
            Iterator<MutableRecordBatch> batchIterator = records.batches().iterator();
            assertTrue(batchIterator.hasNext());
            MutableRecordBatch batch = batchIterator.next();
            assertFalse(batchIterator.hasNext());
            assertTrue(batch.isTransactional());
            assertEquals(pid, batch.producerId());
            assertEquals(epoch, batch.producerEpoch());
            assertEquals(transactionalId, produceRequest.transactionalId());
            return true;
        }
    };
}
Also used: org.apache.kafka.common.record.MutableRecordBatch, org.apache.kafka.common.requests.ProduceRequest, org.apache.kafka.common.requests.AbstractRequest, org.apache.kafka.common.record.MemoryRecords
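
The assertions above read producerId, producerEpoch and the transactional flag from the batch, because the v2 message format stores them at the batch level. A standalone sketch of the same checks (IDs are made up; it assumes the withTransactionalRecords overload available in this Kafka version):

import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MutableRecordBatch;
import org.apache.kafka.common.record.SimpleRecord;

// Illustrative sketch: transactional metadata lives on the batch, not on individual records.
public class TransactionalBatchDemo {
    public static void main(String[] args) {
        long producerId = 13131L;          // hypothetical values
        short producerEpoch = (short) 1;
        int baseSequence = 0;
        MemoryRecords records = MemoryRecords.withTransactionalRecords(CompressionType.NONE,
                producerId, producerEpoch, baseSequence,
                new SimpleRecord("key".getBytes(), "value".getBytes()));
        for (MutableRecordBatch batch : records.batches()) {
            System.out.println("transactional = " + batch.isTransactional());  // true
            System.out.println("producerId    = " + batch.producerId());       // 13131
            System.out.println("producerEpoch = " + batch.producerEpoch());    // 1
        }
    }
}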

Example 14 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project apache-kafka-on-k8s by banzaicloud: class FetcherTest, method testSkippingAbortedTransactions.

@Test
public void testSkippingAbortedTransactions() {
    Fetcher<byte[], byte[]> fetcher = createFetcher(subscriptions, new Metrics(), new ByteArrayDeserializer(),
            new ByteArrayDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    int currentOffset = 0;
    currentOffset += appendTransactionalRecords(buffer, 1L, currentOffset,
            new SimpleRecord(time.milliseconds(), "key".getBytes(), "value".getBytes()),
            new SimpleRecord(time.milliseconds(), "key".getBytes(), "value".getBytes()));
    abortTransaction(buffer, 1L, currentOffset);
    buffer.flip();
    List<FetchResponse.AbortedTransaction> abortedTransactions = new ArrayList<>();
    abortedTransactions.add(new FetchResponse.AbortedTransaction(1, 0));
    MemoryRecords records = MemoryRecords.readableRecords(buffer);
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // normal fetch
    assertEquals(1, fetcher.sendFetches());
    assertFalse(fetcher.hasCompletedFetches());
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(records, abortedTransactions, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(0);
    assertTrue(fetcher.hasCompletedFetches());
    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> fetchedRecords = fetcher.fetchedRecords();
    assertFalse(fetchedRecords.containsKey(tp0));
}
Also used: java.util.ArrayList, org.apache.kafka.common.requests.FetchResponse, java.nio.ByteBuffer, org.apache.kafka.common.metrics.Metrics, org.apache.kafka.common.TopicPartition, org.apache.kafka.common.record.SimpleRecord, java.util.List, org.apache.kafka.common.serialization.ByteArrayDeserializer, org.apache.kafka.common.record.MemoryRecords, org.junit.Test
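
appendTransactionalRecords and abortTransaction above are FetcherTest helpers that write batches straight into the ByteBuffer, which is then flipped and wrapped with MemoryRecords.readableRecords. A rough sketch of that buffer round-trip using the public MemoryRecordsBuilder API (illustrative and non-transactional, not the helpers the test uses):

import java.nio.ByteBuffer;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MemoryRecordsBuilder;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.TimestampType;

// Illustrative sketch: append records into a caller-supplied buffer, then re-read them.
public class BufferRoundTripDemo {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(1024);
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V2,
                CompressionType.NONE, TimestampType.CREATE_TIME, 0L /* base offset */);
        builder.append(System.currentTimeMillis(), "key".getBytes(), "value".getBytes());
        builder.append(System.currentTimeMillis(), "key".getBytes(), "value".getBytes());
        builder.close();  // finalizes the batch header and writes it into the buffer

        buffer.flip();
        MemoryRecords records = MemoryRecords.readableRecords(buffer);
        for (Record record : records.records())
            System.out.println("offset " + record.offset());
    }
}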

Example 15 with MemoryRecords

Use of org.apache.kafka.common.record.MemoryRecords in project apache-kafka-on-k8s by banzaicloud: class FetcherTest, method testParseInvalidRecordBatch.

@Test
public void testParseInvalidRecordBatch() throws Exception {
    MemoryRecords records = MemoryRecords.withRecords(RecordBatch.MAGIC_VALUE_V2, 0L, CompressionType.NONE, TimestampType.CREATE_TIME,
            new SimpleRecord(1L, "a".getBytes(), "1".getBytes()),
            new SimpleRecord(2L, "b".getBytes(), "2".getBytes()),
            new SimpleRecord(3L, "c".getBytes(), "3".getBytes()));
    ByteBuffer buffer = records.buffer();
    // flip some bits to fail the crc
    buffer.putInt(32, buffer.get(32) ^ 87238423);
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // normal fetch
    assertEquals(1, fetcher.sendFetches());
    client.prepareResponse(fullFetchResponse(tp0, MemoryRecords.readableRecords(buffer), Errors.NONE, 100L, 0));
    consumerClient.poll(0);
    try {
        fetcher.fetchedRecords();
        fail("fetchedRecords should have raised");
    } catch (KafkaException e) {
        // the position should not advance since no data has been returned
        assertEquals(0, subscriptions.position(tp0).longValue());
    }
}
Also used: org.apache.kafka.common.record.SimpleRecord, org.apache.kafka.common.KafkaException, java.nio.ByteBuffer, org.apache.kafka.common.record.MemoryRecords, org.junit.Test
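
A self-contained sketch of the same corruption check outside the fetcher (illustrative; the exact exception thrown by ensureValid differs between Kafka versions, so it catches KafkaException broadly):

import java.nio.ByteBuffer;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MutableRecordBatch;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.SimpleRecord;
import org.apache.kafka.common.record.TimestampType;

// Illustrative sketch: corrupt a few bytes of the serialized batch and watch the CRC check fail.
public class CrcCheckDemo {
    public static void main(String[] args) {
        MemoryRecords records = MemoryRecords.withRecords(RecordBatch.MAGIC_VALUE_V2, 0L,
                CompressionType.NONE, TimestampType.CREATE_TIME,
                new SimpleRecord(1L, "a".getBytes(), "1".getBytes()));
        ByteBuffer buffer = records.buffer();
        buffer.putInt(32, buffer.getInt(32) ^ 87238423);  // flip bits inside the batch payload

        for (MutableRecordBatch batch : MemoryRecords.readableRecords(buffer).batches()) {
            System.out.println("isValid = " + batch.isValid());  // false
            try {
                batch.ensureValid();
            } catch (KafkaException e) {
                System.out.println("corruption detected: " + e.getMessage());
            }
        }
    }
}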

Aggregations (number of examples using each class)

MemoryRecords (org.apache.kafka.common.record.MemoryRecords): 108
TopicPartition (org.apache.kafka.common.TopicPartition): 59
Test (org.junit.jupiter.api.Test): 43
SimpleRecord (org.apache.kafka.common.record.SimpleRecord): 40
ByteBuffer (java.nio.ByteBuffer): 34
ArrayList (java.util.ArrayList): 28
List (java.util.List): 27
Test (org.junit.Test): 27
HashMap (java.util.HashMap): 26
LinkedHashMap (java.util.LinkedHashMap): 23
MemoryRecordsBuilder (org.apache.kafka.common.record.MemoryRecordsBuilder): 23
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 18
FetchResponseData (org.apache.kafka.common.message.FetchResponseData): 16
Collections.singletonList (java.util.Collections.singletonList): 15
Record (org.apache.kafka.common.record.Record): 15
Arrays.asList (java.util.Arrays.asList): 14
Collections.emptyList (java.util.Collections.emptyList): 14
ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer): 14
Metrics (org.apache.kafka.common.metrics.Metrics): 12
MutableRecordBatch (org.apache.kafka.common.record.MutableRecordBatch): 11