Search in sources:

Example 1 with SimpleRecord

use of org.apache.kafka.common.record.SimpleRecord in project apache-kafka-on-k8s by banzaicloud.

From class RequestResponseTest, method createFetchResponse.

/**
 * Builds a two-partition FetchResponse fixture: partition test-0 carries a
 * single uncompressed record, partition test-1 is empty but reports one
 * aborted transaction. Uses the invalid session id (sessionless response).
 */
private FetchResponse createFetchResponse() {
    MemoryRecords singleRecord = MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
    LinkedHashMap<TopicPartition, FetchResponse.PartitionData> partitions = new LinkedHashMap<>();
    // Partition 0: has data, no aborted transactions.
    partitions.put(new TopicPartition("test", 0), new FetchResponse.PartitionData(Errors.NONE, 1000000, FetchResponse.INVALID_LAST_STABLE_OFFSET, 0L, null, singleRecord));
    // Partition 1: empty record set with one aborted transaction marker.
    List<FetchResponse.AbortedTransaction> aborted = Collections.singletonList(new FetchResponse.AbortedTransaction(234L, 999L));
    partitions.put(new TopicPartition("test", 1), new FetchResponse.PartitionData(Errors.NONE, 1000000, FetchResponse.INVALID_LAST_STABLE_OFFSET, 0L, aborted, MemoryRecords.EMPTY));
    return new FetchResponse(Errors.NONE, partitions, 25, INVALID_SESSION_ID);
}
Also used : TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) LinkedHashMap(java.util.LinkedHashMap) MemoryRecords(org.apache.kafka.common.record.MemoryRecords)

Example 2 with SimpleRecord

use of org.apache.kafka.common.record.SimpleRecord in project apache-kafka-on-k8s by banzaicloud.

From class RequestResponseTest, method createFetchResponse (overload taking a session id).

/**
 * Builds the same two-partition FetchResponse fixture as the no-arg overload,
 * but tagged with the caller-supplied fetch session id.
 *
 * @param sessionId fetch session id to embed in the response
 */
private FetchResponse createFetchResponse(int sessionId) {
    MemoryRecords singleRecord = MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
    LinkedHashMap<TopicPartition, FetchResponse.PartitionData> partitions = new LinkedHashMap<>();
    // Partition 0: has data, no aborted transactions.
    partitions.put(new TopicPartition("test", 0), new FetchResponse.PartitionData(Errors.NONE, 1000000, FetchResponse.INVALID_LAST_STABLE_OFFSET, 0L, null, singleRecord));
    // Partition 1: empty record set with one aborted transaction marker.
    List<FetchResponse.AbortedTransaction> aborted = Collections.singletonList(new FetchResponse.AbortedTransaction(234L, 999L));
    partitions.put(new TopicPartition("test", 1), new FetchResponse.PartitionData(Errors.NONE, 1000000, FetchResponse.INVALID_LAST_STABLE_OFFSET, 0L, aborted, MemoryRecords.EMPTY));
    return new FetchResponse(Errors.NONE, partitions, 25, sessionId);
}
Also used : TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) LinkedHashMap(java.util.LinkedHashMap) MemoryRecords(org.apache.kafka.common.record.MemoryRecords)

Example 3 with SimpleRecord

use of org.apache.kafka.common.record.SimpleRecord in project apache-kafka-on-k8s by banzaicloud.

From class RequestResponseTest, method createProduceRequest.

/**
 * Builds a ProduceRequest for the given request version with a single
 * record on partition test-0.
 *
 * <p>Versions below 2 are rejected because this helper only maps v2 to
 * magic v1 and v3+ to magic v2.
 *
 * @param version produce request version (must be >= 2)
 * @throws IllegalArgumentException if {@code version < 2}
 */
private ProduceRequest createProduceRequest(int version) {
    if (version < 2)
        // BUG FIX: the original message claimed "version 2 is not supported",
        // but version 2 IS supported — it is versions below 2 that are rejected.
        throw new IllegalArgumentException("Produce request version " + version + " is not supported");
    // v2 requests carry magic v1 batches; v3 and later carry magic v2.
    byte magic = version == 2 ? RecordBatch.MAGIC_VALUE_V1 : RecordBatch.MAGIC_VALUE_V2;
    MemoryRecords records = MemoryRecords.withRecords(magic, CompressionType.NONE, new SimpleRecord("woot".getBytes()));
    Map<TopicPartition, MemoryRecords> produceData = Collections.singletonMap(new TopicPartition("test", 0), records);
    return ProduceRequest.Builder.forMagic(magic, (short) 1, 5000, produceData, "transactionalId").build((short) version);
}
Also used : TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) MemoryRecords(org.apache.kafka.common.record.MemoryRecords)

Example 4 with SimpleRecord

use of org.apache.kafka.common.record.SimpleRecord in project apache-kafka-on-k8s by banzaicloud.

From class FetcherTest, method testSkippingAbortedTransactions.

@Test
public void testSkippingAbortedTransactions() {
    // A READ_COMMITTED fetcher must drop records belonging to aborted transactions.
    Fetcher<byte[], byte[]> readCommittedFetcher = createFetcher(subscriptions, new Metrics(), new ByteArrayDeserializer(), new ByteArrayDeserializer(), Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED);
    // Build a buffer holding two transactional records followed by an ABORT marker.
    ByteBuffer recordBuffer = ByteBuffer.allocate(1024);
    int nextOffset = 0;
    nextOffset += appendTransactionalRecords(recordBuffer, 1L, nextOffset, new SimpleRecord(time.milliseconds(), "key".getBytes(), "value".getBytes()), new SimpleRecord(time.milliseconds(), "key".getBytes(), "value".getBytes()));
    abortTransaction(recordBuffer, 1L, nextOffset);
    recordBuffer.flip();
    // The fetch response advertises the producer's transaction as aborted from offset 0.
    List<FetchResponse.AbortedTransaction> abortedTxns = new ArrayList<>();
    abortedTxns.add(new FetchResponse.AbortedTransaction(1, 0));
    MemoryRecords fetchedData = MemoryRecords.readableRecords(recordBuffer);
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // Issue a normal fetch and feed back the prepared response.
    assertEquals(1, readCommittedFetcher.sendFetches());
    assertFalse(readCommittedFetcher.hasCompletedFetches());
    client.prepareResponse(fullFetchResponseWithAbortedTransactions(fetchedData, abortedTxns, Errors.NONE, 100L, 100L, 0));
    consumerClient.poll(0);
    assertTrue(readCommittedFetcher.hasCompletedFetches());
    // Every record was part of the aborted transaction, so nothing should surface.
    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> fetchedRecords = readCommittedFetcher.fetchedRecords();
    assertFalse(fetchedRecords.containsKey(tp0));
}
Also used : ArrayList(java.util.ArrayList) FetchResponse(org.apache.kafka.common.requests.FetchResponse) ByteBuffer(java.nio.ByteBuffer) Metrics(org.apache.kafka.common.metrics.Metrics) TopicPartition(org.apache.kafka.common.TopicPartition) SimpleRecord(org.apache.kafka.common.record.SimpleRecord) List(java.util.List) ArrayList(java.util.ArrayList) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) MemoryRecords(org.apache.kafka.common.record.MemoryRecords) Test(org.junit.Test)

Example 5 with SimpleRecord

use of org.apache.kafka.common.record.SimpleRecord in project apache-kafka-on-k8s by banzaicloud.

From class FetcherTest, method testParseInvalidRecordBatch.

@Test
public void testParseInvalidRecordBatch() throws Exception {
    // Build a valid v2 batch of three records, then corrupt it so the CRC check fails.
    MemoryRecords validBatch = MemoryRecords.withRecords(RecordBatch.MAGIC_VALUE_V2, 0L, CompressionType.NONE, TimestampType.CREATE_TIME, new SimpleRecord(1L, "a".getBytes(), "1".getBytes()), new SimpleRecord(2L, "b".getBytes(), "2".getBytes()), new SimpleRecord(3L, "c".getBytes(), "3".getBytes()));
    ByteBuffer corrupted = validBatch.buffer();
    // flip some bits to fail the crc
    corrupted.putInt(32, corrupted.get(32) ^ 87238423);
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // normal fetch
    assertEquals(1, fetcher.sendFetches());
    client.prepareResponse(fullFetchResponse(tp0, MemoryRecords.readableRecords(corrupted), Errors.NONE, 100L, 0));
    consumerClient.poll(0);
    try {
        fetcher.fetchedRecords();
        fail("fetchedRecords should have raised");
    } catch (KafkaException e) {
        // the position should not advance since no data has been returned
        assertEquals(0, subscriptions.position(tp0).longValue());
    }
}
Also used : SimpleRecord(org.apache.kafka.common.record.SimpleRecord) KafkaException(org.apache.kafka.common.KafkaException) ByteBuffer(java.nio.ByteBuffer) MemoryRecords(org.apache.kafka.common.record.MemoryRecords) Test(org.junit.Test)

Aggregations

SimpleRecord (org.apache.kafka.common.record.SimpleRecord)15 TopicPartition (org.apache.kafka.common.TopicPartition)13 MemoryRecords (org.apache.kafka.common.record.MemoryRecords)12 Test (org.junit.Test)11 ByteBuffer (java.nio.ByteBuffer)9 ArrayList (java.util.ArrayList)9 List (java.util.List)9 Metrics (org.apache.kafka.common.metrics.Metrics)8 FetchResponse (org.apache.kafka.common.requests.FetchResponse)8 ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord)6 ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer)6 LinkedHashMap (java.util.LinkedHashMap)3 HashSet (java.util.HashSet)2 LegacyRecord (org.apache.kafka.common.record.LegacyRecord)2 MemoryRecordsBuilder (org.apache.kafka.common.record.MemoryRecordsBuilder)2 Record (org.apache.kafka.common.record.Record)2 StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)2 HashMap (java.util.HashMap)1 MockClient (org.apache.kafka.clients.MockClient)1 KafkaException (org.apache.kafka.common.KafkaException)1