Example 21 with ByteBufferOutputStream

Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project apache-kafka-on-k8s by banzaicloud.

From class CompressionTypeTest, method testLZ4FramingMagicV0.

@Test
public void testLZ4FramingMagicV0() {
    ByteBuffer buffer = ByteBuffer.allocate(256);
    // For magic v0, Kafka deliberately keeps the historical (broken) LZ4
    // flag-descriptor checksum for compatibility with older clients.
    KafkaLZ4BlockOutputStream out = (KafkaLZ4BlockOutputStream) CompressionType.LZ4.wrapForOutput(new ByteBufferOutputStream(buffer), RecordBatch.MAGIC_VALUE_V0);
    assertTrue(out.useBrokenFlagDescriptorChecksum());
    buffer.rewind();
    // The matching input stream must likewise ignore the bad checksum.
    KafkaLZ4BlockInputStream in = (KafkaLZ4BlockInputStream) CompressionType.LZ4.wrapForInput(buffer, RecordBatch.MAGIC_VALUE_V0, BufferSupplier.NO_CACHING);
    assertTrue(in.ignoreFlagDescriptorChecksum());
}
Also used: ByteBufferOutputStream (org.apache.kafka.common.utils.ByteBufferOutputStream), ByteBuffer (java.nio.ByteBuffer), Test (org.junit.Test)
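
As a rough illustration of the round trip the test relies on, the sketch below compresses a payload through the same wrapForOutput factory and reads it back through wrapForInput. This is a minimal sketch, not part of the original test: the class name Lz4RoundTripSketch is invented here, and the import paths for BufferSupplier and the LZ4 stream classes have moved between Kafka versions.

import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.utils.BufferSupplier;
import org.apache.kafka.common.utils.ByteBufferOutputStream;

public class Lz4RoundTripSketch {
    public static void main(String[] args) throws Exception {
        byte[] payload = "hello, kafka".getBytes();
        // Compress through the same factory method the test exercises.
        ByteBufferOutputStream bufferStream = new ByteBufferOutputStream(256);
        try (OutputStream out = CompressionType.LZ4.wrapForOutput(bufferStream, RecordBatch.MAGIC_VALUE_V0)) {
            out.write(payload);
        }
        // The underlying buffer now holds the LZ4 frame; flip it for reading.
        ByteBuffer compressed = bufferStream.buffer();
        compressed.flip();
        // Decompress with the same magic so the checksum quirk is applied consistently.
        byte[] decompressed = new byte[payload.length];
        try (InputStream in = CompressionType.LZ4.wrapForInput(compressed, RecordBatch.MAGIC_VALUE_V0, BufferSupplier.NO_CACHING)) {
            int pos = 0;
            while (pos < decompressed.length) {
                int n = in.read(decompressed, pos, decompressed.length - pos);
                if (n < 0)
                    break;
                pos += n;
            }
        }
        System.out.println(new String(decompressed)); // prints "hello, kafka"
    }
}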

Example 22 with ByteBufferOutputStream

Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project apache-kafka-on-k8s by banzaicloud.

From class DefaultRecordTest, method testSerdeNoSequence.

@Test
public void testSerdeNoSequence() throws IOException {
    ByteBuffer key = ByteBuffer.wrap("hi".getBytes());
    ByteBuffer value = ByteBuffer.wrap("there".getBytes());
    long baseOffset = 37;
    int offsetDelta = 10;
    long baseTimestamp = System.currentTimeMillis();
    long timestampDelta = 323;
    ByteBufferOutputStream out = new ByteBufferOutputStream(1024);
    DefaultRecord.writeTo(new DataOutputStream(out), offsetDelta, timestampDelta, key, value, new Header[0]);
    ByteBuffer buffer = out.buffer();
    // flip the (possibly reallocated) buffer from write mode to read mode
    buffer.flip();
    DefaultRecord record = DefaultRecord.readFrom(buffer, baseOffset, baseTimestamp, RecordBatch.NO_SEQUENCE, null);
    assertNotNull(record);
    assertEquals(RecordBatch.NO_SEQUENCE, record.sequence());
}
Also used: ByteBufferOutputStream (org.apache.kafka.common.utils.ByteBufferOutputStream), DataOutputStream (java.io.DataOutputStream), ByteBuffer (java.nio.ByteBuffer), Test (org.junit.Test)
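
The main convenience of ByteBufferOutputStream, visible above in the fixed 1024-byte allocation, is that it grows its backing buffer on demand. A minimal sketch of that behavior follows; the class name GrowthSketch is invented for illustration.

import java.nio.ByteBuffer;
import org.apache.kafka.common.utils.ByteBufferOutputStream;

public class GrowthSketch {
    public static void main(String[] args) {
        // Start deliberately small; the stream reallocates as it runs out of room.
        ByteBufferOutputStream out = new ByteBufferOutputStream(ByteBuffer.allocate(4));
        byte[] payload = "a payload larger than four bytes".getBytes();
        out.write(payload, 0, payload.length);
        // buffer() returns the current buffer, which may be a new, larger
        // allocation than the one passed to the constructor.
        ByteBuffer buffer = out.buffer();
        buffer.flip();
        System.out.println(buffer.remaining()); // prints payload.length
    }
}

Because the buffer may be replaced on growth, callers should re-fetch it via buffer() after writing, exactly as testSerdeNoSequence does.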

Example 23 with ByteBufferOutputStream

Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project apache-kafka-on-k8s by banzaicloud.

From class FetcherTest, method testParseCorruptedRecord.

@Test
public void testParseCorruptedRecord() throws Exception {
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));
    byte magic = RecordBatch.MAGIC_VALUE_V1;
    byte[] key = "foo".getBytes();
    byte[] value = "baz".getBytes();
    long offset = 0;
    long timestamp = 500L;
    int size = LegacyRecord.recordSize(magic, key.length, value.length);
    byte attributes = LegacyRecord.computeAttributes(magic, CompressionType.NONE, TimestampType.CREATE_TIME);
    long crc = LegacyRecord.computeChecksum(magic, attributes, timestamp, key, value);
    // write one valid record
    out.writeLong(offset);
    out.writeInt(size);
    LegacyRecord.write(out, magic, crc, LegacyRecord.computeAttributes(magic, CompressionType.NONE, TimestampType.CREATE_TIME), timestamp, key, value);
    // and one invalid record (note the crc)
    out.writeLong(offset + 1);
    out.writeInt(size);
    LegacyRecord.write(out, magic, crc + 1, LegacyRecord.computeAttributes(magic, CompressionType.NONE, TimestampType.CREATE_TIME), timestamp, key, value);
    // write one valid record
    out.writeLong(offset + 2);
    out.writeInt(size);
    LegacyRecord.write(out, magic, crc, LegacyRecord.computeAttributes(magic, CompressionType.NONE, TimestampType.CREATE_TIME), timestamp, key, value);
    // write a record whose size field is invalid (too small to hold any record)
    out.writeLong(offset + 3);
    out.writeInt(1);
    // write one valid record
    out.writeLong(offset + 4);
    out.writeInt(size);
    LegacyRecord.write(out, magic, crc, LegacyRecord.computeAttributes(magic, CompressionType.NONE, TimestampType.CREATE_TIME), timestamp, key, value);
    buffer.flip();
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 0);
    // normal fetch
    assertEquals(1, fetcher.sendFetches());
    client.prepareResponse(fullFetchResponse(tp0, MemoryRecords.readableRecords(buffer), Errors.NONE, 100L, 0));
    consumerClient.poll(0);
    // the first fetchedRecords() should return the first valid message
    assertEquals(1, fetcher.fetchedRecords().get(tp0).size());
    assertEquals(1, subscriptions.position(tp0).longValue());
    ensureBlockOnRecord(1L);
    seekAndConsumeRecord(buffer, 2L);
    ensureBlockOnRecord(3L);
    try {
        // For a record that cannot be retrieved from the iterator, we cannot seek over it within the batch.
        seekAndConsumeRecord(buffer, 4L);
        fail("Should have thrown exception when fail to retrieve a record from iterator.");
    } catch (KafkaException ke) {
    // let it go
    }
    ensureBlockOnRecord(4L);
}
Also used: ByteBufferOutputStream (org.apache.kafka.common.utils.ByteBufferOutputStream), DataOutputStream (java.io.DataOutputStream), KafkaException (org.apache.kafka.common.KafkaException), ByteBuffer (java.nio.ByteBuffer), Test (org.junit.Test)
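
The test hand-builds the legacy (magic v0/v1) log entry layout: an 8-byte offset, a 4-byte size, then the serialized record. The hypothetical helper below (writeLegacyEntry is not part of FetcherTest) captures the pattern repeated four times above; passing crc + 1 instead of crc produces the deliberately corrupt entry.

import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.LegacyRecord;
import org.apache.kafka.common.record.TimestampType;

public class LegacyEntryWriter {
    // Hypothetical helper: writes one legacy log entry (offset, size, record).
    static void writeLegacyEntry(DataOutputStream out, long offset, int size, byte magic,
                                 long crc, long timestamp, byte[] key, byte[] value) throws IOException {
        out.writeLong(offset); // 8-byte logical offset
        out.writeInt(size);    // 4-byte record size
        LegacyRecord.write(out, magic, crc,
                LegacyRecord.computeAttributes(magic, CompressionType.NONE, TimestampType.CREATE_TIME),
                timestamp, key, value);
    }
}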

Example 24 with ByteBufferOutputStream

Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project kafka by apache.

From class SimpleLegacyRecordTest, method testCompressedIterationWithNullValue.

@Test
public void testCompressedIterationWithNullValue() throws Exception {
    ByteBuffer buffer = ByteBuffer.allocate(128);
    DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));
    AbstractLegacyRecordBatch.writeHeader(out, 0L, LegacyRecord.RECORD_OVERHEAD_V1);
    // A compressed legacy record carries its compressed payload in the value
    // field, so a null value leaves nothing to decompress and iteration fails.
    LegacyRecord.write(out, RecordBatch.MAGIC_VALUE_V1, 1L, (byte[]) null, null, CompressionType.GZIP, TimestampType.CREATE_TIME);
    buffer.flip();
    MemoryRecords records = MemoryRecords.readableRecords(buffer);
    assertThrows(InvalidRecordException.class, () -> records.records().iterator().hasNext());
}
Also used: ByteBufferOutputStream (org.apache.kafka.common.utils.ByteBufferOutputStream), DataOutputStream (java.io.DataOutputStream), ByteBuffer (java.nio.ByteBuffer), Test (org.junit.jupiter.api.Test)

Example 25 with ByteBufferOutputStream

Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project kafka by apache.

From class MemoryRecordsBuilderTest, method testRecordTimestampsWithDeleteHorizon.

@ParameterizedTest
@ArgumentsSource(V2MemoryRecordsBuilderArgumentsProvider.class)
public void testRecordTimestampsWithDeleteHorizon(Args args) {
    long deleteHorizon = 100;
    int payloadLen = 1024 * 1024;
    ByteBuffer buffer = ByteBuffer.allocate(payloadLen * 2);
    ByteBufferOutputStream byteBufferOutputStream = new ByteBufferOutputStream(buffer);
    // The final constructor argument sets the batch's delete horizon timestamp,
    // which marks when tombstones in the batch become eligible for removal.
    MemoryRecordsBuilder builder = new MemoryRecordsBuilder(byteBufferOutputStream, args.magic, args.compressionType, TimestampType.CREATE_TIME, 0L, 0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE, false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, 0, deleteHorizon);
    builder.append(50L, "0".getBytes(), "0".getBytes());
    builder.append(100L, "1".getBytes(), null);
    builder.append(150L, "2".getBytes(), "2".getBytes());
    MemoryRecords records = builder.build();
    List<MutableRecordBatch> batches = TestUtils.toList(records.batches());
    assertEquals(OptionalLong.of(deleteHorizon), batches.get(0).deleteHorizonMs());
    CloseableIterator<Record> recordIterator = batches.get(0).streamingIterator(BufferSupplier.create());
    Record record = recordIterator.next();
    assertEquals(50L, record.timestamp());
    record = recordIterator.next();
    assertEquals(100L, record.timestamp());
    record = recordIterator.next();
    assertEquals(150L, record.timestamp());
    recordIterator.close();
}
Also used: ByteBufferOutputStream (org.apache.kafka.common.utils.ByteBufferOutputStream), ByteBuffer (java.nio.ByteBuffer), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest), ArgumentsSource (org.junit.jupiter.params.provider.ArgumentsSource)
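
One detail worth noting above: streamingIterator() may borrow a decompression buffer from the supplied BufferSupplier, which is why the test closes the iterator explicitly. Since Kafka's CloseableIterator redeclares close() without a checked exception, try-with-resources handles this more safely. A minimal sketch, with the class and helper names (BatchIterationSketch, printTimestamps) invented for illustration:

import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MutableRecordBatch;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.utils.BufferSupplier;
import org.apache.kafka.common.utils.CloseableIterator;

public class BatchIterationSketch {
    // Hypothetical helper: print each record timestamp in the first batch,
    // returning any borrowed decompression buffer when the iterator closes.
    static void printTimestamps(MemoryRecords records) {
        MutableRecordBatch first = records.batches().iterator().next();
        try (CloseableIterator<Record> it = first.streamingIterator(BufferSupplier.create())) {
            while (it.hasNext())
                System.out.println(it.next().timestamp());
        }
    }
}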

Aggregations

ByteBufferOutputStream (org.apache.kafka.common.utils.ByteBufferOutputStream): 31
ByteBuffer (java.nio.ByteBuffer): 26
DataOutputStream (java.io.DataOutputStream): 20
Test (org.junit.Test): 13
Test (org.junit.jupiter.api.Test): 10
KafkaException (org.apache.kafka.common.KafkaException): 8
Header (org.apache.kafka.common.header.Header): 6
RecordHeader (org.apache.kafka.common.header.internals.RecordHeader): 6
IOException (java.io.IOException): 3
OutputStream (java.io.OutputStream): 3
ArrayList (java.util.ArrayList): 2
KafkaLZ4BlockInputStream (org.apache.kafka.common.compress.KafkaLZ4BlockInputStream): 2
KafkaLZ4BlockOutputStream (org.apache.kafka.common.compress.KafkaLZ4BlockOutputStream): 2
BatchRetention (org.apache.kafka.common.record.MemoryRecords.RecordFilter.BatchRetention): 2
MemoryRecordsBuilder (org.apache.kafka.common.record.MemoryRecordsBuilder): 2
SnapshotFooterRecord (org.apache.kafka.common.message.SnapshotFooterRecord): 1
SnapshotHeaderRecord (org.apache.kafka.common.message.SnapshotHeaderRecord): 1
BatchRetentionResult (org.apache.kafka.common.record.MemoryRecords.RecordFilter.BatchRetentionResult): 1
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 1
ArgumentsSource (org.junit.jupiter.params.provider.ArgumentsSource): 1