Search in sources :

Example 16 with ByteBufferOutputStream

Usage of org.apache.kafka.common.utils.ByteBufferOutputStream in the Apache Kafka project (apache/kafka).

Source: class LegacyRecord, method write.

/**
 * Writes a legacy record into {@code buffer} by delegating to the
 * stream-based {@code write} overload.
 *
 * @param buffer          destination buffer for the serialized record
 * @param magic           record magic byte (format version)
 * @param timestamp       record timestamp
 * @param key             record key, may be null
 * @param value           record value, may be null
 * @param compressionType compression codec to apply
 * @param timestampType   whether the timestamp is create-time or log-append-time
 * @throws KafkaException wrapping any IOException from the underlying stream
 */
private static void write(ByteBuffer buffer, byte magic, long timestamp, ByteBuffer key, ByteBuffer value, CompressionType compressionType, TimestampType timestampType) {
    // Neither stream constructor performs I/O, so they can sit outside the try.
    ByteBufferOutputStream bufferStream = new ByteBufferOutputStream(buffer);
    DataOutputStream dataStream = new DataOutputStream(bufferStream);
    try {
        write(dataStream, magic, timestamp, key, value, compressionType, timestampType);
    } catch (IOException e) {
        // Surface stream failures as the project's unchecked exception type.
        throw new KafkaException(e);
    }
}
Also used : ByteBufferOutputStream(org.apache.kafka.common.utils.ByteBufferOutputStream) DataOutputStream(java.io.DataOutputStream) KafkaException(org.apache.kafka.common.KafkaException) IOException(java.io.IOException)

Example 17 with ByteBufferOutputStream

Usage of org.apache.kafka.common.utils.ByteBufferOutputStream in the Apache Kafka project (apache/kafka).

Source: class SimpleRecordTest, method testCompressedIterationWithEmptyRecords.

/**
 * Verifies that iterating a compressed record whose payload contains no
 * inner records raises {@link InvalidRecordException}.
 */
@Test(expected = InvalidRecordException.class)
public void testCompressedIterationWithEmptyRecords() throws Exception {
    // Produce an "empty" gzip payload: open the compressed stream and close it
    // immediately without writing any inner records.
    ByteBuffer compressed = ByteBuffer.allocate(64);
    OutputStream gzipStream = CompressionType.GZIP.wrapForOutput(new ByteBufferOutputStream(compressed), Record.MAGIC_VALUE_V1, 64);
    gzipStream.close();
    compressed.flip();

    // Wrap that payload in a well-formed record header so only the inner
    // content is invalid.
    ByteBuffer recordBuffer = ByteBuffer.allocate(128);
    DataOutputStream dataOut = new DataOutputStream(new ByteBufferOutputStream(recordBuffer));
    LogEntry.writeHeader(dataOut, 0L, Record.RECORD_OVERHEAD_V1 + compressed.remaining());
    Record.write(dataOut, Record.CURRENT_MAGIC_VALUE, 1L, null, Utils.toArray(compressed), CompressionType.GZIP, TimestampType.CREATE_TIME);
    recordBuffer.flip();

    // Iteration must throw before yielding any record; reaching the body means failure.
    MemoryRecords memoryRecords = MemoryRecords.readableRecords(recordBuffer);
    for (Record record : memoryRecords.records())
        fail("Iteration should have caused invalid record error");
}
Also used : ByteBufferOutputStream(org.apache.kafka.common.utils.ByteBufferOutputStream) DataOutputStream(java.io.DataOutputStream) OutputStream(java.io.OutputStream) DataOutputStream(java.io.DataOutputStream) ByteBufferOutputStream(org.apache.kafka.common.utils.ByteBufferOutputStream) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)

Example 18 with ByteBufferOutputStream

Usage of org.apache.kafka.common.utils.ByteBufferOutputStream in the Apache Kafka project (apache/kafka).

Source: class FetcherTest, method testParseInvalidRecord.

/**
 * Verifies that the fetcher raises {@link KafkaException} when a fetched
 * record has a corrupt CRC, and that the consumer's position does not
 * advance past the bad data.
 */
@Test
public void testParseInvalidRecord() throws Exception {
    ByteBuffer fetchBuffer = ByteBuffer.allocate(1024);
    DataOutputStream dataOut = new DataOutputStream(new ByteBufferOutputStream(fetchBuffer));

    byte magicByte = Record.CURRENT_MAGIC_VALUE;
    byte[] recordKey = "foo".getBytes();
    byte[] recordValue = "baz".getBytes();
    long recordOffset = 0;
    long recordTimestamp = 500L;
    int recordSize = Record.recordSize(recordKey, recordValue);
    byte recordAttributes = Record.computeAttributes(magicByte, CompressionType.NONE, TimestampType.CREATE_TIME);
    long validCrc = Record.computeChecksum(magicByte, recordAttributes, recordTimestamp, recordKey, recordValue);

    // First entry: a record with a correct checksum.
    dataOut.writeLong(recordOffset);
    dataOut.writeInt(recordSize);
    Record.write(dataOut, magicByte, validCrc, Record.computeAttributes(magicByte, CompressionType.NONE, TimestampType.CREATE_TIME), recordTimestamp, recordKey, recordValue);

    // Second entry: identical record but with a deliberately wrong checksum.
    dataOut.writeLong(recordOffset);
    dataOut.writeInt(recordSize);
    Record.write(dataOut, magicByte, validCrc + 1, Record.computeAttributes(magicByte, CompressionType.NONE, TimestampType.CREATE_TIME), recordTimestamp, recordKey, recordValue);
    fetchBuffer.flip();

    subscriptions.assignFromUser(singleton(tp));
    subscriptions.seek(tp, 0);

    // Issue a normal fetch whose response contains the corrupt data.
    assertEquals(1, fetcher.sendFetches());
    client.prepareResponse(fetchResponse(MemoryRecords.readableRecords(fetchBuffer), Errors.NONE, 100L, 0));
    consumerClient.poll(0);
    try {
        fetcher.fetchedRecords();
        fail("fetchedRecords should have raised");
    } catch (KafkaException e) {
        // The position should not advance since no data has been returned.
        assertEquals(0, subscriptions.position(tp).longValue());
    }
}
Also used : ByteBufferOutputStream(org.apache.kafka.common.utils.ByteBufferOutputStream) DataOutputStream(java.io.DataOutputStream) KafkaException(org.apache.kafka.common.KafkaException) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)

Example 19 with ByteBufferOutputStream

Usage of org.apache.kafka.common.utils.ByteBufferOutputStream in the Apache Kafka project (apache/kafka).

Source: class Record, method write.

/**
 * Serializes a record into {@code buffer} via the stream-based
 * {@code write} overload, converting checked I/O failures into
 * {@link KafkaException}.
 *
 * @param buffer          destination buffer for the serialized record
 * @param magic           record magic byte (format version)
 * @param timestamp       record timestamp
 * @param key             record key, may be null
 * @param value           record value, may be null
 * @param compressionType compression codec to apply
 * @param timestampType   whether the timestamp is create-time or log-append-time
 */
private static void write(ByteBuffer buffer, byte magic, long timestamp, ByteBuffer key, ByteBuffer value, CompressionType compressionType, TimestampType timestampType) {
    try {
        // Adapt the buffer to the DataOutput API expected by the overload.
        DataOutputStream dataStream = new DataOutputStream(new ByteBufferOutputStream(buffer));
        write(dataStream, magic, timestamp, key, value, compressionType, timestampType);
    } catch (IOException e) {
        // Callers expect the project's unchecked exception, not IOException.
        throw new KafkaException(e);
    }
}
Also used : ByteBufferOutputStream(org.apache.kafka.common.utils.ByteBufferOutputStream) DataOutputStream(java.io.DataOutputStream) KafkaException(org.apache.kafka.common.KafkaException) IOException(java.io.IOException)

Example 20 with ByteBufferOutputStream

Usage of org.apache.kafka.common.utils.ByteBufferOutputStream in the apache-kafka-on-k8s project (banzaicloud).

Source: class SimpleLegacyRecordTest, method testCompressedIterationWithNullValue.

/**
 * Verifies that iterating a compressed legacy record batch whose value is
 * null raises {@link InvalidRecordException}.
 */
@Test(expected = InvalidRecordException.class)
public void testCompressedIterationWithNullValue() throws Exception {
    // Write a batch header followed by a GZIP-marked record with a null value.
    ByteBuffer batchBuffer = ByteBuffer.allocate(128);
    DataOutputStream dataOut = new DataOutputStream(new ByteBufferOutputStream(batchBuffer));
    AbstractLegacyRecordBatch.writeHeader(dataOut, 0L, LegacyRecord.RECORD_OVERHEAD_V1);
    LegacyRecord.write(dataOut, RecordBatch.MAGIC_VALUE_V1, 1L, (byte[]) null, null, CompressionType.GZIP, TimestampType.CREATE_TIME);
    batchBuffer.flip();

    // hasNext() forces decompression of the (null) payload, which must throw
    // before any record is produced.
    MemoryRecords memoryRecords = MemoryRecords.readableRecords(batchBuffer);
    if (memoryRecords.records().iterator().hasNext())
        fail("Iteration should have caused invalid record error");
}
Also used : ByteBufferOutputStream(org.apache.kafka.common.utils.ByteBufferOutputStream) DataOutputStream(java.io.DataOutputStream) ByteBuffer(java.nio.ByteBuffer) Test(org.junit.Test)

Aggregations

ByteBufferOutputStream (org.apache.kafka.common.utils.ByteBufferOutputStream)31 ByteBuffer (java.nio.ByteBuffer)26 DataOutputStream (java.io.DataOutputStream)20 Test (org.junit.Test)13 Test (org.junit.jupiter.api.Test)10 KafkaException (org.apache.kafka.common.KafkaException)8 Header (org.apache.kafka.common.header.Header)6 RecordHeader (org.apache.kafka.common.header.internals.RecordHeader)6 IOException (java.io.IOException)3 OutputStream (java.io.OutputStream)3 ArrayList (java.util.ArrayList)2 KafkaLZ4BlockInputStream (org.apache.kafka.common.compress.KafkaLZ4BlockInputStream)2 KafkaLZ4BlockOutputStream (org.apache.kafka.common.compress.KafkaLZ4BlockOutputStream)2 BatchRetention (org.apache.kafka.common.record.MemoryRecords.RecordFilter.BatchRetention)2 MemoryRecordsBuilder (org.apache.kafka.common.record.MemoryRecordsBuilder)2 SnapshotFooterRecord (org.apache.kafka.common.message.SnapshotFooterRecord)1 SnapshotHeaderRecord (org.apache.kafka.common.message.SnapshotHeaderRecord)1 BatchRetentionResult (org.apache.kafka.common.record.MemoryRecords.RecordFilter.BatchRetentionResult)1 ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)1 ArgumentsSource (org.junit.jupiter.params.provider.ArgumentsSource)1