Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project kafka by apache.
The class LegacyRecord, method write.

private static void write(ByteBuffer buffer, byte magic, long timestamp, ByteBuffer key, ByteBuffer value, CompressionType compressionType, TimestampType timestampType) {
    try {
        DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));
        write(out, magic, timestamp, key, value, compressionType, timestampType);
    } catch (IOException e) {
        throw new KafkaException(e);
    }
}
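All of the call sites on this page share the same core pattern: wrap a ByteBuffer in ByteBufferOutputStream so that a DataOutputStream can encode primitives straight into it. Below is a minimal standalone sketch of that pattern; it is illustrative only, and assumes a buffer large enough that no reallocation occurs (the stream can grow the buffer, see the note further down).

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.kafka.common.utils.ByteBufferOutputStream;

public class ByteBufferOutputStreamSketch {
    public static void main(String[] args) throws IOException {
        // Adapt a ByteBuffer to the OutputStream interface so DataOutputStream
        // can encode primitives directly into it.
        ByteBuffer buffer = ByteBuffer.allocate(64);
        DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));

        out.writeLong(0L); // e.g. an offset field
        out.writeInt(42);  // e.g. a size field

        // Flip to switch the buffer from writing to reading, as the snippets on
        // this page do before handing it to MemoryRecords.readableRecords.
        buffer.flip();
        System.out.println(buffer.getLong()); // 0
        System.out.println(buffer.getInt());  // 42
    }
}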
Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project kafka by apache.
The class SimpleRecordTest, method testCompressedIterationWithEmptyRecords.

@Test(expected = InvalidRecordException.class)
public void testCompressedIterationWithEmptyRecords() throws Exception {
    ByteBuffer emptyCompressedValue = ByteBuffer.allocate(64);
    OutputStream gzipOutput = CompressionType.GZIP.wrapForOutput(new ByteBufferOutputStream(emptyCompressedValue), Record.MAGIC_VALUE_V1, 64);
    gzipOutput.close();
    emptyCompressedValue.flip();

    ByteBuffer buffer = ByteBuffer.allocate(128);
    DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));
    LogEntry.writeHeader(out, 0L, Record.RECORD_OVERHEAD_V1 + emptyCompressedValue.remaining());
    Record.write(out, Record.CURRENT_MAGIC_VALUE, 1L, null, Utils.toArray(emptyCompressedValue), CompressionType.GZIP, TimestampType.CREATE_TIME);
    buffer.flip();

    MemoryRecords records = MemoryRecords.readableRecords(buffer);
    for (Record record : records.records())
        fail("Iteration should have caused invalid record error");
}
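The "empty" compressed value above is not zero bytes long: opening and closing a GZIP stream without writing any payload still emits the GZIP header and trailer. A plain-JDK sketch of the same effect, using java.util.zip.GZIPOutputStream instead of Kafka's wrapForOutput wrapper:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;

public class EmptyGzipSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        // Open and immediately close a GZIP stream with no payload bytes.
        new GZIPOutputStream(sink).close();
        // Header plus trailer still come out (about 20 bytes), which is why
        // emptyCompressedValue.remaining() is non-zero in the test above.
        System.out.println("empty gzip stream size: " + sink.size() + " bytes");
    }
}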
Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project kafka by apache.
The class FetcherTest, method testParseInvalidRecord.

@Test
public void testParseInvalidRecord() throws Exception {
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));
    byte magic = Record.CURRENT_MAGIC_VALUE;
    byte[] key = "foo".getBytes();
    byte[] value = "baz".getBytes();
    long offset = 0;
    long timestamp = 500L;
    int size = Record.recordSize(key, value);
    byte attributes = Record.computeAttributes(magic, CompressionType.NONE, TimestampType.CREATE_TIME);
    long crc = Record.computeChecksum(magic, attributes, timestamp, key, value);

    // write one valid record
    out.writeLong(offset);
    out.writeInt(size);
    Record.write(out, magic, crc, attributes, timestamp, key, value);

    // and one invalid record (note the corrupted crc)
    out.writeLong(offset);
    out.writeInt(size);
    Record.write(out, magic, crc + 1, attributes, timestamp, key, value);
    buffer.flip();

    subscriptions.assignFromUser(singleton(tp));
    subscriptions.seek(tp, 0);

    // normal fetch
    assertEquals(1, fetcher.sendFetches());
    client.prepareResponse(fetchResponse(MemoryRecords.readableRecords(buffer), Errors.NONE, 100L, 0));
    consumerClient.poll(0);
    try {
        fetcher.fetchedRecords();
        fail("fetchedRecords should have raised");
    } catch (KafkaException e) {
        // the position should not advance since no data has been returned
        assertEquals(0, subscriptions.position(tp).longValue());
    }
}
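The invalid record is manufactured simply by storing crc + 1 in place of the computed checksum, so a reader that recomputes the CRC over the record bytes detects the mismatch. A hedged sketch of that verification idea, using the JDK's CRC32 rather than Kafka's internal Record.computeChecksum:

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class CrcMismatchSketch {
    static long crcOf(byte[] payload) {
        CRC32 crc = new CRC32();
        crc.update(payload);
        return crc.getValue();
    }

    public static void main(String[] args) {
        byte[] payload = "foo-baz".getBytes(StandardCharsets.UTF_8);
        long stored = crcOf(payload) + 1; // corrupt the stored checksum, as the test does
        long computed = crcOf(payload);   // what a reader recomputes from the raw bytes
        // A reader rejects the record when stored and recomputed values differ;
        // in the consumer this surfaces as the KafkaException the test expects.
        System.out.println("record valid? " + (stored == computed)); // false
    }
}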
Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project kafka by apache.
The class Record, method write.

private static void write(ByteBuffer buffer, byte magic, long timestamp, ByteBuffer key, ByteBuffer value, CompressionType compressionType, TimestampType timestampType) {
    try {
        DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));
        write(out, magic, timestamp, key, value, compressionType, timestampType);
    } catch (IOException e) {
        throw new KafkaException(e);
    }
}
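One caveat when writing through this adapter: in the Kafka versions these snippets come from, ByteBufferOutputStream grows the backing buffer when a write would overflow it, replacing it with a larger allocation. After such a write the original ByteBuffer reference can be stale, so re-read the buffer through the stream's buffer() accessor. A small sketch (treating the exact growth policy as an implementation detail):

import java.nio.ByteBuffer;

import org.apache.kafka.common.utils.ByteBufferOutputStream;

public class GrowthSketch {
    public static void main(String[] args) throws Exception {
        ByteBuffer original = ByteBuffer.allocate(4);
        ByteBufferOutputStream out = new ByteBufferOutputStream(original);
        out.write(new byte[16]); // exceeds the initial 4-byte capacity
        // The stream now holds a larger, reallocated buffer; the original
        // reference no longer reflects what was written.
        System.out.println(out.buffer().capacity() >= 16); // true
        System.out.println(out.buffer() == original);      // false after growth
        out.close();
    }
}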
Use of org.apache.kafka.common.utils.ByteBufferOutputStream in project apache-kafka-on-k8s by banzaicloud.
The class SimpleLegacyRecordTest, method testCompressedIterationWithNullValue.

@Test(expected = InvalidRecordException.class)
public void testCompressedIterationWithNullValue() throws Exception {
    ByteBuffer buffer = ByteBuffer.allocate(128);
    DataOutputStream out = new DataOutputStream(new ByteBufferOutputStream(buffer));
    AbstractLegacyRecordBatch.writeHeader(out, 0L, LegacyRecord.RECORD_OVERHEAD_V1);
    LegacyRecord.write(out, RecordBatch.MAGIC_VALUE_V1, 1L, (byte[]) null, null, CompressionType.GZIP, TimestampType.CREATE_TIME);
    buffer.flip();

    MemoryRecords records = MemoryRecords.readableRecords(buffer);
    if (records.records().iterator().hasNext())
        fail("Iteration should have caused invalid record error");
}
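Both of the iteration tests above rely on parsing being lazy: MemoryRecords.readableRecords only wraps the buffer, and a malformed record (here, a GZIP-compressed wrapper with a null value) is detected only once the iterator is advanced. A sketch of a defensive read loop built on that behavior; note the InvalidRecordException import path matches the 1.x-era code in the last snippet and may differ in other Kafka versions:

import java.nio.ByteBuffer;

import org.apache.kafka.common.record.InvalidRecordException;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.Record;

public class LazyIterationSketch {
    public static void main(String[] args) {
        // An arbitrary (here: empty) buffer; real callers would hold fetched bytes.
        MemoryRecords records = MemoryRecords.readableRecords(ByteBuffer.allocate(0));
        try {
            // Malformed records only surface while iterating, which is why the
            // tests above must drive the iterator to trigger the failure.
            for (Record record : records.records())
                System.out.println(record);
        } catch (InvalidRecordException e) {
            System.err.println("rejecting malformed batch: " + e.getMessage());
        }
    }
}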