Use of org.apache.kafka.common.protocol.ByteBufferAccessor in project kafka by apache: class RequestHeader, method parse.
public static RequestHeader parse(ByteBuffer buffer) {
    short apiKey = -1;
    try {
        // We derive the header version from the request api version, so we read that first.
        // The request api version is part of `RequestHeaderData`, so we reset the buffer position after the read.
        int position = buffer.position();
        apiKey = buffer.getShort();
        short apiVersion = buffer.getShort();
        short headerVersion = ApiKeys.forId(apiKey).requestHeaderVersion(apiVersion);
        buffer.position(position);
        RequestHeaderData headerData = new RequestHeaderData(new ByteBufferAccessor(buffer), headerVersion);
        // The client ID is nullable on the wire; however, we treat a null client ID as equivalent to an empty client ID.
        if (headerData.clientId() == null) {
            headerData.setClientId("");
        }
        return new RequestHeader(headerData, headerVersion);
    } catch (UnsupportedVersionException e) {
        throw new InvalidRequestException("Unknown API key " + apiKey, e);
    } catch (Throwable ex) {
        throw new InvalidRequestException("Error parsing request header. Our best guess of the apiKey is: " + apiKey, ex);
    }
}
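For context, a minimal round-trip sketch (not taken from the Kafka sources, and assuming the usual org.apache.kafka.common.* imports): a header serialized through a ByteBufferAccessor can be fed straight back into RequestHeader.parse. The API key, version, correlation id, and client id values are purely illustrative; the fluent setters and the size/write methods come from the generated RequestHeaderData class.
short apiVersion = (short) 9;                        // illustrative Metadata request version
short headerVersion = ApiKeys.METADATA.requestHeaderVersion(apiVersion);
RequestHeaderData headerData = new RequestHeaderData()
    .setRequestApiKey(ApiKeys.METADATA.id)
    .setRequestApiVersion(apiVersion)
    .setCorrelationId(42)
    .setClientId("example-client");
ObjectSerializationCache cache = new ObjectSerializationCache();
ByteBuffer buffer = ByteBuffer.allocate(headerData.size(cache, headerVersion));
headerData.write(new ByteBufferAccessor(buffer), cache, headerVersion);
buffer.flip();
RequestHeader header = RequestHeader.parse(buffer);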
Use of org.apache.kafka.common.protocol.ByteBufferAccessor in project kafka by apache: class DefaultKafkaPrincipalBuilder, method deserialize.
@Override
public KafkaPrincipal deserialize(byte[] bytes) {
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    short version = buffer.getShort();
    if (version < DefaultPrincipalData.LOWEST_SUPPORTED_VERSION || version > DefaultPrincipalData.HIGHEST_SUPPORTED_VERSION) {
        throw new SerializationException("Invalid principal data version " + version);
    }
    DefaultPrincipalData data = new DefaultPrincipalData(new ByteBufferAccessor(buffer), version);
    return new KafkaPrincipal(data.type(), data.name(), data.tokenAuthenticated());
}
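As a companion, here is a sketch of the matching serialize side. It assumes only the standard generated-message size/write methods; the real DefaultKafkaPrincipalBuilder may build the byte array through a helper, but the layout consumed by deserialize above is a version short followed by the DefaultPrincipalData fields.
public byte[] serialize(KafkaPrincipal principal) {
    DefaultPrincipalData data = new DefaultPrincipalData()
        .setType(principal.getPrincipalType())
        .setName(principal.getName())
        .setTokenAuthenticated(principal.tokenAuthenticated());
    short version = DefaultPrincipalData.HIGHEST_SUPPORTED_VERSION;
    ObjectSerializationCache cache = new ObjectSerializationCache();
    ByteBuffer buffer = ByteBuffer.allocate(2 + data.size(cache, version));
    buffer.putShort(version);                        // version prefix consumed by deserialize above
    data.write(new ByteBufferAccessor(buffer), cache, version);
    return buffer.array();
}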
Use of org.apache.kafka.common.protocol.ByteBufferAccessor in project kafka by apache: class MessageTest, method verifyWriteRaisesUve.
private void verifyWriteRaisesUve(short version, String problemText, Message message) {
    ObjectSerializationCache cache = new ObjectSerializationCache();
    UnsupportedVersionException e = assertThrows(UnsupportedVersionException.class, () -> {
        int size = message.size(cache, version);
        ByteBuffer buf = ByteBuffer.allocate(size);
        ByteBufferAccessor byteBufferAccessor = new ByteBufferAccessor(buf);
        message.write(byteBufferAccessor, cache, version);
    });
    assertTrue(e.getMessage().contains(problemText),
        "Expected to get an error message about " + problemText + ", but got: " + e.getMessage());
}
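The happy path exercised inside the lambda above is the usual size/allocate/write pattern. A minimal sketch, with a hypothetical helper name, looks like this:
// Hypothetical helper: serialize any generated Message into a ByteBuffer.
private static ByteBuffer toByteBuffer(Message message, short version) {
    ObjectSerializationCache cache = new ObjectSerializationCache();
    ByteBuffer buf = ByteBuffer.allocate(message.size(cache, version));
    message.write(new ByteBufferAccessor(buf), cache, version);
    buf.flip();
    return buf;
}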
Use of org.apache.kafka.common.protocol.ByteBufferAccessor in project kafka by apache: class RawTaggedFieldWriterTest, method testInvalidNextDefinedTag.
@Test
public void testInvalidNextDefinedTag() {
    List<RawTaggedField> tags = Arrays.asList(
        new RawTaggedField(2, new byte[] { 0x1, 0x2, 0x3 }),
        new RawTaggedField(5, new byte[] { 0x4, 0x5, 0x6 }),
        new RawTaggedField(7, new byte[] { 0x0 }));
    RawTaggedFieldWriter writer = RawTaggedFieldWriter.forFields(tags);
    assertEquals(3, writer.numFields());
    try {
        writer.writeRawTags(new ByteBufferAccessor(ByteBuffer.allocate(1024)), 2);
        fail("expected to get RuntimeException");
    } catch (RuntimeException e) {
        assertEquals("Attempted to use tag 2 as an undefined tag.", e.getMessage());
    }
}
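For contrast, a sketch of the valid call pattern under the same API, assuming writeRawTags flushes only the raw tags below the given next defined tag: raw tags 1 and 3 do not collide with a defined tag 2, so they can be written around it.
List<RawTaggedField> tags = Arrays.asList(
    new RawTaggedField(1, new byte[] { 0x1, 0x2, 0x3 }),
    new RawTaggedField(3, new byte[] { 0x4, 0x5, 0x6 }));
RawTaggedFieldWriter writer = RawTaggedFieldWriter.forFields(tags);
ByteBufferAccessor accessor = new ByteBufferAccessor(ByteBuffer.allocate(1024));
writer.writeRawTags(accessor, 2);                    // flushes raw tag 1, stops before defined tag 2
// ... the schema-defined field with tag 2 would be written here ...
writer.writeRawTags(accessor, Integer.MAX_VALUE);    // flushes the remaining raw tag 3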
Use of org.apache.kafka.common.protocol.ByteBufferAccessor in project kafka by apache: class RawTaggedFieldWriterTest, method testWritingZeroRawTaggedFields.
@Test
public void testWritingZeroRawTaggedFields() {
    RawTaggedFieldWriter writer = RawTaggedFieldWriter.forFields(null);
    assertEquals(0, writer.numFields());
    ByteBufferAccessor accessor = new ByteBufferAccessor(ByteBuffer.allocate(0));
    writer.writeRawTags(accessor, Integer.MAX_VALUE);
}
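Finally, a read-side sketch: assuming each raw tagged field is laid out as an unsigned-varint tag, an unsigned-varint size, and then the raw bytes (the tagged-field layout of the flexible protocol versions), the ByteBufferAccessor read methods can recover what the writer emitted. The tag and payload values here are illustrative.
ByteBuffer buffer = ByteBuffer.allocate(64);
RawTaggedFieldWriter.forFields(Collections.singletonList(new RawTaggedField(4, new byte[] { 0x7, 0x8 })))
    .writeRawTags(new ByteBufferAccessor(buffer), Integer.MAX_VALUE);
buffer.flip();
ByteBufferAccessor in = new ByteBufferAccessor(buffer);
int tag = in.readUnsignedVarint();                   // 4
int size = in.readUnsignedVarint();                  // 2
byte[] data = in.readArray(size);                    // { 0x7, 0x8 }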