Usage of org.apache.kafka.common.Uuid in the Apache Kafka project:
class SimpleExampleMessageTest, method testTaggedUuid.
@Test
public void testTaggedUuid() {
    // The tagged UUID field should round-trip at its default value.
    Uuid defaultTaggedUuid = Uuid.fromString("H3KKO4NTRPaCWtEmm3vW7A");
    testRoundTrip(new SimpleExampleMessageData(),
        message -> assertEquals(defaultTaggedUuid, message.taggedUuid()));

    // An explicitly assigned UUID should round-trip as well.
    Uuid assignedUuid = Uuid.randomUuid();
    testRoundTrip(new SimpleExampleMessageData().setTaggedUuid(assignedUuid),
        message -> assertEquals(assignedUuid, message.taggedUuid()));
}
Usage of org.apache.kafka.common.Uuid in the Apache Kafka project:
class SimpleExampleMessageTest, method shouldRoundTripFieldThroughBuffer.
@Test
public void shouldRoundTripFieldThroughBuffer() {
    // Populate a message with a process id and a zero-copy payload.
    final Uuid processId = Uuid.randomUuid();
    final ByteBuffer payload = ByteBuffer.wrap(new byte[] {1, 2, 3});
    final SimpleExampleMessageData written = new SimpleExampleMessageData();
    written.setProcessId(processId);
    written.setZeroCopyByteBuffer(payload);

    // Serialize at version 1, then deserialize into a fresh instance.
    final ByteBuffer serialized = MessageUtil.toByteBuffer(written, (short) 1);
    final SimpleExampleMessageData read = new SimpleExampleMessageData();
    read.read(new ByteBufferAccessor(serialized), (short) 1);

    // Rewind in case serialization advanced the payload's position, so the
    // ByteBuffer equality check below compares the full content.
    payload.rewind();

    assertEquals(processId, read.processId());
    assertEquals(payload, read.zeroCopyByteBuffer());
    // The nullable field was never set, so it reads back as the empty buffer.
    assertEquals(ByteUtils.EMPTY_BUF, read.nullableZeroCopyByteBuffer());
}
Usage of org.apache.kafka.common.Uuid in the Apache Kafka project:
class SimpleExampleMessageTest, method shouldImplementEqualsAndHashCode.
@Test
public void shouldImplementEqualsAndHashCode() {
    final Uuid processId = Uuid.randomUuid();
    final ByteBuffer payload = ByteBuffer.wrap(new byte[] {1, 2, 3});

    // Two messages built from identical field values...
    final SimpleExampleMessageData first = new SimpleExampleMessageData();
    first.setProcessId(processId);
    first.setZeroCopyByteBuffer(payload);
    final SimpleExampleMessageData second = new SimpleExampleMessageData();
    second.setProcessId(processId);
    second.setZeroCopyByteBuffer(payload);

    // ...must agree on equals, hashCode, and toString.
    assertEquals(first, second);
    assertEquals(first.hashCode(), second.hashCode());
    assertEquals(first.toString(), second.toString());

    // The contract must still hold after the nullable field is set on both.
    first.setNullableZeroCopyByteBuffer(payload);
    second.setNullableZeroCopyByteBuffer(payload);
    assertEquals(first, second);
    assertEquals(first.hashCode(), second.hashCode());
    assertEquals(first.toString(), second.toString());

    // ...and again after it is cleared back to null on both.
    first.setNullableZeroCopyByteBuffer(null);
    second.setNullableZeroCopyByteBuffer(null);
    assertEquals(first, second);
    assertEquals(first.hashCode(), second.hashCode());
    assertEquals(first.toString(), second.toString());
}
Usage of org.apache.kafka.common.Uuid in the Apache Kafka project:
class RequestResponseTest, method createFetchResponse(boolean).
/**
 * Builds a two-partition FetchResponse for topic "test" (one shared random
 * topic id), serializes it at the latest FETCH version, and parses it back.
 *
 * @param includeAborted whether partition 1 reports one aborted transaction
 * @return the round-tripped FetchResponse
 */
private FetchResponse createFetchResponse(boolean includeAborted) {
    final Uuid topicId = Uuid.randomUuid();
    final LinkedHashMap<TopicIdPartition, FetchResponseData.PartitionData> responseData =
        new LinkedHashMap<>();

    // Partition 0 carries a single record batch.
    final MemoryRecords records =
        MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
    responseData.put(
        new TopicIdPartition(topicId, new TopicPartition("test", 0)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(0)
            .setHighWatermark(1000000)
            .setLogStartOffset(0)
            .setRecords(records));

    // Partition 1 carries no records and optionally one aborted transaction.
    final List<FetchResponseData.AbortedTransaction> abortedTransactions = includeAborted
        ? singletonList(new FetchResponseData.AbortedTransaction().setProducerId(234L).setFirstOffset(999L))
        : emptyList();
    responseData.put(
        new TopicIdPartition(topicId, new TopicPartition("test", 1)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(1)
            .setHighWatermark(1000000)
            .setLogStartOffset(0)
            .setAbortedTransactions(abortedTransactions));

    // Round-trip through the wire format at the latest FETCH version.
    return FetchResponse.parse(
        FetchResponse.of(Errors.NONE, 25, INVALID_SESSION_ID, responseData)
            .serialize(FETCH.latestVersion()),
        FETCH.latestVersion());
}
Usage of org.apache.kafka.common.Uuid in the Apache Kafka project:
class RequestResponseTest, method createFetchResponse(int).
/**
 * Builds a two-partition FetchResponse for topic "test" with the supplied
 * session id, serializes it at the latest FETCH version, and parses it back.
 *
 * @param sessionId the fetch session id to embed in the response
 * @return the round-tripped FetchResponse
 */
private FetchResponse createFetchResponse(int sessionId) {
    // Both partitions share one randomly generated topic id; a plain local is
    // used (consistent with the boolean overload) rather than a single-entry
    // Map<String, Uuid> indirection.
    final Uuid topicId = Uuid.randomUuid();
    final LinkedHashMap<TopicIdPartition, FetchResponseData.PartitionData> responseData =
        new LinkedHashMap<>();

    // Partition 0 carries a single record batch.
    final MemoryRecords records =
        MemoryRecords.withRecords(CompressionType.NONE, new SimpleRecord("blah".getBytes()));
    responseData.put(
        new TopicIdPartition(topicId, new TopicPartition("test", 0)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(0)
            .setHighWatermark(1000000)
            .setLogStartOffset(0)
            .setRecords(records));

    // Partition 1 carries no records and reports one aborted transaction.
    final List<FetchResponseData.AbortedTransaction> abortedTransactions =
        singletonList(new FetchResponseData.AbortedTransaction().setProducerId(234L).setFirstOffset(999L));
    responseData.put(
        new TopicIdPartition(topicId, new TopicPartition("test", 1)),
        new FetchResponseData.PartitionData()
            .setPartitionIndex(1)
            .setHighWatermark(1000000)
            .setLogStartOffset(0)
            .setAbortedTransactions(abortedTransactions));

    // Round-trip through the wire format at the latest FETCH version.
    return FetchResponse.parse(
        FetchResponse.of(Errors.NONE, 25, sessionId, responseData)
            .serialize(FETCH.latestVersion()),
        FETCH.latestVersion());
}
Aggregations