Usage of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaDataExFW in the aklivity/zilla project.
From class KafkaFunctionsTest: method shouldGenerateMergedDataExtension.
@Test
public void shouldGenerateMergedDataExtension() {
    // Encode a MERGED Kafka data extension with one partition progress entry,
    // a key, and one string-valued header, then decode and verify every field.
    byte[] build = KafkaFunctions.dataEx()
        .typeId(0x01)
        .merged()
            .timestamp(12345678L)
            .partition(0, 0L)
            .progress(0, 1L)
            .key("match")
            .header("name", "value")
            .build()
        .build();

    DirectBuffer buffer = new UnsafeBuffer(build);
    KafkaDataExFW dataEx = new KafkaDataExFW().wrap(buffer, 0, buffer.capacity());

    assertEquals(0x01, dataEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), dataEx.kind());

    final KafkaMergedDataExFW mergedDataEx = dataEx.merged();
    assertEquals(12345678L, mergedDataEx.timestamp());

    final KafkaOffsetFW partition = mergedDataEx.partition();
    assertEquals(0, partition.partitionId());
    assertEquals(0L, partition.partitionOffset());

    final MutableInteger progressCount = new MutableInteger();
    mergedDataEx.progress().forEach(f -> progressCount.value++);
    assertEquals(1, progressCount.value);
    assertNotNull(mergedDataEx.progress().matchFirst(p -> p.partitionId() == 0 && p.partitionOffset() == 1L));

    assertEquals("match", mergedDataEx.key().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)));

    final MutableInteger headersCount = new MutableInteger();
    mergedDataEx.headers().forEach(f -> headersCount.value++);
    assertEquals(1, headersCount.value);
    // BUG FIX: previously `assertNotNull(matchFirst(...) != null)` — the `!= null`
    // produced a boxed Boolean that is never null, so the assertion always passed.
    // Assert on the matched header itself instead.
    assertNotNull(mergedDataEx.headers().matchFirst(h ->
        "name".equals(h.name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o))) &&
        "value".equals(h.value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))));
}
Usage of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaDataExFW in the aklivity/zilla project.
From class KafkaFunctionsTest: method shouldGenerateMergedDataExtensionWithNullKeyAndNullByteArrayHeaderValue.
@Test
public void shouldGenerateMergedDataExtensionWithNullKeyAndNullByteArrayHeaderValue() {
    // Round-trip a MERGED data extension whose key is null and whose single
    // header carries a null byte-array value; both nulls must survive decoding.
    final byte[] encoded = KafkaFunctions.dataEx()
        .typeId(0x01)
        .merged()
            .timestamp(12345678L)
            .partition(0, 0L)
            .progress(0, 1L)
            .key(null)
            .headerBytes("name", null)
            .build()
        .build();

    final DirectBuffer wrapped = new UnsafeBuffer(encoded);
    final KafkaDataExFW decoded = new KafkaDataExFW().wrap(wrapped, 0, wrapped.capacity());

    assertEquals(0x01, decoded.typeId());
    assertEquals(KafkaApi.MERGED.value(), decoded.kind());

    final KafkaMergedDataExFW merged = decoded.merged();
    assertEquals(12345678L, merged.timestamp());

    final KafkaOffsetFW offset = merged.partition();
    assertEquals(0, offset.partitionId());
    assertEquals(0L, offset.partitionOffset());

    // Exactly one progress entry, matching the (0, 1L) written above.
    final MutableInteger progressEntries = new MutableInteger();
    merged.progress().forEach(entry -> progressEntries.value++);
    assertEquals(1, progressEntries.value);
    assertNotNull(merged.progress().matchFirst(entry ->
        entry.partitionId() == 0 && entry.partitionOffset() == 1L));

    // The null key must decode back to a null value.
    assertNull(merged.key().value());

    // Exactly one header, named "name", with a null value.
    final MutableInteger headerEntries = new MutableInteger();
    merged.headers().forEach(entry -> headerEntries.value++);
    assertEquals(1, headerEntries.value);
    assertNotNull(merged.headers().matchFirst(header ->
        "name".equals(header.name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))
            && header.value() == null));
}
Usage of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaDataExFW in the aklivity/zilla project.
From class KafkaFunctionsTest: method shouldGenerateMergedDataExtensionWithShortValue.
@Test
public void shouldGenerateMergedDataExtensionWithShortValue() {
    // Encode a MERGED Kafka data extension whose header value is a 16-bit short,
    // then decode and verify the short value round-trips.
    byte[] build = KafkaFunctions.dataEx()
        .typeId(0x01)
        .merged()
            .timestamp(12345678L)
            .partition(0, 0L)
            .progress(0, 1L)
            .key("match")
            .headerShort("name", (short) 1)
            .build()
        .build();

    DirectBuffer buffer = new UnsafeBuffer(build);
    KafkaDataExFW dataEx = new KafkaDataExFW().wrap(buffer, 0, buffer.capacity());

    assertEquals(0x01, dataEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), dataEx.kind());

    final KafkaMergedDataExFW mergedDataEx = dataEx.merged();
    assertEquals(12345678L, mergedDataEx.timestamp());

    final KafkaOffsetFW partition = mergedDataEx.partition();
    assertEquals(0, partition.partitionId());
    assertEquals(0L, partition.partitionOffset());

    final MutableInteger progressCount = new MutableInteger();
    mergedDataEx.progress().forEach(f -> progressCount.value++);
    assertEquals(1, progressCount.value);
    assertNotNull(mergedDataEx.progress().matchFirst(p -> p.partitionId() == 0 && p.partitionOffset() == 1L));

    assertEquals("match", mergedDataEx.key().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)));

    final MutableInteger headersCount = new MutableInteger();
    mergedDataEx.headers().forEach(f -> headersCount.value++);
    assertEquals(1, headersCount.value);
    // BUG FIX: previously `assertNotNull(matchFirst(...) != null)` — the `!= null`
    // produced a boxed Boolean that is never null, so the assertion always passed.
    // Assert on the matched header itself instead.
    assertNotNull(mergedDataEx.headers().matchFirst(h ->
        "name".equals(h.name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o))) &&
        h.value().get((b, o, m) -> b.getShort(o)) == (short) 1));
}
Usage of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaDataExFW in the aklivity/zilla project.
From class KafkaFunctionsTest: method shouldGenerateMergedDataExtensionWithNullValue.
@Test
public void shouldGenerateMergedDataExtensionWithNullValue() {
    // Round-trip a MERGED data extension with a "match" key and a single header
    // written via headerNull(); the header value must decode back as null.
    final byte[] encoded = KafkaFunctions.dataEx()
        .typeId(0x01)
        .merged()
            .timestamp(12345678L)
            .partition(0, 0L)
            .progress(0, 1L)
            .key("match")
            .headerNull("name")
            .build()
        .build();

    final DirectBuffer wrapped = new UnsafeBuffer(encoded);
    final KafkaDataExFW decoded = new KafkaDataExFW().wrap(wrapped, 0, wrapped.capacity());

    assertEquals(0x01, decoded.typeId());
    assertEquals(KafkaApi.MERGED.value(), decoded.kind());

    final KafkaMergedDataExFW merged = decoded.merged();
    assertEquals(12345678L, merged.timestamp());

    final KafkaOffsetFW offset = merged.partition();
    assertEquals(0, offset.partitionId());
    assertEquals(0L, offset.partitionOffset());

    // Exactly one progress entry, matching the (0, 1L) written above.
    final MutableInteger progressEntries = new MutableInteger();
    merged.progress().forEach(entry -> progressEntries.value++);
    assertEquals(1, progressEntries.value);
    assertNotNull(merged.progress().matchFirst(entry ->
        entry.partitionId() == 0 && entry.partitionOffset() == 1L));

    assertEquals("match", merged.key().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)));

    // Exactly one header, named "name", whose value decodes to null.
    final MutableInteger headerEntries = new MutableInteger();
    merged.headers().forEach(entry -> headerEntries.value++);
    assertEquals(1, headerEntries.value);
    assertNotNull(merged.headers().matchFirst(header ->
        "name".equals(header.name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))
            && header.value() == null));
}
Usage of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaDataExFW in the aklivity/zilla project.
From class KafkaFunctionsTest: method shouldGenerateDescribeDataExtension.
@Test
public void shouldGenerateDescribeDataExtension() {
    // Round-trip a DESCRIBE data extension carrying a single topic config entry
    // and verify its name/value pair decodes intact.
    final byte[] encoded = KafkaFunctions.dataEx()
        .typeId(0x01)
        .describe()
            .config("cleanup.policy", "compact")
            .build()
        .build();

    final DirectBuffer wrapped = new UnsafeBuffer(encoded);
    final KafkaDataExFW decoded = new KafkaDataExFW().wrap(wrapped, 0, wrapped.capacity());

    assertEquals(0x01, decoded.typeId());
    assertEquals(KafkaApi.DESCRIBE.value(), decoded.kind());

    final KafkaDescribeDataExFW describe = decoded.describe();

    // Exactly one config entry, matching the pair written above.
    final MutableInteger configEntries = new MutableInteger();
    describe.configs().forEach(entry -> configEntries.value++);
    assertEquals(1, configEntries.value);
    assertNotNull(describe.configs().matchFirst(config ->
        "cleanup.policy".equals(config.name().asString())
            && "compact".equals(config.value().asString())));
}
Aggregations