Example use of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the project zilla by aklivity.
Taken from class KafkaFunctionsTest, method shouldGenerateFetchDataExtensionWithNullKeyAndNullHeaderValue.
@Test
public void shouldGenerateFetchDataExtensionWithNullKeyAndNullHeaderValue() {
    // Encode a FETCH data extension whose key and single header value are both null.
    final byte[] encoded = KafkaFunctions.dataEx()
        .typeId(0x01)
        .fetch()
            .timestamp(12345678L)
            .partition(0, 0L)
            .key(null)
            .header("name", null)
            .build()
        .build();

    final DirectBuffer wrapped = new UnsafeBuffer(encoded);
    final KafkaDataExFW dataEx = new KafkaDataExFW().wrap(wrapped, 0, wrapped.capacity());

    assertEquals(0x01, dataEx.typeId());
    assertEquals(KafkaApi.FETCH.value(), dataEx.kind());

    final KafkaFetchDataExFW fetchDataEx = dataEx.fetch();
    assertEquals(12345678L, fetchDataEx.timestamp());

    final KafkaOffsetFW offset = fetchDataEx.partition();
    assertEquals(0, offset.partitionId());
    assertEquals(0L, offset.partitionOffset());

    // A null key must round-trip as a null value.
    assertNull(fetchDataEx.key().value());

    // Exactly one header is present, and its value is null.
    final MutableInteger totalHeaders = new MutableInteger();
    fetchDataEx.headers().forEach(h -> totalHeaders.value++);
    assertEquals(1, totalHeaders.value);
    assertNotNull(fetchDataEx.headers().matchFirst(h ->
        "name".equals(h.name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))
            && Objects.isNull(h.value())));
}
Example use of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the project zilla by aklivity.
Taken from class KafkaFunctionsTest, method shouldGenerateProduceDataExtensionWithNullKeyAndNullHeaderValue.
@Test
public void shouldGenerateProduceDataExtensionWithNullKeyAndNullHeaderValue() {
    // Encode a PRODUCE data extension whose key and single header value are both null.
    final byte[] encoded = KafkaFunctions.dataEx()
        .typeId(0x01)
        .produce()
            .timestamp(12345678L)
            .sequence(0)
            .key(null)
            .header("name", null)
            .build()
        .build();

    final DirectBuffer wrapped = new UnsafeBuffer(encoded);
    final KafkaDataExFW dataEx = new KafkaDataExFW().wrap(wrapped, 0, wrapped.capacity());

    assertEquals(0x01, dataEx.typeId());
    assertEquals(KafkaApi.PRODUCE.value(), dataEx.kind());

    final KafkaProduceDataExFW produceDataEx = dataEx.produce();
    assertEquals(12345678L, produceDataEx.timestamp());
    assertEquals(0, produceDataEx.sequence());

    // A null key must round-trip as a null value.
    assertNull(produceDataEx.key().value());

    // Exactly one header is present, and its value is null.
    final MutableInteger totalHeaders = new MutableInteger();
    produceDataEx.headers().forEach(h -> totalHeaders.value++);
    assertEquals(1, totalHeaders.value);
    assertNotNull(produceDataEx.headers().matchFirst(h ->
        "name".equals(h.name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))
            && Objects.isNull(h.value())));
}
Example use of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the project zilla by aklivity.
Taken from class KafkaFunctionsTest, method shouldGenerateFetchBeginExtensionWithNullKeyAndNullHeaderValue.
@Test
public void shouldGenerateFetchBeginExtensionWithNullKeyAndNullHeaderValue() {
    // Encode a FETCH begin extension carrying two filters: a null-key condition
    // and a header condition whose value is null.
    final byte[] encoded = KafkaFunctions.beginEx()
        .typeId(0x01)
        .fetch()
            .topic("topic")
            .partition(0, 0L)
            .filter()
                .key(null)
                .build()
            .filter()
                .header("name", null)
                .build()
            .build()
        .build();

    final DirectBuffer wrapped = new UnsafeBuffer(encoded);
    final KafkaBeginExFW beginEx = new KafkaBeginExFW().wrap(wrapped, 0, wrapped.capacity());

    assertEquals(0x01, beginEx.typeId());
    assertEquals(KafkaApi.FETCH.value(), beginEx.kind());

    final KafkaFetchBeginExFW fetchBeginEx = beginEx.fetch();
    assertEquals("topic", fetchBeginEx.topic().asString());

    final KafkaOffsetFW offset = fetchBeginEx.partition();
    assertEquals(0, offset.partitionId());
    assertEquals(0L, offset.partitionOffset());

    // Both filters must have been encoded.
    final MutableInteger totalFilters = new MutableInteger();
    fetchBeginEx.filters().forEach(f -> totalFilters.value++);
    assertEquals(2, totalFilters.value);

    // One filter holds a KEY condition with a null key value.
    assertNotNull(fetchBeginEx.filters().matchFirst(f ->
        f.conditions().matchFirst(c ->
            c.kind() == KEY.value() && Objects.isNull(c.key().value())) != null));

    // The other filter holds a HEADER condition named "name" with a null value.
    assertNotNull(fetchBeginEx.filters().matchFirst(f ->
        f.conditions().matchFirst(c ->
            c.kind() == HEADER.value()
                && "name".equals(c.header().name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))
                && Objects.isNull(c.header().value())) != null));
}
Example use of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the project zilla by aklivity.
Taken from class KafkaFunctionsTest, method shouldMatchMergedDataExtensionHeader.
@Test
public void shouldMatchMergedDataExtensionHeader() throws Exception {
    // A matcher constrained only on the header should accept a merged data
    // extension that carries that header, regardless of its other fields.
    final BytesMatcher matcher = KafkaFunctions.matchDataEx()
        .merged()
            .header("name", "value")
            .build()
        .build();

    final ByteBuffer storage = ByteBuffer.allocate(1024);
    new KafkaDataExFW.Builder()
        .wrap(new UnsafeBuffer(storage), 0, storage.capacity())
        .typeId(0x01)
        .merged(m -> m
            .timestamp(12345678L)
            .partition(p -> p.partitionId(0).partitionOffset(0L))
            .progressItem(p -> p.partitionId(0).partitionOffset(1L))
            .key(k -> k.length(5).value(v -> v.set("match".getBytes(UTF_8))))
            .delta(d -> d.type(t -> t.set(KafkaDeltaType.NONE)))
            .headersItem(h -> h
                .nameLen(4)
                .name(n -> n.set("name".getBytes(UTF_8)))
                .valueLen(5)
                .value(v -> v.set("value".getBytes(UTF_8)))))
        .build();

    assertNotNull(matcher.match(storage));
}
Example use of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the project zilla by aklivity.
Taken from class KafkaFunctionsTest, method shouldGenerateMergedDataExtensionWithIntValue.
@Test
public void shouldGenerateMergedDataExtensionWithIntValue() {
    // Encode a MERGED data extension with a key and a single int-valued header.
    byte[] build = KafkaFunctions.dataEx()
        .typeId(0x01)
        .merged()
            .timestamp(12345678L)
            .partition(0, 0L)
            .progress(0, 1L)
            .key("match")
            .headerInt("name", 1)
            .build()
        .build();

    DirectBuffer buffer = new UnsafeBuffer(build);
    KafkaDataExFW dataEx = new KafkaDataExFW().wrap(buffer, 0, buffer.capacity());

    assertEquals(0x01, dataEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), dataEx.kind());

    final KafkaMergedDataExFW mergedDataEx = dataEx.merged();
    assertEquals(12345678L, mergedDataEx.timestamp());

    final KafkaOffsetFW partition = mergedDataEx.partition();
    assertEquals(0, partition.partitionId());
    assertEquals(0L, partition.partitionOffset());

    // Exactly one progress entry, at partition 0 / offset 1.
    final MutableInteger progressCount = new MutableInteger();
    mergedDataEx.progress().forEach(f -> progressCount.value++);
    assertEquals(1, progressCount.value);
    assertNotNull(mergedDataEx.progress().matchFirst(p ->
        p.partitionId() == 0 && p.partitionOffset() == 1L));

    assertEquals("match", mergedDataEx.key().value()
        .get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)));

    // Exactly one header, whose value decodes as the int 1.
    final MutableInteger headersCount = new MutableInteger();
    mergedDataEx.headers().forEach(f -> headersCount.value++);
    assertEquals(1, headersCount.value);
    // BUG FIX: the original asserted `assertNotNull(matchFirst(...) != null)`,
    // which boxes the boolean into a never-null Boolean and so could never fail.
    // Assert the matched header itself instead.
    assertNotNull(mergedDataEx.headers().matchFirst(h ->
        "name".equals(h.name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))
            && h.value().get((b, o, m) -> b.getInt(o)) == 1));
}
End of aggregated usage examples.