Example usage of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the zilla project by aklivity.
From the class KafkaFunctionsTest, method shouldGenerateMergedBeginExtensionWithKeyNotEqualsFilter:
@Test
public void shouldGenerateMergedBeginExtensionWithKeyNotEqualsFilter() {
    // Encode a MERGED begin extension carrying two filters:
    // one key-not-equals condition and one header condition.
    byte[] encoded = KafkaFunctions.beginEx()
        .typeId(0x01)
        .merged()
            .topic("topic")
            .partition(0, 1L)
            .filter()
                .keyNot("match")
                .build()
            .filter()
                .header("name", "value")
                .build()
            .build()
        .build();

    DirectBuffer wrapped = new UnsafeBuffer(encoded);
    KafkaBeginExFW beginEx = new KafkaBeginExFW().wrap(wrapped, 0, wrapped.capacity());

    assertEquals(0x01, beginEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), beginEx.kind());

    final KafkaMergedBeginExFW merged = beginEx.merged();
    assertEquals("topic", merged.topic().asString());
    assertNotNull(merged.partitions()
        .matchFirst(p -> p.partitionId() == 0 && p.partitionOffset() == 1L));

    // Count the encoded filters via forEach; exactly two were built above.
    final MutableInteger filters = new MutableInteger();
    merged.filters().forEach(f -> filters.value++);
    assertEquals(2, filters.value);

    // One filter must hold a NOT(KEY == "match") condition.
    assertNotNull(merged.filters().matchFirst(f ->
        f.conditions().matchFirst(c ->
            c.kind() == NOT.value() &&
            c.not().condition().kind() == KEY.value() &&
            "match".equals(c.not().condition().key().value()
                .get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));

    // The other filter must hold a HEADER "name" -> "value" condition.
    assertNotNull(merged.filters().matchFirst(f ->
        f.conditions().matchFirst(c ->
            c.kind() == HEADER.value() &&
            "name".equals(c.header().name()
                .get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o))) &&
            "value".equals(c.header().value()
                .get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
}
Example usage of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the zilla project by aklivity.
From the class KafkaFunctionsTest, method shouldMatchFetchDataExtension:
@Test
public void shouldMatchFetchDataExtension() throws Exception {
    // Build a matcher for a FETCH data extension with timestamp, partition,
    // key and header expectations.
    BytesMatcher matcher = KafkaFunctions.matchDataEx()
        .typeId(0x01)
        .fetch()
            .timestamp(12345678L)
            .partition(0, 0L)
            .key("match")
            .header("name", "value")
            .build()
        .build();

    ByteBuffer encoded = ByteBuffer.allocate(1024);

    // Encode an equivalent FETCH data extension directly via the flyweight builder.
    new KafkaDataExFW.Builder()
        .wrap(new UnsafeBuffer(encoded), 0, encoded.capacity())
        .typeId(0x01)
        .fetch(f -> f
            .timestamp(12345678L)
            .partition(p -> p.partitionId(0).partitionOffset(0L))
            .key(k -> k.length(5).value(v -> v.set("match".getBytes(UTF_8))))
            .delta(d -> d.type(t -> t.set(KafkaDeltaType.NONE)))
            .headersItem(h -> h
                .nameLen(4)
                .name(n -> n.set("name".getBytes(UTF_8)))
                .valueLen(5)
                .value(v -> v.set("value".getBytes(UTF_8)))))
        .build();

    assertNotNull(matcher.match(encoded));
}
Example usage of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the zilla project by aklivity.
From the class KafkaFunctionsTest, method shouldMatchMergedDataExtensionTimestamp:
@Test
public void shouldMatchMergedDataExtensionTimestamp() throws Exception {
    // Matcher constrains only the timestamp; all other MERGED fields are unconstrained.
    BytesMatcher matcher = KafkaFunctions.matchDataEx()
        .merged()
            .timestamp(12345678L)
            .build()
        .build();

    ByteBuffer encoded = ByteBuffer.allocate(1024);

    // Encode a fully-populated MERGED data extension; only the timestamp must match.
    new KafkaDataExFW.Builder()
        .wrap(new UnsafeBuffer(encoded), 0, encoded.capacity())
        .typeId(0x01)
        .merged(f -> f
            .timestamp(12345678L)
            .partition(p -> p.partitionId(0).partitionOffset(0L))
            .progressItem(p -> p.partitionId(0).partitionOffset(1L))
            .key(k -> k.length(5).value(v -> v.set("match".getBytes(UTF_8))))
            .delta(d -> d.type(t -> t.set(KafkaDeltaType.NONE)))
            .headersItem(h -> h
                .nameLen(4)
                .name(n -> n.set("name".getBytes(UTF_8)))
                .valueLen(5)
                .value(v -> v.set("value".getBytes(UTF_8)))))
        .build();

    assertNotNull(matcher.match(encoded));
}
Example usage of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the zilla project by aklivity.
From the class KafkaFunctionsTest, method shouldGenerateMergedBeginExtension:
@Test
public void shouldGenerateMergedBeginExtension() {
    // Encode a MERGED begin extension carrying two filters:
    // one key-equals condition and one header condition.
    byte[] encoded = KafkaFunctions.beginEx()
        .typeId(0x01)
        .merged()
            .topic("topic")
            .partition(0, 1L)
            .filter()
                .key("match")
                .build()
            .filter()
                .header("name", "value")
                .build()
            .build()
        .build();

    DirectBuffer wrapped = new UnsafeBuffer(encoded);
    KafkaBeginExFW beginEx = new KafkaBeginExFW().wrap(wrapped, 0, wrapped.capacity());

    assertEquals(0x01, beginEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), beginEx.kind());

    final KafkaMergedBeginExFW merged = beginEx.merged();
    assertEquals("topic", merged.topic().asString());
    assertNotNull(merged.partitions()
        .matchFirst(p -> p.partitionId() == 0 && p.partitionOffset() == 1L));

    // Count the encoded filters via forEach; exactly two were built above.
    final MutableInteger filters = new MutableInteger();
    merged.filters().forEach(f -> filters.value++);
    assertEquals(2, filters.value);

    // One filter must hold a KEY == "match" condition.
    assertNotNull(merged.filters().matchFirst(f ->
        f.conditions().matchFirst(c ->
            c.kind() == KEY.value() &&
            "match".equals(c.key().value()
                .get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));

    // The other filter must hold a HEADER "name" -> "value" condition.
    assertNotNull(merged.filters().matchFirst(f ->
        f.conditions().matchFirst(c ->
            c.kind() == HEADER.value() &&
            "name".equals(c.header().name()
                .get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o))) &&
            "value".equals(c.header().value()
                .get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
}
Example usage of io.aklivity.zilla.specs.binding.kafka.internal.types.KafkaConditionType.KEY in the zilla project by aklivity.
From the class KafkaFunctionsTest, method shouldNotMatchMergedDataExtensionTypeId:
@Test(expected = Exception.class)
public void shouldNotMatchMergedDataExtensionTypeId() throws Exception {
    // Matcher expects typeId 0x02, but the encoded extension carries 0x01,
    // so match(...) is expected to throw.
    BytesMatcher matcher = KafkaFunctions.matchDataEx().typeId(0x02).build();

    ByteBuffer encoded = ByteBuffer.allocate(1024);
    new KafkaDataExFW.Builder()
        .wrap(new UnsafeBuffer(encoded), 0, encoded.capacity())
        .typeId(0x01)
        .merged(f -> f
            .timestamp(12345678L)
            .partition(p -> p.partitionId(0).partitionOffset(0L))
            .progressItem(p -> p.partitionId(0).partitionOffset(1L))
            .key(k -> k.value(v -> v.set("match".getBytes(UTF_8))))
            .delta(d -> d.type(t -> t.set(KafkaDeltaType.NONE)))
            .headersItem(h -> h
                .nameLen(4)
                .name(n -> n.set("name".getBytes(UTF_8)))
                .valueLen(5)
                .value(v -> v.set("value".getBytes(UTF_8)))))
        .build();

    matcher.match(encoded);
}
Aggregations