Use of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaMergedBeginExFW in project zilla by aklivity.
From the class KafkaFunctionsTest: the method shouldGenerateMergedBeginExtensionWithHeaderNotEqualsFilter builds a merged BEGIN extension carrying one key filter and one negated header filter, then decodes the resulting flyweight and verifies both conditions.
@Test
public void shouldGenerateMergedBeginExtensionWithHeaderNotEqualsFilter() {
    byte[] build = KafkaFunctions.beginEx()
        .typeId(0x01)
        .merged()
            .topic("topic")
            .partition(0, 1L)
            .filter().key("match").build()
            .filter().headerNot("name", "value").build()
            .build()
        .build();

    DirectBuffer buffer = new UnsafeBuffer(build);
    KafkaBeginExFW beginEx = new KafkaBeginExFW().wrap(buffer, 0, buffer.capacity());
    assertEquals(0x01, beginEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), beginEx.kind());

    final KafkaMergedBeginExFW mergedBeginEx = beginEx.merged();
    assertEquals("topic", mergedBeginEx.topic().asString());
    assertNotNull(mergedBeginEx.partitions().matchFirst(p ->
        p.partitionId() == 0 && p.partitionOffset() == 1L));

    final MutableInteger filterCount = new MutableInteger();
    mergedBeginEx.filters().forEach(f -> filterCount.value++);
    assertEquals(2, filterCount.value);

    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == KEY.value() &&
        "match".equals(c.key().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == NOT.value() &&
        c.not().condition().kind() == HEADER.value() &&
        "name".equals(c.not().condition().header().name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o))) &&
        "value".equals(c.not().condition().header().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
}
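The lambda (b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o) recurs in every assertion to decode an OctetsFW flyweight as a UTF-8 string. A minimal sketch of a helper that factors it out; asUtf8 is hypothetical, not part of KafkaFunctionsTest, and assumes the zilla OctetsFW type used for key and header bytes:

// Hypothetical helper (not in KafkaFunctionsTest): decode an OctetsFW as UTF-8.
// Returns null when the octets are absent, matching the Objects.isNull checks
// used elsewhere in this test class.
private static String asUtf8(OctetsFW octets) {
    return octets == null
        ? null
        : octets.get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o));
}

With such a helper, the key assertion above reduces to "match".equals(asUtf8(c.key().value())).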
Use of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaMergedBeginExFW in project zilla by aklivity.
From the class KafkaFunctionsTest: the method shouldGenerateMergedBeginExtensionWithKeyNotEqualsFilter mirrors the previous test, with the negation on the key filter instead of the header filter.
@Test
public void shouldGenerateMergedBeginExtensionWithKeyNotEqualsFilter() {
    byte[] build = KafkaFunctions.beginEx()
        .typeId(0x01)
        .merged()
            .topic("topic")
            .partition(0, 1L)
            .filter().keyNot("match").build()
            .filter().header("name", "value").build()
            .build()
        .build();

    DirectBuffer buffer = new UnsafeBuffer(build);
    KafkaBeginExFW beginEx = new KafkaBeginExFW().wrap(buffer, 0, buffer.capacity());
    assertEquals(0x01, beginEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), beginEx.kind());

    final KafkaMergedBeginExFW mergedBeginEx = beginEx.merged();
    assertEquals("topic", mergedBeginEx.topic().asString());
    assertNotNull(mergedBeginEx.partitions().matchFirst(p ->
        p.partitionId() == 0 && p.partitionOffset() == 1L));

    final MutableInteger filterCount = new MutableInteger();
    mergedBeginEx.filters().forEach(f -> filterCount.value++);
    assertEquals(2, filterCount.value);

    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == NOT.value() &&
        c.not().condition().kind() == KEY.value() &&
        "match".equals(c.not().condition().key().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == HEADER.value() &&
        "name".equals(c.header().name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o))) &&
        "value".equals(c.header().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
}
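Note the shape of a negated condition: a NOT condition wraps an inner condition of kind KEY (or HEADER, as in the previous test). A sketch of a predicate that unwraps it, assuming KafkaConditionFW is the element type behind f.conditions() and reusing the hypothetical asUtf8 helper sketched earlier:

// Sketch: true when c is NOT(KEY == expected). KafkaConditionFW is assumed to
// be the condition flyweight type; asUtf8 is the hypothetical helper above.
private static boolean isKeyNot(KafkaConditionFW c, String expected) {
    return c.kind() == NOT.value() &&
        c.not().condition().kind() == KEY.value() &&
        expected.equals(asUtf8(c.not().condition().key().value()));
}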
Use of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaMergedBeginExFW in project zilla by aklivity.
From the class KafkaFunctionsTest: the method shouldGenerateMergedBeginExtension covers the base case, with a plain key filter and a plain header filter and no negation.
@Test
public void shouldGenerateMergedBeginExtension() {
    byte[] build = KafkaFunctions.beginEx()
        .typeId(0x01)
        .merged()
            .topic("topic")
            .partition(0, 1L)
            .filter().key("match").build()
            .filter().header("name", "value").build()
            .build()
        .build();

    DirectBuffer buffer = new UnsafeBuffer(build);
    KafkaBeginExFW beginEx = new KafkaBeginExFW().wrap(buffer, 0, buffer.capacity());
    assertEquals(0x01, beginEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), beginEx.kind());

    final KafkaMergedBeginExFW mergedBeginEx = beginEx.merged();
    assertEquals("topic", mergedBeginEx.topic().asString());
    assertNotNull(mergedBeginEx.partitions().matchFirst(p ->
        p.partitionId() == 0 && p.partitionOffset() == 1L));

    final MutableInteger filterCount = new MutableInteger();
    mergedBeginEx.filters().forEach(f -> filterCount.value++);
    assertEquals(2, filterCount.value);

    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == KEY.value() &&
        "match".equals(c.key().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == HEADER.value() &&
        "name".equals(c.header().name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o))) &&
        "value".equals(c.header().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
}
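Each of these tests counts filters by capturing a MutableInteger in a forEach lambda. A sketch of a generic count helper under the assumption that zilla's Array32FW is declared over its Flyweight base type:

// Sketch: count the elements of an Array32FW by iteration, replacing the
// MutableInteger pattern. Assumes Array32FW<T extends Flyweight>; adjust the
// bound to whatever the generated flyweight actually declares.
private static <T extends Flyweight> int count(Array32FW<T> array) {
    final MutableInteger count = new MutableInteger();
    array.forEach(item -> count.value++);
    return count.value;
}

assertEquals(2, count(mergedBeginEx.filters())) would then replace the three-line pattern.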
Use of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaMergedBeginExFW in project zilla by aklivity.
From the class KafkaFunctionsTest: the method shouldGenerateMergedBeginExtensionWithNullKeyOrNullHeaderValue verifies that a null key and a null header value round-trip as absent octets.
@Test
public void shouldGenerateMergedBeginExtensionWithNullKeyOrNullHeaderValue() {
    byte[] build = KafkaFunctions.beginEx()
        .typeId(0x01)
        .merged()
            .topic("topic")
            .partition(0, 1L)
            .filter().key(null).build()
            .filter().header("name", null).build()
            .build()
        .build();

    DirectBuffer buffer = new UnsafeBuffer(build);
    KafkaBeginExFW beginEx = new KafkaBeginExFW().wrap(buffer, 0, buffer.capacity());
    assertEquals(0x01, beginEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), beginEx.kind());

    final KafkaMergedBeginExFW mergedBeginEx = beginEx.merged();
    assertEquals("topic", mergedBeginEx.topic().asString());

    final MutableInteger partitionCount = new MutableInteger();
    mergedBeginEx.partitions().forEach(p -> partitionCount.value++);
    assertEquals(1, partitionCount.value);
    assertNotNull(mergedBeginEx.partitions().matchFirst(p ->
        p.partitionId() == 0 && p.partitionOffset() == 1L));

    final MutableInteger filterCount = new MutableInteger();
    mergedBeginEx.filters().forEach(f -> filterCount.value++);
    assertEquals(2, filterCount.value);

    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == KEY.value() &&
        Objects.isNull(c.key().value())) != null));
    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == HEADER.value() &&
        "name".equals(c.header().name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o))) &&
        Objects.isNull(c.header().value())) != null));
}
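Absence is represented by a null OctetsFW: key(null) yields a KEY condition whose value() is null, and header("name", null) yields a HEADER condition with a present name but a null value(). A sketch of matching predicates, reusing the hypothetical asUtf8 helper and the assumed KafkaConditionFW type from the sketches above:

// Sketch: predicates for the two null cases asserted above. A plain == null
// check is equivalent to the Objects.isNull calls in the test.
private static boolean isNullKey(KafkaConditionFW c) {
    return c.kind() == KEY.value() && c.key().value() == null;
}

private static boolean isNullHeaderValue(KafkaConditionFW c, String name) {
    return c.kind() == HEADER.value() &&
        name.equals(asUtf8(c.header().name())) &&
        c.header().value() == null;
}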
Use of io.aklivity.zilla.specs.binding.kafka.internal.types.stream.KafkaMergedBeginExFW in project zilla by aklivity.
From the class KafkaFunctionsTest: the method shouldGenerateMergedBeginExtensionWithHeadersFilter exercises the HEADERS condition, which matches an ordered sequence of values for one header name, with SKIP and SKIP_MANY wildcards.
@Test
public void shouldGenerateMergedBeginExtensionWithHeadersFilter() {
    KafkaValueMatchFW valueMatchRO = new KafkaValueMatchFW();
    byte[] build = KafkaFunctions.beginEx()
        .typeId(0x01)
        .merged()
            .topic("topic")
            .partition(0, 1L)
            .filter()
                .headers("name")
                    .sequence("one", "two")
                    .skip(1)
                    .sequence("four")
                    .skipMany()
                    .build()
                .build()
            .build()
        .build();

    DirectBuffer buffer = new UnsafeBuffer(build);
    KafkaBeginExFW beginEx = new KafkaBeginExFW().wrap(buffer, 0, buffer.capacity());
    assertEquals(0x01, beginEx.typeId());
    assertEquals(KafkaApi.MERGED.value(), beginEx.kind());

    final KafkaMergedBeginExFW mergedBeginEx = beginEx.merged();
    assertEquals("topic", mergedBeginEx.topic().asString());
    assertNotNull(mergedBeginEx.partitions().matchFirst(p ->
        p.partitionId() == 0 && p.partitionOffset() == 1L));

    final MutableInteger filterCount = new MutableInteger();
    mergedBeginEx.filters().forEach(f -> filterCount.value++);
    assertEquals(1, filterCount.value);

    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c ->
        c.kind() == HEADERS.value() &&
        "name".equals(c.headers().name().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)))) != null));
    assertNotNull(mergedBeginEx.filters().matchFirst(f -> f.conditions().matchFirst(c -> {
        boolean matches;
        final Array32FW<KafkaValueMatchFW> values = c.headers().values();
        final DirectBuffer items = values.items();
        int progress = 0;

        // value match: "one"
        valueMatchRO.wrap(items, progress, items.capacity());
        progress = valueMatchRO.limit();
        matches = "one".equals(valueMatchRO.value().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)));

        // value match: "two"
        valueMatchRO.wrap(items, progress, items.capacity());
        progress = valueMatchRO.limit();
        matches &= "two".equals(valueMatchRO.value().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)));

        // skip exactly one value
        valueMatchRO.wrap(items, progress, items.capacity());
        progress = valueMatchRO.limit();
        matches &= KafkaSkip.SKIP == valueMatchRO.skip().get();

        // value match: "four"
        valueMatchRO.wrap(items, progress, items.capacity());
        progress = valueMatchRO.limit();
        matches &= "four".equals(valueMatchRO.value().value().get((b, o, m) -> b.getStringWithoutLengthUtf8(o, m - o)));

        // skip any remaining values
        valueMatchRO.wrap(items, progress, items.capacity());
        progress = valueMatchRO.limit();
        matches &= KafkaSkip.SKIP_MANY == valueMatchRO.skip().get();

        return c.kind() == HEADERS.value() && matches;
    }) != null));
}
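The sequence is decoded by repeatedly wrapping the same KafkaValueMatchFW flyweight at the previous wrap's limit(), walking the raw items buffer of the Array32FW. The expected pattern is "one", "two", SKIP, "four", SKIP_MANY. A sketch of the same walk as a data-driven loop, intended as a drop-in replacement for the unrolled block inside the lambda above (it reuses valueMatchRO and c from that scope, plus the hypothetical asUtf8 helper):

// Sketch: the same sequential decode as a loop. A null entry in expectValues
// means that slot is expected to be the skip marker from expectSkips instead.
String[] expectValues = {"one", "two", null, "four", null};
KafkaSkip[] expectSkips = {null, null, KafkaSkip.SKIP, null, KafkaSkip.SKIP_MANY};
DirectBuffer items = c.headers().values().items();
boolean matches = true;
int progress = 0;
for (int i = 0; i < expectValues.length; i++) {
    valueMatchRO.wrap(items, progress, items.capacity());
    progress = valueMatchRO.limit();
    matches &= expectValues[i] != null
        ? expectValues[i].equals(asUtf8(valueMatchRO.value().value()))
        : expectSkips[i] == valueMatchRO.skip().get();
}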