Use of org.apache.kafka.streams.kstream.Predicate in project apache-kafka-on-k8s by banzaicloud.
The class KStreamBranchTest, method testKStreamBranch.
@SuppressWarnings("unchecked")
@Test
public void testKStreamBranch() {
    final StreamsBuilder builder = new StreamsBuilder();

    // Branch predicates: each record is routed to the first branch whose predicate matches.
    Predicate<Integer, String> isEven = new Predicate<Integer, String>() {
        @Override
        public boolean test(Integer key, String value) {
            return (key % 2) == 0;
        }
    };
    Predicate<Integer, String> isMultipleOfThree = new Predicate<Integer, String>() {
        @Override
        public boolean test(Integer key, String value) {
            return (key % 3) == 0;
        }
    };
    Predicate<Integer, String> isOdd = new Predicate<Integer, String>() {
        @Override
        public boolean test(Integer key, String value) {
            return (key % 2) != 0;
        }
    };

    final int[] expectedKeys = new int[] { 1, 2, 3, 4, 5, 6 };

    KStream<Integer, String> stream;
    KStream<Integer, String>[] branches;
    MockProcessorSupplier<Integer, String>[] processors;

    stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    branches = stream.branch(isEven, isMultipleOfThree, isOdd);
    assertEquals(3, branches.length);

    // Attach a mock processor to each branch so the routed records can be inspected.
    processors = (MockProcessorSupplier<Integer, String>[]) Array.newInstance(MockProcessorSupplier.class, branches.length);
    for (int i = 0; i < branches.length; i++) {
        processors[i] = new MockProcessorSupplier<>();
        branches[i].process(processors[i]);
    }

    driver.setUp(builder);
    for (int expectedKey : expectedKeys) {
        driver.process(topicName, expectedKey, "V" + expectedKey);
    }

    // Keys 2, 4, 6 are even; of the remaining keys, only 3 is a multiple of three; 1 and 5 fall through to isOdd.
    assertEquals(3, processors[0].processed.size());
    assertEquals(1, processors[1].processed.size());
    assertEquals(2, processors[2].processed.size());
}
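Because org.apache.kafka.streams.kstream.Predicate declares a single test(key, value) method, the anonymous classes above can be written as lambdas on Java 8+. A minimal sketch of the same branch wiring, assuming the same topicName, builder, and serdes as in the test:

    // Equivalent branch definition using lambdas (Java 8+).
    final Predicate<Integer, String> isEven = (key, value) -> (key % 2) == 0;
    final Predicate<Integer, String> isMultipleOfThree = (key, value) -> (key % 3) == 0;
    final Predicate<Integer, String> isOdd = (key, value) -> (key % 2) != 0;

    final KStream<Integer, String>[] branches = builder
        .stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()))
        .branch(isEven, isMultipleOfThree, isOdd);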
Use of org.apache.kafka.streams.kstream.Predicate in project apache-kafka-on-k8s by banzaicloud.
The class KStreamImpl, method createRepartitionedSource.
static <K1, V1> String createRepartitionedSource(final InternalStreamsBuilder builder,
                                                 final Serde<K1> keySerde,
                                                 final Serde<V1> valSerde,
                                                 final String topicNamePrefix,
                                                 final String name) {
    Serializer<K1> keySerializer = keySerde != null ? keySerde.serializer() : null;
    Serializer<V1> valSerializer = valSerde != null ? valSerde.serializer() : null;
    Deserializer<K1> keyDeserializer = keySerde != null ? keySerde.deserializer() : null;
    Deserializer<V1> valDeserializer = valSerde != null ? valSerde.deserializer() : null;

    String baseName = topicNamePrefix != null ? topicNamePrefix : name;
    String repartitionTopic = baseName + REPARTITION_TOPIC_SUFFIX;
    String sinkName = builder.newProcessorName(SINK_NAME);
    String filterName = builder.newProcessorName(FILTER_NAME);
    String sourceName = builder.newProcessorName(SOURCE_NAME);

    builder.internalTopologyBuilder.addInternalTopic(repartitionTopic);

    // Drop records with null keys before they are written to the repartition topic.
    builder.internalTopologyBuilder.addProcessor(filterName, new KStreamFilter<>(new Predicate<K1, V1>() {
        @Override
        public boolean test(final K1 key, final V1 value) {
            return key != null;
        }
    }, false), name);

    // filter -> sink (writes to the internal topic) -> source (reads it back into the topology).
    builder.internalTopologyBuilder.addSink(sinkName, repartitionTopic, keySerializer, valSerializer, null, filterName);
    builder.internalTopologyBuilder.addSource(null, sourceName, new FailOnInvalidTimestamp(), keyDeserializer, valDeserializer, repartitionTopic);

    return sourceName;
}
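For context, a caller-side analogue of what this internal method wires up is to filter out null-keyed records and route the stream through an intermediate topic. This is only a hedged illustration, not the implementation: the real repartition topic is created and named internally (baseName + REPARTITION_TOPIC_SUFFIX), whereas KStream#through expects a user-managed topic. The topic names, builder, and serdes below are hypothetical.

    // Illustration only: null-keyed records are dropped, the rest are written to an
    // intermediate topic and read back as a new source.
    final StreamsBuilder dslBuilder = new StreamsBuilder();
    final KStream<String, String> source = dslBuilder.stream("input-topic",
            Consumed.with(Serdes.String(), Serdes.String()));
    final KStream<String, String> repartitioned = source
            .filter((key, value) -> key != null)   // same predicate as the KStreamFilter above
            .through("example-repartition");        // hypothetical intermediate topic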
Use of org.apache.kafka.streams.kstream.Predicate in project apache-kafka-on-k8s by banzaicloud.
The class StreamsMetadataStateTest, method shouldGetAllStreamsInstancesWithNoStores.
@Test
public void shouldGetAllStreamsInstancesWithNoStores() {
    // A stateless sub-topology: the filter has no state store attached.
    builder.stream("topic-five").filter(new Predicate<Object, Object>() {
        @Override
        public boolean test(final Object key, final Object value) {
            return true;
        }
    }).to("some-other-topic");

    final TopicPartition tp5 = new TopicPartition("topic-five", 1);
    final HostInfo hostFour = new HostInfo("host-four", 8080);
    hostToPartitions.put(hostFour, Utils.mkSet(tp5));

    discovery.onChange(hostToPartitions, cluster.withPartitions(Collections.singletonMap(tp5, new PartitionInfo("topic-five", 1, null, null, null))));

    final StreamsMetadata expected = new StreamsMetadata(hostFour, Collections.singleton(globalTable), Collections.singleton(tp5));
    final Collection<StreamsMetadata> actual = discovery.getAllMetadata();
    assertTrue("expected " + actual + " to contain " + expected, actual.contains(expected));
}
Use of org.apache.kafka.streams.kstream.Predicate in project apache-kafka-on-k8s by banzaicloud.
The class KTableFilterTest, method testSendingOldValue.
@Test
public void testSendingOldValue() {
    StreamsBuilder builder = new StreamsBuilder();
    String topic1 = "topic1";

    KTableImpl<String, Integer, Integer> table1 = (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
    // Keep only records whose value is even.
    KTableImpl<String, Integer, Integer> table2 = (KTableImpl<String, Integer, Integer>) table1.filter(new Predicate<String, Integer>() {
        @Override
        public boolean test(String key, Integer value) {
            return (value % 2) == 0;
        }
    });

    doTestSendingOldValue(builder, table1, table2, topic1);
}
Use of org.apache.kafka.streams.kstream.Predicate in project apache-kafka-on-k8s by banzaicloud.
The class KTableFilterTest, method testQueryableSendingOldValue.
@Test
public void testQueryableSendingOldValue() {
    StreamsBuilder builder = new StreamsBuilder();
    String topic1 = "topic1";

    KTableImpl<String, Integer, Integer> table1 = (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
    // Same even-value filter, but with a named state store so the filtered table is queryable.
    KTableImpl<String, Integer, Integer> table2 = (KTableImpl<String, Integer, Integer>) table1.filter(new Predicate<String, Integer>() {
        @Override
        public boolean test(String key, Integer value) {
            return (value % 2) == 0;
        }
    }, "anyStoreNameFilter");

    doTestSendingOldValue(builder, table1, table2, topic1);
}
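The String store-name overload used here is on the internal KTableImpl; in the public KTable API (Kafka Streams 1.0+), a queryable filtered table is normally obtained by passing a Materialized configuration instead. A minimal sketch, assuming String keys, Integer values, and the same (hypothetical) store and topic names as in the test:

    // Public-API equivalent (sketch): filter a KTable and materialize the result under a named,
    // queryable state store.
    final StreamsBuilder publicApiBuilder = new StreamsBuilder();
    final KTable<String, Integer> table = publicApiBuilder.table("topic1",
            Consumed.with(Serdes.String(), Serdes.Integer()));
    final KTable<String, Integer> evenValues = table.filter(
            (key, value) -> (value % 2) == 0,
            Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as("anyStoreNameFilter")
                    .withKeySerde(Serdes.String())
                    .withValueSerde(Serdes.Integer()));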