use of org.apache.kafka.streams.kstream.ValueMapper in project apache-kafka-on-k8s by banzaicloud.
the class KTableKTableLeftJoinTest method shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions.
/**
 * This test was written to reproduce https://issues.apache.org/jira/browse/KAFKA-4492.
 * It is based on a fairly complicated join used by the developer who reported the bug.
 * Before the fix this would trigger an IllegalStateException.
 */
@Test
public void shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions() {
    final String agg = "agg";
    final String tableOne = "tableOne";
    final String tableTwo = "tableTwo";
    final String tableThree = "tableThree";
    final String tableFour = "tableFour";
    final String tableFive = "tableFive";
    final String tableSix = "tableSix";
    final String[] inputs = {agg, tableOne, tableTwo, tableThree, tableFour, tableFive, tableSix};
    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<Long, String> consumed = Consumed.with(Serdes.Long(), Serdes.String());
    final KTable<Long, String> aggTable = builder.table(agg, consumed)
        .groupBy(new KeyValueMapper<Long, String, KeyValue<Long, String>>() {
            @Override
            public KeyValue<Long, String> apply(final Long key, final String value) {
                return new KeyValue<>(key, value);
            }
        }, Serialized.with(Serdes.Long(), Serdes.String()))
        .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_ADDER, "agg-store");
    final KTable<Long, String> one = builder.table(tableOne, consumed);
    final KTable<Long, String> two = builder.table(tableTwo, consumed);
    final KTable<Long, String> three = builder.table(tableThree, consumed);
    final KTable<Long, String> four = builder.table(tableFour, consumed);
    final KTable<Long, String> five = builder.table(tableFive, consumed);
    final KTable<Long, String> six = builder.table(tableSix, consumed);
    final ValueMapper<String, String> mapper = new ValueMapper<String, String>() {
        @Override
        public String apply(final String value) {
            return value.toUpperCase(Locale.ROOT);
        }
    };
    final KTable<Long, String> seven = one.mapValues(mapper);
    final KTable<Long, String> eight = six.leftJoin(seven, MockValueJoiner.TOSTRING_JOINER);
    aggTable.leftJoin(one, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(two, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(three, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(four, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(five, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(eight, MockValueJoiner.TOSTRING_JOINER)
        .mapValues(mapper);
    driver.setUp(builder, stateDir, 250);
    final String[] values = {
        "a", "AA", "BBB", "CCCC", "DD", "EEEEEEEE", "F", "GGGGGGGGGGGGGGG", "HHH", "IIIIIIIIII",
        "J", "KK", "LLLL", "MMMMMMMMMMMMMMMMMMMMMM", "NNNNN", "O", "P", "QQQQQ", "R", "SSSS",
        "T", "UU", "VVVVVVVVVVVVVVVVVVV"
    };
    final Random random = new Random();
    for (int i = 0; i < 1000; i++) {
        for (final String input : inputs) {
            final Long key = (long) random.nextInt(1000);
            final String value = values[random.nextInt(values.length)];
            driver.process(input, key, value);
        }
    }
}
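Since ValueMapper is a single-method interface, the anonymous class above collapses to a lambda on Java 8+. Below is a minimal standalone sketch of the same upper-casing mapper; the topic names and class name are illustrative, not part of the original test.

import java.util.Locale;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.ValueMapper;

public class UpperCaseMapperSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        // Read a topic as a changelog-backed KTable, as the test does.
        final KTable<Long, String> table =
            builder.table("input-topic", Consumed.with(Serdes.Long(), Serdes.String()));
        // The lambda is interchangeable with the anonymous-class form above.
        final ValueMapper<String, String> mapper = value -> value.toUpperCase(Locale.ROOT);
        table.mapValues(mapper).toStream().to("output-topic");
        // Print the wiring that mapValues adds to the topology.
        System.out.println(builder.build().describe());
    }
}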
use of org.apache.kafka.streams.kstream.ValueMapper in project apache-kafka-on-k8s by banzaicloud.
the class KTableMapValuesTest method testValueGetter.
@Test
public void testValueGetter() {
    StreamsBuilder builder = new StreamsBuilder();
    String topic1 = "topic1";
    String topic2 = "topic2";
    String storeName2 = "storeName2";
    KTableImpl<String, String, String> table1 =
        (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    KTableImpl<String, String, Integer> table2 =
        (KTableImpl<String, String, Integer>) table1.mapValues(new ValueMapper<String, Integer>() {
            @Override
            public Integer apply(String value) {
                return new Integer(value);
            }
        });
    KTableImpl<String, Integer, Integer> table3 =
        (KTableImpl<String, Integer, Integer>) table2.filter(new Predicate<String, Integer>() {
            @Override
            public boolean test(String key, Integer value) {
                return (value % 2) == 0;
            }
        });
    KTableImpl<String, String, String> table4 =
        (KTableImpl<String, String, String>) table1.through(stringSerde, stringSerde, topic2, storeName2);
    doTestValueGetter(builder, topic1, table1, table2, table3, table4);
}
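The KTableImpl casts above exist only so the test can reach internal methods such as valueGetterSupplier; against the public KTable interface the same mapValues-then-filter chain is much shorter. A condensed sketch, with hypothetical topic names:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Produced;

public class MapValuesFilterSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, String> source =
            builder.table("topic1", Consumed.with(Serdes.String(), Serdes.String()));
        // Parse each value to an Integer, then keep only even values; records
        // that fail the predicate become tombstones for downstream consumers.
        final KTable<String, Integer> parsed = source.mapValues(Integer::valueOf);
        final KTable<String, Integer> evens = parsed.filter((key, value) -> value % 2 == 0);
        evens.toStream().to("even-values", Produced.with(Serdes.String(), Serdes.Integer()));
    }
}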
use of org.apache.kafka.streams.kstream.ValueMapper in project apache-kafka-on-k8s by banzaicloud.
the class KTableMapValuesTest method testNotSendingOldValue.
@Test
public void testNotSendingOldValue() {
    StreamsBuilder builder = new StreamsBuilder();
    String topic1 = "topic1";
    KTableImpl<String, String, String> table1 =
        (KTableImpl<String, String, String>) builder.table(topic1, consumed);
    KTableImpl<String, String, Integer> table2 =
        (KTableImpl<String, String, Integer>) table1.mapValues(new ValueMapper<String, Integer>() {
            @Override
            public Integer apply(String value) {
                return new Integer(value);
            }
        });
    MockProcessorSupplier<String, Integer> proc = new MockProcessorSupplier<>();
    builder.build().addProcessor("proc", proc, table2.name);
    driver.setUp(builder, stateDir);
    assertFalse(table1.sendingOldValueEnabled());
    assertFalse(table2.sendingOldValueEnabled());
    driver.process(topic1, "A", "01");
    driver.process(topic1, "B", "01");
    driver.process(topic1, "C", "01");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(1<-null)", "B:(1<-null)", "C:(1<-null)");
    driver.process(topic1, "A", "02");
    driver.process(topic1, "B", "02");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(2<-null)", "B:(2<-null)");
    driver.process(topic1, "A", "03");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(3<-null)");
    driver.process(topic1, "A", null);
    driver.flushState();
    proc.checkAndClearProcessResult("A:(null<-null)");
}
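In the expected strings, "A:(1<-null)" reads as key A with new value 1 and old value null; the old value stays null throughout because no downstream operator asked the table to forward old values (hence the sendingOldValueEnabled() assertions). Outside the internal test driver used above, a similar topology can be exercised with the public TopologyTestDriver; a minimal sketch, with topic names and config values chosen purely for illustration:

import java.util.Properties;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class MapValuesDriverSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        builder.table("topic1", Consumed.with(Serdes.String(), Serdes.String()))
            .mapValues(Integer::valueOf)
            .toStream()
            .to("out", Produced.with(Serdes.String(), Serdes.Integer()));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "map-values-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:9092");

        try (TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> in =
                driver.createInputTopic("topic1", new StringSerializer(), new StringSerializer());
            final TestOutputTopic<String, Integer> out =
                driver.createOutputTopic("out", new StringDeserializer(), new IntegerDeserializer());
            in.pipeInput("A", "01");
            // The table update surfaces as a single new value; expect KeyValue(A, 1).
            System.out.println(out.readKeyValue());
        }
    }
}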
use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.
the class KTableImplTest method shouldPreserveSerdesForOperators.
@Test
public void shouldPreserveSerdesForOperators() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KTable<String, String> table1 = builder.table("topic-2", stringConsumed);
    final ConsumedInternal<String, String> consumedInternal = new ConsumedInternal<>(stringConsumed);
    final KeyValueMapper<String, String, String> selector = (key, value) -> key;
    final ValueMapper<String, String> mapper = value -> value;
    final ValueJoiner<String, String, String> joiner = (value1, value2) -> value1;
    final ValueTransformerWithKeySupplier<String, String, String> valueTransformerWithKeySupplier =
        () -> new ValueTransformerWithKey<String, String, String>() {
            @Override
            public void init(final ProcessorContext context) {
            }

            @Override
            public String transform(final String key, final String value) {
                return value;
            }

            @Override
            public void close() {
            }
        };

    assertEquals(((AbstractStream) table1.filter((key, value) -> false)).keySerde(), consumedInternal.keySerde());
    assertEquals(((AbstractStream) table1.filter((key, value) -> false)).valueSerde(), consumedInternal.valueSerde());
    assertEquals(((AbstractStream) table1.filter((key, value) -> false, Materialized.with(mySerde, mySerde))).keySerde(), mySerde);
    assertEquals(((AbstractStream) table1.filter((key, value) -> false, Materialized.with(mySerde, mySerde))).valueSerde(), mySerde);

    assertEquals(((AbstractStream) table1.filterNot((key, value) -> false)).keySerde(), consumedInternal.keySerde());
    assertEquals(((AbstractStream) table1.filterNot((key, value) -> false)).valueSerde(), consumedInternal.valueSerde());
    assertEquals(((AbstractStream) table1.filterNot((key, value) -> false, Materialized.with(mySerde, mySerde))).keySerde(), mySerde);
    assertEquals(((AbstractStream) table1.filterNot((key, value) -> false, Materialized.with(mySerde, mySerde))).valueSerde(), mySerde);

    assertEquals(((AbstractStream) table1.mapValues(mapper)).keySerde(), consumedInternal.keySerde());
    assertNull(((AbstractStream) table1.mapValues(mapper)).valueSerde());
    assertEquals(((AbstractStream) table1.mapValues(mapper, Materialized.with(mySerde, mySerde))).keySerde(), mySerde);
    assertEquals(((AbstractStream) table1.mapValues(mapper, Materialized.with(mySerde, mySerde))).valueSerde(), mySerde);

    assertEquals(((AbstractStream) table1.toStream()).keySerde(), consumedInternal.keySerde());
    assertEquals(((AbstractStream) table1.toStream()).valueSerde(), consumedInternal.valueSerde());
    assertNull(((AbstractStream) table1.toStream(selector)).keySerde());
    assertEquals(((AbstractStream) table1.toStream(selector)).valueSerde(), consumedInternal.valueSerde());

    assertEquals(((AbstractStream) table1.transformValues(valueTransformerWithKeySupplier)).keySerde(), consumedInternal.keySerde());
    assertNull(((AbstractStream) table1.transformValues(valueTransformerWithKeySupplier)).valueSerde());
    assertEquals(((AbstractStream) table1.transformValues(valueTransformerWithKeySupplier, Materialized.with(mySerde, mySerde))).keySerde(), mySerde);
    assertEquals(((AbstractStream) table1.transformValues(valueTransformerWithKeySupplier, Materialized.with(mySerde, mySerde))).valueSerde(), mySerde);

    assertNull(((AbstractStream) table1.groupBy(KeyValue::new)).keySerde());
    assertNull(((AbstractStream) table1.groupBy(KeyValue::new)).valueSerde());
    assertEquals(((AbstractStream) table1.groupBy(KeyValue::new, Grouped.with(mySerde, mySerde))).keySerde(), mySerde);
    assertEquals(((AbstractStream) table1.groupBy(KeyValue::new, Grouped.with(mySerde, mySerde))).valueSerde(), mySerde);

    assertEquals(((AbstractStream) table1.join(table1, joiner)).keySerde(), consumedInternal.keySerde());
    assertNull(((AbstractStream) table1.join(table1, joiner)).valueSerde());
    assertEquals(((AbstractStream) table1.join(table1, joiner, Materialized.with(mySerde, mySerde))).keySerde(), mySerde);
    assertEquals(((AbstractStream) table1.join(table1, joiner, Materialized.with(mySerde, mySerde))).valueSerde(), mySerde);

    assertEquals(((AbstractStream) table1.leftJoin(table1, joiner)).keySerde(), consumedInternal.keySerde());
    assertNull(((AbstractStream) table1.leftJoin(table1, joiner)).valueSerde());
    assertEquals(((AbstractStream) table1.leftJoin(table1, joiner, Materialized.with(mySerde, mySerde))).keySerde(), mySerde);
    assertEquals(((AbstractStream) table1.leftJoin(table1, joiner, Materialized.with(mySerde, mySerde))).valueSerde(), mySerde);

    assertEquals(((AbstractStream) table1.outerJoin(table1, joiner)).keySerde(), consumedInternal.keySerde());
    assertNull(((AbstractStream) table1.outerJoin(table1, joiner)).valueSerde());
    assertEquals(((AbstractStream) table1.outerJoin(table1, joiner, Materialized.with(mySerde, mySerde))).keySerde(), mySerde);
    assertEquals(((AbstractStream) table1.outerJoin(table1, joiner, Materialized.with(mySerde, mySerde))).valueSerde(), mySerde);
}
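The pattern behind these assertions: operators that cannot know the result value type, such as mapValues, transformValues, and the table joins, report a null value serde (falling back to the configured default at runtime) unless a Materialized or Grouped pins the serdes explicitly. A minimal sketch of pinning the serde after a type-changing mapValues; the names here are illustrative:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Produced;

public class SerdePinningSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, String> source =
            builder.table("topic-2", Consumed.with(Serdes.String(), Serdes.String()));
        // Without Materialized, the mapped table's value serde would be null
        // (unknown), as the assertions above show; Materialized.with pins it.
        final KTable<String, Long> lengths = source.mapValues(
            value -> (long) value.length(),
            Materialized.with(Serdes.String(), Serdes.Long()));
        lengths.toStream().to("value-lengths", Produced.with(Serdes.String(), Serdes.Long()));
    }
}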
use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.
the class KStreamImplTest method testNumProcesses.
// specifically testing the deprecated variant
@SuppressWarnings({ "unchecked", "deprecation" })
@Test
public void testNumProcesses() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> source1 = builder.stream(Arrays.asList("topic-1", "topic-2"), stringConsumed);
    final KStream<String, String> source2 = builder.stream(Arrays.asList("topic-3", "topic-4"), stringConsumed);
    final KStream<String, String> stream1 = source1.filter((key, value) -> true).filterNot((key, value) -> false);
    final KStream<String, Integer> stream2 = stream1.mapValues((ValueMapper<String, Integer>) Integer::valueOf);
    final KStream<String, Integer> stream3 = source2.flatMapValues(
        (ValueMapper<String, Iterable<Integer>>) value -> Collections.singletonList(Integer.valueOf(value)));
    final KStream<String, Integer>[] streams2 = stream2.branch((key, value) -> (value % 2) == 0, (key, value) -> true);
    final KStream<String, Integer>[] streams3 = stream3.branch((key, value) -> (value % 2) == 0, (key, value) -> true);
    final int anyWindowSize = 1;
    final StreamJoined<String, Integer, Integer> joined = StreamJoined.with(Serdes.String(), Serdes.Integer(), Serdes.Integer());
    final KStream<String, Integer> stream4 = streams2[0].join(streams3[0], Integer::sum, JoinWindows.of(ofMillis(anyWindowSize)), joined);
    streams2[1].join(streams3[1], Integer::sum, JoinWindows.of(ofMillis(anyWindowSize)), joined);
    stream4.to("topic-5");
    streams2[1].through("topic-6").process(new MockProcessorSupplier<>());
    streams2[1].repartition().process(new MockProcessorSupplier<>());
    assertEquals(2 + // sources
        2 + // stream1
        1 + // stream2
        1 + // stream3
        1 + 2 + // streams2
        1 + 2 + // streams3
        5 * 2 + // stream2-stream3 joins
        1 + // to
        2 + // through
        1 + // process
        3 + // repartition
        1, // process
        TopologyWrapper.getInternalTopologyBuilder(builder.build()).setApplicationId("X").buildTopology().processors().size());
}
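TopologyWrapper and InternalTopologyBuilder are test-scope internals used here to count processor nodes. Application code that just wants to see how many nodes an operator chain creates can use the public Topology#describe() instead; a small sketch, with illustrative topic names:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.ValueMapper;

public class DescribeTopologySketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("topic-1", Consumed.with(Serdes.String(), Serdes.String()))
            .mapValues((ValueMapper<String, Integer>) Integer::valueOf)
            .to("topic-2", Produced.with(Serdes.String(), Serdes.Integer()));
        // Lists every source, processor, and sink node, grouped by sub-topology.
        System.out.println(builder.build().describe());
    }
}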