Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.
Class CogroupedKStreamImplTest, method shouldAllowDifferentOutputTypeInCoGroup.
@Test
public void shouldAllowDifferentOutputTypeInCoGroup() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> stream1 = builder.stream("one", stringConsumed);
    final KStream<String, String> stream2 = builder.stream("two", stringConsumed);
    final KGroupedStream<String, String> grouped1 = stream1.groupByKey();
    final KGroupedStream<String, String> grouped2 = stream2.groupByKey();
    // Both cogrouped streams carry String values, but the aggregate is materialized as an Integer.
    final KTable<String, Integer> customers = grouped1
        .cogroup(STRING_SUM_AGGREGATOR)
        .cogroup(grouped2, STRING_SUM_AGGREGATOR)
        .aggregate(
            SUM_INITIALIZER,
            Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as("store1")
                .withValueSerde(Serdes.Integer()));
    customers.toStream().to(OUTPUT);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> testInputTopic =
            driver.createInputTopic("one", new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> testInputTopic2 =
            driver.createInputTopic("two", new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, Integer> testOutputTopic =
            driver.createOutputTopic(OUTPUT, new StringDeserializer(), new IntegerDeserializer());
        testInputTopic.pipeInput("k1", "1", 0L);
        testInputTopic.pipeInput("k2", "1", 1L);
        testInputTopic.pipeInput("k1", "1", 10L);
        testInputTopic.pipeInput("k2", "1", 100L);
        testInputTopic2.pipeInput("k2", "2", 100L);
        testInputTopic2.pipeInput("k2", "2", 200L);
        testInputTopic2.pipeInput("k1", "2", 1L);
        testInputTopic2.pipeInput("k2", "2", 500L);
        testInputTopic2.pipeInput("k1", "2", 500L);
        testInputTopic2.pipeInput("k2", "3", 500L);
        testInputTopic2.pipeInput("k3", "2", 500L);
        testInputTopic2.pipeInput("k2", "2", 100L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", 1, 0);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", 1, 1);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", 2, 10);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", 2, 100);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", 4, 100);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", 6, 200);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", 4, 10);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", 8, 500);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", 6, 500);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", 11, 500);
    }
}
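The snippets from CogroupedKStreamImplTest reference fixture fields and helpers (stringConsumed, props, OUTPUT, SUM_INITIALIZER, STRING_SUM_AGGREGATOR, assertOutputKeyValueTimestamp) that are defined elsewhere in the test class. A minimal sketch of what such a fixture could look like follows; it is an assumption inferred from the assertions above, not a copy of the actual Kafka sources.

// Hypothetical fixture sketch; the names mirror the snippets above, but the concrete
// definitions are assumptions, not copied from the real CogroupedKStreamImplTest.
import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.kstream.Aggregator;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Initializer;
import org.apache.kafka.streams.test.TestRecord;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

class CogroupedFixtureSketch {

    static final String OUTPUT = "output";

    final Consumed<String, String> stringConsumed = Consumed.with(Serdes.String(), Serdes.String());

    final Properties props = mkProps();

    static Properties mkProps() {
        final Properties p = new Properties();
        p.put(StreamsConfig.APPLICATION_ID_CONFIG, "cogroup-test");   // illustrative id
        p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");  // never contacted by TopologyTestDriver
        p.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
        p.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class);
        return p;
    }

    // Parses each String record as an int and adds it to the running Integer aggregate,
    // which is what the expected values (1, 2, 4, 6, ...) in the test imply.
    static final Aggregator<String, String, Integer> STRING_SUM_AGGREGATOR =
        (key, value, aggregate) -> aggregate + Integer.parseInt(value);

    static final Initializer<Integer> SUM_INITIALIZER = () -> 0;

    // Pops the next record from the output topic and checks key, value, and timestamp.
    static <V> void assertOutputKeyValueTimestamp(final TestOutputTopic<String, V> topic,
                                                  final String expectedKey,
                                                  final V expectedValue,
                                                  final long expectedTimestamp) {
        assertThat(topic.readRecord(), equalTo(new TestRecord<>(expectedKey, expectedValue, null, expectedTimestamp)));
    }
}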
Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.
Class CogroupedKStreamImplTest, method testCogroupKeyMixedAggregators.
@Test
public void testCogroupKeyMixedAggregators() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> stream1 = builder.stream("one", stringConsumed);
    final KStream<String, String> stream2 = builder.stream("two", stringConsumed);
    final KGroupedStream<String, String> grouped1 = stream1.groupByKey();
    final KGroupedStream<String, String> grouped2 = stream2.groupByKey();
    // Stream "one" contributes through the remover and stream "two" through the adder,
    // both folding into the same String aggregate.
    final KTable<String, String> customers = grouped1
        .cogroup(MockAggregator.TOSTRING_REMOVER)
        .cogroup(grouped2, MockAggregator.TOSTRING_ADDER)
        .aggregate(
            MockInitializer.STRING_INIT,
            Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("store1")
                .withValueSerde(Serdes.String()));
    customers.toStream().to(OUTPUT);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> testInputTopic =
            driver.createInputTopic("one", new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> testInputTopic2 =
            driver.createInputTopic("two", new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> testOutputTopic =
            driver.createOutputTopic(OUTPUT, new StringDeserializer(), new StringDeserializer());
        testInputTopic.pipeInput("k1", "1", 0L);
        testInputTopic.pipeInput("k2", "1", 1L);
        testInputTopic.pipeInput("k1", "1", 10L);
        testInputTopic.pipeInput("k2", "1", 100L);
        testInputTopic2.pipeInput("k1", "2", 500L);
        testInputTopic2.pipeInput("k2", "2", 500L);
        testInputTopic2.pipeInput("k1", "2", 500L);
        testInputTopic2.pipeInput("k2", "2", 100L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0-1", 0);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0-1", 1);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0-1-1", 10);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0-1-1", 100);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0-1-1+2", 500L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0-1-1+2", 500L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "0-1-1+2+2", 500L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "0-1-1+2+2", 500L);
    }
}
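testCogroupKeyMixedAggregators additionally relies on MockInitializer and MockAggregator from Kafka's test utilities. Judging purely by the expected aggregates above ("0-1", "0-1-1+2", ...), they behave roughly as sketched below; treat these definitions as inferred assumptions, not the actual org.apache.kafka.test classes.

// Assumed behaviour of the mock helpers, inferred from the expected aggregate strings;
// the real implementations in the Kafka test sources may differ in detail.
import org.apache.kafka.streams.kstream.Aggregator;
import org.apache.kafka.streams.kstream.Initializer;

final class MockHelpersSketch {

    // MockInitializer.STRING_INIT: every aggregate starts from the literal "0".
    static final Initializer<String> STRING_INIT = () -> "0";

    // MockAggregator.TOSTRING_REMOVER: appends "-<value>" to the running aggregate
    // (used for stream "one", producing "0-1", "0-1-1", ...).
    static final Aggregator<String, String, String> TOSTRING_REMOVER =
        (key, value, aggregate) -> aggregate + "-" + value;

    // MockAggregator.TOSTRING_ADDER: appends "+<value>" to the running aggregate
    // (used for stream "two", producing "0-1-1+2", "0-1-1+2+2", ...).
    static final Aggregator<String, String, String> TOSTRING_ADDER =
        (key, value, aggregate) -> aggregate + "+" + value;
}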
Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.
Class CogroupedKStreamImplTest, method testCogroupWithKTableKTableInnerJoin.
@Test
public void testCogroupWithKTableKTableInnerJoin() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KGroupedStream<String, String> grouped1 = builder.stream("one", stringConsumed).groupByKey();
    final KGroupedStream<String, String> grouped2 = builder.stream("two", stringConsumed).groupByKey();
    final KTable<String, String> table1 = grouped1
        .cogroup(STRING_AGGREGATOR)
        .cogroup(grouped2, STRING_AGGREGATOR)
        .aggregate(STRING_INITIALIZER, Named.as("name"), Materialized.as("store"));
    final KTable<String, String> table2 = builder.table("three", stringConsumed);
    final KTable<String, String> joined = table1.join(
        table2,
        MockValueJoiner.TOSTRING_JOINER,
        Materialized.with(Serdes.String(), Serdes.String()));
    joined.toStream().to(OUTPUT);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, String> testInputTopic =
            driver.createInputTopic("one", new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> testInputTopic2 =
            driver.createInputTopic("two", new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> testInputTopic3 =
            driver.createInputTopic("three", new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> testOutputTopic =
            driver.createOutputTopic(OUTPUT, new StringDeserializer(), new StringDeserializer());
        testInputTopic.pipeInput("k1", "A", 5L);
        testInputTopic2.pipeInput("k2", "B", 6L);
        assertTrue(testOutputTopic.isEmpty());
        testInputTopic3.pipeInput("k1", "C", 0L);
        testInputTopic3.pipeInput("k2", "D", 10L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k1", "A+C", 5L);
        assertOutputKeyValueTimestamp(testOutputTopic, "k2", "B+D", 10L);
        assertTrue(testOutputTopic.isEmpty());
    }
}
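testCogroupWithKTableKTableInnerJoin uses STRING_INITIALIZER, STRING_AGGREGATOR, and MockValueJoiner.TOSTRING_JOINER, none of which appear in the snippet. From the expected outputs "A+C" and "B+D", plausible definitions look like the following sketch; these are assumptions inferred from the assertions, not the actual helpers.

// Inferred sketch of the join-test helpers; not copied from the Kafka sources.
import org.apache.kafka.streams.kstream.Aggregator;
import org.apache.kafka.streams.kstream.Initializer;
import org.apache.kafka.streams.kstream.ValueJoiner;

final class JoinHelpersSketch {

    // Aggregation starts from the empty string...
    static final Initializer<String> STRING_INITIALIZER = () -> "";

    // ...and concatenates incoming values, so a single record "A" yields the aggregate "A".
    static final Aggregator<String, String, String> STRING_AGGREGATOR =
        (key, value, aggregate) -> aggregate + value;

    // MockValueJoiner.TOSTRING_JOINER: joins the two table values with '+',
    // consistent with the expected outputs "A+C" and "B+D".
    static final ValueJoiner<String, String, String> TOSTRING_JOINER =
        (value1, value2) -> value1 + "+" + value2;
}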
Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.
Class KStreamImplTest, method shouldSupportStreamTableJoinWithKStreamToKTable.
@Test
public void shouldSupportStreamTableJoinWithKStreamToKTable() {
    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final String streamTopic = "streamTopic";
    final String tableTopic = "tableTopic";
    final String outputTopic = "output";
    final KStream<String, String> stream = builder.stream(streamTopic, consumed);
    final KTable<String, String> table = builder.stream(tableTopic, consumed).toTable();
    stream.join(table, MockValueJoiner.TOSTRING_JOINER).to(outputTopic);
    final Topology topology = builder.build(props);
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo(
        "Topologies:\n" +
        "   Sub-topology: 0\n" +
        "    Source: KSTREAM-SOURCE-0000000000 (topics: [streamTopic])\n" +
        "      --> KSTREAM-JOIN-0000000004\n" +
        "    Processor: KSTREAM-JOIN-0000000004 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000003])\n" +
        "      --> KSTREAM-SINK-0000000005\n" +
        "      <-- KSTREAM-SOURCE-0000000000\n" +
        "    Source: KSTREAM-SOURCE-0000000001 (topics: [tableTopic])\n" +
        "      --> KSTREAM-TOTABLE-0000000002\n" +
        "    Sink: KSTREAM-SINK-0000000005 (topic: output)\n" +
        "      <-- KSTREAM-JOIN-0000000004\n" +
        "    Processor: KSTREAM-TOTABLE-0000000002 (stores: [KSTREAM-TOTABLE-STATE-STORE-0000000003])\n" +
        "      --> none\n" +
        "      <-- KSTREAM-SOURCE-0000000001\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> left =
            driver.createInputTopic(streamTopic, new StringSerializer(), new StringSerializer());
        final TestInputTopic<String, String> right =
            driver.createInputTopic(tableTopic, new StringSerializer(), new StringSerializer());
        final TestOutputTopic<String, String> output =
            driver.createOutputTopic(outputTopic, new StringDeserializer(), new StringDeserializer());
        right.pipeInput("lhs1", "rhsValue1");
        right.pipeInput("rhs2", "rhsValue2");
        right.pipeInput("lhs3", "rhsValue3");
        // Table-side updates alone never produce join output.
        assertThat(output.readKeyValuesToMap(), is(emptyMap()));
        left.pipeInput("lhs1", "lhsValue1");
        left.pipeInput("lhs2", "lhsValue2");
        // Only lhs1 has a matching table entry; lhs2 is dropped by the inner join.
        final Map<String, String> expected = mkMap(mkEntry("lhs1", "lhsValue1+rhsValue1"));
        assertThat(output.readKeyValuesToMap(), is(expected));
        left.pipeInput("lhs3", "lhsValue3");
        assertThat(output.readKeyValuesToMap(), is(mkMap(mkEntry("lhs3", "lhsValue3+rhsValue3"))));
        left.pipeInput("lhs1", "lhsValue4");
        assertThat(output.readKeyValuesToMap(), is(mkMap(mkEntry("lhs1", "lhsValue4+rhsValue1"))));
    }
}
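The map assertions above use mkMap and mkEntry, which come from org.apache.kafka.common.utils.Utils, and emptyMap() from java.util.Collections, presumably via static imports in the test class. They also depend on TestOutputTopic.readKeyValuesToMap() draining the topic, so each call only sees records produced since the previous read. A small self-contained illustration of that draining behaviour follows; the class name and topic names are invented for the example.

// Minimal, self-contained illustration (not from the Kafka sources) of the TestOutputTopic
// semantics the assertions above rely on: readKeyValuesToMap() drains the topic, so each
// call returns only the records produced since the previous read.
import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class ReadKeyValuesToMapDemo {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        // Trivial pass-through topology: copy "in" to "out".
        builder.stream("in", Consumed.with(Serdes.String(), Serdes.String()))
            .to("out", Produced.with(Serdes.String(), Serdes.String()));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "read-map-demo");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<String, String> in =
                driver.createInputTopic("in", new StringSerializer(), new StringSerializer());
            final TestOutputTopic<String, String> out =
                driver.createOutputTopic("out", new StringDeserializer(), new StringDeserializer());

            in.pipeInput("a", "1");
            System.out.println(out.readKeyValuesToMap());  // {a=1}
            in.pipeInput("b", "2");
            System.out.println(out.readKeyValuesToMap());  // {b=2} -- the earlier record is not re-read
        }
    }
}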
Use of org.apache.kafka.common.serialization.StringDeserializer in project kafka by apache.
Class KStreamImplValueJoinerWithKeyTest, method runJoinTopology.
private void runJoinTopology(final StreamsBuilder builder,
                             final List<KeyValue<String, String>> expectedResults,
                             final boolean isTableJoin,
                             final String rightTopic) {
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, Integer> rightInputTopic =
            driver.createInputTopic(rightTopic, new StringSerializer(), new IntegerSerializer());
        final TestInputTopic<String, Integer> leftInputTopic =
            driver.createInputTopic(leftTopic, new StringSerializer(), new IntegerSerializer());
        final TestOutputTopic<String, String> joinResultTopic =
            driver.createOutputTopic(outputTopic, new StringDeserializer(), new StringDeserializer());
        if (isTableJoin) {
            // Populate the table side first: table updates alone never trigger the join,
            // so output is produced only when the stream-side record arrives.
            rightInputTopic.pipeInput("A", 2);
            leftInputTopic.pipeInput("A", 3);
        } else {
            leftInputTopic.pipeInput("A", 3);
            rightInputTopic.pipeInput("A", 2);
        }
        final List<KeyValue<String, String>> actualResult = joinResultTopic.readKeyValuesToList();
        assertEquals(expectedResults, actualResult);
    }
}
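runJoinTopology is a private helper: each test builds a topology with a ValueJoinerWithKey, computes the expected join results, and delegates the piping and assertion to it. A hypothetical caller is sketched below, assuming the class defines leftTopic, rightTopic, and outputTopic fields matching the helper and that Serdes, Consumed, Joined, Produced, KeyValue, and Collections are imported; the joiner is invented for illustration and this is not one of the actual tests.

// Hypothetical caller of runJoinTopology (not copied from the Kafka sources): a stream-table
// join whose ValueJoinerWithKey folds the read-only key into the join result.
@Test
public void shouldIncludeKeyInStreamTableJoinResultSketch() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, Integer> left =
        builder.stream(leftTopic, Consumed.with(Serdes.String(), Serdes.Integer()));
    final KTable<String, Integer> right =
        builder.table(rightTopic, Consumed.with(Serdes.String(), Serdes.Integer()));

    left.join(
            right,
            (key, leftValue, rightValue) -> key + ":" + leftValue + "+" + rightValue,
            Joined.with(Serdes.String(), Serdes.Integer(), Serdes.Integer()))
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));

    // With isTableJoin = true the helper pipes ("A", 2) into the table and then ("A", 3)
    // into the stream, so the single expected result is "A" -> "A:3+2".
    final List<KeyValue<String, String>> expected =
        Collections.singletonList(KeyValue.pair("A", "A:3+2"));
    runJoinTopology(builder, expected, true, rightTopic);
}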