Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.
From the class KStreamMapTest, method testMap:
@Test
public void testMap() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topicName = "topic";
    final int[] expectedKeys = new int[] { 0, 1, 2, 3 };
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final KStream<Integer, String> stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.map((key, value) -> KeyValue.pair(value, key)).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        // Create the input topic once, then pipe all records through it.
        final TestInputTopic<Integer, String> inputTopic =
            driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey, 10L - expectedKey);
        }
    }
    final KeyValueTimestamp[] expected = new KeyValueTimestamp[] {
        new KeyValueTimestamp<>("V0", 0, 10),
        new KeyValueTimestamp<>("V1", 1, 9),
        new KeyValueTimestamp<>("V2", 2, 8),
        new KeyValueTimestamp<>("V3", 3, 7)
    };
    assertEquals(4, supplier.theCapturedProcessor().processed().size());
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], supplier.theCapturedProcessor().processed().get(i));
    }
}
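Every example on this page hands a props field to the TopologyTestDriver without showing its definition; it is a class-level field in the original tests. A minimal sketch of a configuration that would work, assuming a dummy application id and bootstrap server (TopologyTestDriver never contacts a real broker); the actual tests may build it through a shared test utility:

    import java.util.Properties;
    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.streams.StreamsConfig;

    // Hypothetical stand-in for the props field used by the TopologyTestDriver above.
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-app");       // any id works for the test driver
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");  // never contacted by TopologyTestDriver
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());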
Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.
From the class KStreamMapValuesTest, method testMapValuesWithKeys:
@Test
public void testMapValuesWithKeys() {
    final StreamsBuilder builder = new StreamsBuilder();
    final ValueMapperWithKey<Integer, CharSequence, Integer> mapper = (readOnlyKey, value) -> value.length() + readOnlyKey;
    final int[] expectedKeys = { 1, 10, 100, 1000 };
    final KStream<Integer, String> stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.mapValues(mapper).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, Integer.toString(expectedKey), expectedKey / 2L);
        }
    }
    final KeyValueTimestamp[] expected = {
        new KeyValueTimestamp<>(1, 2, 0),
        new KeyValueTimestamp<>(10, 12, 5),
        new KeyValueTimestamp<>(100, 103, 50),
        new KeyValueTimestamp<>(1000, 1004, 500)
    };
    assertArrayEquals(expected, supplier.theCapturedProcessor().processed().toArray());
}
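The loop above pipes each record individually with an explicit timestamp. TestInputTopic also offers list-based variants; a minimal sketch, assuming the same inputTopic and java.util.Arrays on the classpath, that pipes the same keys with auto-generated timestamps (the 1 ms step is illustrative and yields different timestamps than the explicit ones used above):

    // List-based variant: the first record gets the start timestamp, and each
    // subsequent record's timestamp advances by the given duration.
    inputTopic.pipeKeyValueList(
        Arrays.asList(new KeyValue<>(1, "1"), new KeyValue<>(10, "10"),
            new KeyValue<>(100, "100"), new KeyValue<>(1000, "1000")),
        Instant.ofEpochMilli(0L),
        Duration.ofMillis(1L));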
Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.
From the class KStreamBranchTest, method testKStreamBranch:
// Old PAPI. Needs to be migrated.
@SuppressWarnings({ "unchecked", "deprecation" })
@Test
public void testKStreamBranch() {
    final StreamsBuilder builder = new StreamsBuilder();
    final Predicate<Integer, String> isEven = (key, value) -> (key % 2) == 0;
    final Predicate<Integer, String> isMultipleOfThree = (key, value) -> (key % 3) == 0;
    final Predicate<Integer, String> isOdd = (key, value) -> (key % 2) != 0;
    final int[] expectedKeys = new int[] { 1, 2, 3, 4, 5, 6 };
    final KStream<Integer, String> stream;
    final KStream<Integer, String>[] branches;
    stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    branches = stream.branch(isEven, isMultipleOfThree, isOdd);
    assertEquals(3, branches.length);
    final MockProcessorSupplier<Integer, String> supplier = new MockProcessorSupplier<>();
    for (final KStream<Integer, String> branch : branches) {
        branch.process(supplier);
    }
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic = driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
    }
    // Each record goes to the first matching predicate: even -> {2, 4, 6}, multiple of three -> {3}, odd -> {1, 5}.
    final List<MockProcessor<Integer, String>> processors = supplier.capturedProcessors(3);
    assertEquals(3, processors.get(0).processed().size());
    assertEquals(1, processors.get(1).processed().size());
    assertEquals(2, processors.get(2).processed().size());
}
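stream.branch(...) is deprecated in newer Kafka Streams releases in favor of split(). A minimal sketch of the equivalent routing with the split()/Branched API, reusing the predicates above (the "split-" prefix and branch names are illustrative):

    import java.util.Map;
    import org.apache.kafka.streams.kstream.Branched;
    import org.apache.kafka.streams.kstream.Named;

    // As with branch(), each record is routed to the first branch whose predicate matches.
    final Map<String, KStream<Integer, String>> branchMap = stream
        .split(Named.as("split-"))
        .branch(isEven, Branched.as("even"))
        .branch(isMultipleOfThree, Branched.as("multipleOfThree"))
        .branch(isOdd, Branched.as("odd"))
        .noDefaultBranch();
    // Branches are keyed by prefix + name, e.g. "split-even".
    final KStream<Integer, String> evenBranch = branchMap.get("split-even");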
Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.
From the class KStreamImplTest, method shouldSupportKeyChangeKTableFromKStream:
@Test
public void shouldSupportKeyChangeKTableFromKStream() {
    final Consumed<String, String> consumed = Consumed.with(Serdes.String(), Serdes.String());
    final StreamsBuilder builder = new StreamsBuilder();
    final String input = "input";
    final String output = "output";
    builder.stream(input, consumed)
        .map((key, value) -> new KeyValue<>(key.charAt(0) - 'A', value))
        .toTable(Materialized.with(Serdes.Integer(), null)).toStream().to(output);
    final Topology topology = builder.build();
    final String topologyDescription = topology.describe().toString();
    assertThat(topologyDescription, equalTo("Topologies:\n"
        + " Sub-topology: 0\n"
        + " Source: KSTREAM-SOURCE-0000000000 (topics: [input])\n"
        + " --> KSTREAM-MAP-0000000001\n"
        + " Processor: KSTREAM-MAP-0000000001 (stores: [])\n"
        + " --> KSTREAM-FILTER-0000000005\n"
        + " <-- KSTREAM-SOURCE-0000000000\n"
        + " Processor: KSTREAM-FILTER-0000000005 (stores: [])\n"
        + " --> KSTREAM-SINK-0000000004\n"
        + " <-- KSTREAM-MAP-0000000001\n"
        + " Sink: KSTREAM-SINK-0000000004 (topic: KSTREAM-TOTABLE-0000000002-repartition)\n"
        + " <-- KSTREAM-FILTER-0000000005\n"
        + "\n"
        + " Sub-topology: 1\n"
        + " Source: KSTREAM-SOURCE-0000000006 (topics: [KSTREAM-TOTABLE-0000000002-repartition])\n"
        + " --> KSTREAM-TOTABLE-0000000002\n"
        + " Processor: KSTREAM-TOTABLE-0000000002 (stores: [])\n"
        + " --> KTABLE-TOSTREAM-0000000007\n"
        + " <-- KSTREAM-SOURCE-0000000006\n"
        + " Processor: KTABLE-TOSTREAM-0000000007 (stores: [])\n"
        + " --> KSTREAM-SINK-0000000008\n"
        + " <-- KSTREAM-TOTABLE-0000000002\n"
        + " Sink: KSTREAM-SINK-0000000008 (topic: output)\n"
        + " <-- KTABLE-TOSTREAM-0000000007\n\n"));
    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> inputTopic =
            driver.createInputTopic(input, Serdes.String().serializer(), Serdes.String().serializer());
        final TestOutputTopic<Integer, String> outputTopic =
            driver.createOutputTopic(output, Serdes.Integer().deserializer(), Serdes.String().deserializer());
        inputTopic.pipeInput("A", "01", 5L);
        inputTopic.pipeInput("B", "02", 100L);
        inputTopic.pipeInput("C", "03", 0L);
        inputTopic.pipeInput("D", "04", 0L);
        inputTopic.pipeInput("A", "05", 10L);
        inputTopic.pipeInput("A", "06", 8L);
        final List<TestRecord<Integer, String>> outputExpectRecords = new ArrayList<>();
        outputExpectRecords.add(new TestRecord<>(0, "01", Instant.ofEpochMilli(5L)));
        outputExpectRecords.add(new TestRecord<>(1, "02", Instant.ofEpochMilli(100L)));
        outputExpectRecords.add(new TestRecord<>(2, "03", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>(3, "04", Instant.ofEpochMilli(0L)));
        outputExpectRecords.add(new TestRecord<>(0, "05", Instant.ofEpochMilli(10L)));
        outputExpectRecords.add(new TestRecord<>(0, "06", Instant.ofEpochMilli(8L)));
        assertEquals(outputTopic.readRecordsToList(), outputExpectRecords);
    }
}
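readRecordsToList() drains the output topic into full TestRecord objects (key, value, timestamp, headers). When only keys and values matter, TestOutputTopic has lighter-weight readers; a minimal sketch against the same outputTopic, used in place of readRecordsToList() since each call consumes the queued records:

    // Key/value pairs only, timestamps dropped (java.util.List and java.util.Map assumed imported).
    final List<KeyValue<Integer, String>> keyValues = outputTopic.readKeyValuesToList();
    // Or the latest value per key, table-style.
    final Map<Integer, String> latestByKey = outputTopic.readKeyValuesToMap();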
Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.
From the class KStreamFlatMapTest, method testFlatMap:
@Test
public void testFlatMap() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topicName = "topic";
    final KeyValueMapper<Number, Object, Iterable<KeyValue<String, String>>> mapper = (key, value) -> {
        final ArrayList<KeyValue<String, String>> result = new ArrayList<>();
        for (int i = 0; i < key.intValue(); i++) {
            result.add(KeyValue.pair(Integer.toString(key.intValue() * 10 + i), value.toString()));
        }
        return result;
    };
    final int[] expectedKeys = { 0, 1, 2, 3 };
    final KStream<Integer, String> stream;
    final MockApiProcessorSupplier<String, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
    stream = builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.flatMap(mapper).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic =
            driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0), Duration.ZERO);
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
    }
    assertEquals(6, supplier.theCapturedProcessor().processed().size());
    final KeyValueTimestamp[] expected = {
        new KeyValueTimestamp<>("10", "V1", 0),
        new KeyValueTimestamp<>("20", "V2", 0),
        new KeyValueTimestamp<>("21", "V2", 0),
        new KeyValueTimestamp<>("30", "V3", 0),
        new KeyValueTimestamp<>("31", "V3", 0),
        new KeyValueTimestamp<>("32", "V3", 0)
    };
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i], supplier.theCapturedProcessor().processed().get(i));
    }
}
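The input topic here is created with a base time of epoch 0 and an auto-advance of Duration.ZERO, so every piped record carries timestamp 0, which is why all expected timestamps are 0. A minimal sketch of how a non-zero auto-advance would assign timestamps inside the same try-with-resources block (the 10 ms step is illustrative):

    // Without an explicit timestamp, each record takes the current base time,
    // which then advances by the configured step.
    final TestInputTopic<Integer, String> advancingTopic =
        driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer(),
            Instant.ofEpochMilli(0L), Duration.ofMillis(10L));
    advancingTopic.pipeInput(0, "V0");  // timestamp 0
    advancingTopic.pipeInput(1, "V1");  // timestamp 10
    advancingTopic.pipeInput(2, "V2");  // timestamp 20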