Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class KStreamMapValuesTest, method testMapValuesWithKeys.
@Test
public void testMapValuesWithKeys() {
    final StreamsBuilder builder = new StreamsBuilder();
    // The mapped value is the length of the string value plus the read-only key.
    final ValueMapperWithKey<Integer, CharSequence, Integer> mapper =
        (readOnlyKey, value) -> value.length() + readOnlyKey;
    final int[] expectedKeys = { 1, 10, 100, 1000 };
    final KStream<Integer, String> stream =
        builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    stream.mapValues(mapper).process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic =
            driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            // Record timestamps are half the key value.
            inputTopic.pipeInput(expectedKey, Integer.toString(expectedKey), expectedKey / 2L);
        }
    }
    // Expected value = value length + key; expected timestamp = key / 2.
    final KeyValueTimestamp[] expected = {
        new KeyValueTimestamp<>(1, 2, 0),
        new KeyValueTimestamp<>(10, 12, 5),
        new KeyValueTimestamp<>(100, 103, 50),
        new KeyValueTimestamp<>(1000, 1004, 500)
    };
    assertArrayEquals(expected, supplier.theCapturedProcessor().processed().toArray());
}
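The topicName, props, and supplier fields are class-level fixtures that do not appear in this excerpt: supplier is a mock processor supplier from Kafka's internal test utilities that records whatever the topology forwards, and props carries the streams configuration for the TopologyTestDriver. A minimal sketch of what such fixtures could look like (the names and values below are assumptions, not copied from the test class):

// Hypothetical fixtures: TopologyTestDriver needs at least an application id; a bootstrap
// server setting is conventional even though the driver never connects to a broker.
private final String topicName = "topic";
private final Properties props = new Properties();

@Before
public void setUp() {
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "kstream-map-values-test");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");
}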
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class KStreamPeekTest, method shouldObserveStreamElements.
@Test
public void shouldObserveStreamElements() {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Integer, String> stream =
        builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    final List<KeyValue<Integer, String>> peekObserved = new ArrayList<>();
    final List<KeyValue<Integer, String>> streamObserved = new ArrayList<>();
    // peek() must observe every record without altering the stream that foreach() consumes.
    stream.peek(collect(peekObserved)).foreach(collect(streamObserved));
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic =
            driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        final List<KeyValue<Integer, String>> expected = new ArrayList<>();
        for (int key = 0; key < 32; key++) {
            final String value = "V" + key;
            inputTopic.pipeInput(key, value);
            expected.add(new KeyValue<>(key, value));
        }
        assertEquals(expected, peekObserved);
        assertEquals(expected, streamObserved);
    }
}
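The collect(...) helper is not shown in this excerpt. A plausible sketch, assuming it simply records every observed key-value pair into the supplied list (the exact name and shape are an assumption):

// Hypothetical helper: a ForeachAction that appends each record it sees to the given list.
private static <K, V> ForeachAction<K, V> collect(final List<KeyValue<K, V>> into) {
    return (key, value) -> into.add(new KeyValue<>(key, value));
}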
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class KStreamRepartitionTest, method shouldInvokePartitionerWhenSet.
@Test
public void shouldInvokePartitionerWhenSet() {
    final int[] expectedKeys = new int[] { 0, 1 };
    final StreamPartitioner<Integer, String> streamPartitionerMock = EasyMock.mock(StreamPartitioner.class);
    // The mock routes both records to partition 1 and expects exactly one call per key.
    expect(streamPartitionerMock.partition(anyString(), eq(0), eq("X0"), anyInt())).andReturn(1).times(1);
    expect(streamPartitionerMock.partition(anyString(), eq(1), eq("X1"), anyInt())).andReturn(1).times(1);
    replay(streamPartitionerMock);
    final String repartitionOperationName = "test";
    final Repartitioned<Integer, String> repartitioned =
        Repartitioned.streamPartitioner(streamPartitionerMock).withName(repartitionOperationName);
    builder.<Integer, String>stream(inputTopic).repartition(repartitioned);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> testInputTopic =
            driver.createInputTopic(inputTopic, new IntegerSerializer(), new StringSerializer());
        final String topicName = repartitionOutputTopic(props, repartitionOperationName);
        final TestOutputTopic<Integer, String> testOutputTopic =
            driver.createOutputTopic(topicName, new IntegerDeserializer(), new StringDeserializer());
        for (int i = 0; i < 2; i++) {
            testInputTopic.pipeInput(expectedKeys[i], "X" + expectedKeys[i], i + 10);
        }
        assertThat(testOutputTopic.readRecord(), equalTo(new TestRecord<>(0, "X0", Instant.ofEpochMilli(10))));
        assertThat(testOutputTopic.readRecord(), equalTo(new TestRecord<>(1, "X1", Instant.ofEpochMilli(11))));
        assertTrue(testOutputTopic.readRecordsToList().isEmpty());
    }
    verify(streamPartitionerMock);
}
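The repartitionOutputTopic(...) helper is also outside this excerpt. Kafka Streams names internal repartition topics <application.id>-<operation name>-repartition, so a plausible sketch of the helper (an assumption about how the source derives the name) is:

// Hypothetical helper: rebuilds the internal repartition topic name from the application id
// and the name given to the repartition operation.
private static String repartitionOutputTopic(final Properties props, final String repartitionOperationName) {
    return props.getProperty(StreamsConfig.APPLICATION_ID_CONFIG)
        + "-" + repartitionOperationName + "-repartition";
}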
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class KStreamBranchTest, method testKStreamBranch.
// Old PAPI. Needs to be migrated.
@SuppressWarnings({ "unchecked", "deprecation" })
@Test
public void testKStreamBranch() {
    final StreamsBuilder builder = new StreamsBuilder();
    final Predicate<Integer, String> isEven = (key, value) -> (key % 2) == 0;
    final Predicate<Integer, String> isMultipleOfThree = (key, value) -> (key % 3) == 0;
    final Predicate<Integer, String> isOdd = (key, value) -> (key % 2) != 0;
    final int[] expectedKeys = new int[] { 1, 2, 3, 4, 5, 6 };
    final KStream<Integer, String> stream =
        builder.stream(topicName, Consumed.with(Serdes.Integer(), Serdes.String()));
    // Each record is routed to the first branch whose predicate matches.
    final KStream<Integer, String>[] branches = stream.branch(isEven, isMultipleOfThree, isOdd);
    assertEquals(3, branches.length);
    final MockProcessorSupplier<Integer, String> supplier = new MockProcessorSupplier<>();
    for (final KStream<Integer, String> branch : branches) {
        branch.process(supplier);
    }
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<Integer, String> inputTopic =
            driver.createInputTopic(topicName, new IntegerSerializer(), new StringSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, "V" + expectedKey);
        }
    }
    // Keys 2, 4, 6 are even; 3 is the only remaining multiple of three; 1 and 5 are odd.
    final List<MockProcessor<Integer, String>> processors = supplier.capturedProcessors(3);
    assertEquals(3, processors.get(0).processed().size());
    assertEquals(1, processors.get(1).processed().size());
    assertEquals(2, processors.get(2).processed().size());
}
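KStream#branch is deprecated in favor of KStream#split, which names each branch and returns a Map of branch name to stream. A minimal migration sketch using org.apache.kafka.streams.kstream.Named and Branched (the split prefix and branch names below are arbitrary choices, not taken from the source):

// Hypothetical migration sketch: split() keys the resulting Map by "<split prefix><branch name>".
final Map<String, KStream<Integer, String>> namedBranches = stream
    .split(Named.as("split-"))
    .branch(isEven, Branched.as("even"))
    .branch(isMultipleOfThree, Branched.as("multiple-of-three"))
    .branch(isOdd, Branched.as("odd"))
    .noDefaultBranch();
final KStream<Integer, String> evenBranch = namedBranches.get("split-even");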
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache.
From the class KStreamKStreamJoinTest, method runJoin.
private void runJoin(final StreamJoined<String, Integer, Integer> streamJoined, final JoinWindows joinWindows) {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, Integer> left = builder.stream("left", Consumed.with(Serdes.String(), Serdes.Integer()));
    final KStream<String, Integer> right = builder.stream("right", Consumed.with(Serdes.String(), Serdes.Integer()));
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    // Sum the left and right values whenever records with the same key fall into the join window.
    final KStream<String, Integer> joinedStream = left.join(right, Integer::sum, joinWindows, streamJoined);
    joinedStream.process(supplier);
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, Integer> inputTopicLeft =
            driver.createInputTopic("left", new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final TestInputTopic<String, Integer> inputTopicRight =
            driver.createInputTopic("right", new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<String, Integer, Void, Void> processor = supplier.theCapturedProcessor();
        inputTopicLeft.pipeInput("A", 1, 1L);
        inputTopicLeft.pipeInput("B", 1, 2L);
        inputTopicRight.pipeInput("A", 1, 1L);
        inputTopicRight.pipeInput("B", 2, 2L);
        // A: 1 + 1 = 2 at timestamp 1; B: 1 + 2 = 3 at timestamp 2.
        processor.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", 2, 1L),
            new KeyValueTimestamp<>("B", 3, 2L));
    }
}
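For context, callers of runJoin supply the serdes and the join window. A minimal hedged example of such a call (the 100 ms window is an arbitrary choice, not taken from the source):

// Hypothetical invocation: explicit serdes plus a 100 ms join window with no grace period.
runJoin(
    StreamJoined.with(Serdes.String(), Serdes.Integer(), Serdes.Integer()),
    JoinWindows.ofTimeDifferenceWithNoGrace(Duration.ofMillis(100)));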