Use of org.apache.kafka.streams.TestInputTopic in project kafka by apache.
From the class AbstractJoinIntegrationTest, method runTestWithDriver.
void runTestWithDriver(final List<List<TestRecord<Long, String>>> expectedResult, final String storeName) {
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(STREAMS_CONFIG), STREAMS_CONFIG)) {
        final TestInputTopic<Long, String> right =
            driver.createInputTopic(INPUT_TOPIC_RIGHT, new LongSerializer(), new StringSerializer());
        final TestInputTopic<Long, String> left =
            driver.createInputTopic(INPUT_TOPIC_LEFT, new LongSerializer(), new StringSerializer());
        final TestOutputTopic<Long, String> outputTopic =
            driver.createOutputTopic(OUTPUT_TOPIC, new LongDeserializer(), new StringDeserializer());
        final Map<String, TestInputTopic<Long, String>> testInputTopicMap = new HashMap<>();
        testInputTopicMap.put(INPUT_TOPIC_RIGHT, right);
        testInputTopicMap.put(INPUT_TOPIC_LEFT, left);

        TestRecord<Long, String> expectedFinalResult = null;
        final long firstTimestamp = time.milliseconds();
        long eventTimestamp = firstTimestamp;
        final Iterator<List<TestRecord<Long, String>>> resultIterator = expectedResult.iterator();

        for (final Input<String> singleInputRecord : input) {
            // Pipe each input record to its topic with a strictly increasing event timestamp.
            testInputTopicMap.get(singleInputRecord.topic)
                .pipeInput(singleInputRecord.record.key, singleInputRecord.record.value, ++eventTimestamp);

            final List<TestRecord<Long, String>> expected = resultIterator.next();
            if (expected != null) {
                // Expected timestamps are stored relative to the first event time; shift them to absolute time.
                final List<TestRecord<Long, String>> updatedExpected = new LinkedList<>();
                for (final TestRecord<Long, String> record : expected) {
                    updatedExpected.add(new TestRecord<>(record.key(), record.value(), null, firstTimestamp + record.timestamp()));
                }

                final List<TestRecord<Long, String>> output = outputTopic.readRecordsToList();
                assertThat(output, equalTo(updatedExpected));
                expectedFinalResult = updatedExpected.get(expected.size() - 1);
            }
        }

        if (storeName != null) {
            checkQueryableStore(storeName, expectedFinalResult, driver);
        }
    }
}
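For context, here is a minimal, self-contained sketch of the TestInputTopic / TestOutputTopic pattern used above, outside the abstract test class. The topic names, the trivial upper-casing topology, and the configuration values are illustrative assumptions, not part of AbstractJoinIntegrationTest.

    import java.util.Properties;

    import org.apache.kafka.common.serialization.LongDeserializer;
    import org.apache.kafka.common.serialization.LongSerializer;
    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.common.serialization.StringDeserializer;
    import org.apache.kafka.common.serialization.StringSerializer;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.StreamsConfig;
    import org.apache.kafka.streams.TestInputTopic;
    import org.apache.kafka.streams.TestOutputTopic;
    import org.apache.kafka.streams.TopologyTestDriver;
    import org.apache.kafka.streams.kstream.Consumed;
    import org.apache.kafka.streams.kstream.Produced;

    public class TestInputTopicSketch {
        public static void main(final String[] args) {
            // Hypothetical topic names; any names work with TopologyTestDriver.
            final String inputTopic = "input-topic";
            final String outputTopic = "output-topic";

            // A trivial topology for illustration: upper-case every value.
            final StreamsBuilder builder = new StreamsBuilder();
            builder.stream(inputTopic, Consumed.with(Serdes.Long(), Serdes.String()))
                   .mapValues(value -> value.toUpperCase())
                   .to(outputTopic, Produced.with(Serdes.Long(), Serdes.String()));

            final Properties config = new Properties();
            config.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-input-topic-sketch");
            config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234"); // never contacted by the driver

            try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
                final TestInputTopic<Long, String> in =
                    driver.createInputTopic(inputTopic, new LongSerializer(), new StringSerializer());
                final TestOutputTopic<Long, String> out =
                    driver.createOutputTopic(outputTopic, new LongDeserializer(), new StringDeserializer());

                // Pipe a record through the in-memory driver and read the result back.
                in.pipeInput(1L, "hello");
                System.out.println(out.readKeyValue()); // KeyValue(1, HELLO)
            }
        }
    }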
A second overload of runTestWithDriver from the same class AbstractJoinIntegrationTest, which checks only the final output record.
void runTestWithDriver(final TestRecord<Long, String> expectedFinalResult, final String storeName) throws InterruptedException {
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(STREAMS_CONFIG), STREAMS_CONFIG)) {
        final TestInputTopic<Long, String> right =
            driver.createInputTopic(INPUT_TOPIC_RIGHT, new LongSerializer(), new StringSerializer());
        final TestInputTopic<Long, String> left =
            driver.createInputTopic(INPUT_TOPIC_LEFT, new LongSerializer(), new StringSerializer());
        final TestOutputTopic<Long, String> outputTopic =
            driver.createOutputTopic(OUTPUT_TOPIC, new LongDeserializer(), new StringDeserializer());
        final Map<String, TestInputTopic<Long, String>> testInputTopicMap = new HashMap<>();
        testInputTopicMap.put(INPUT_TOPIC_RIGHT, right);
        testInputTopicMap.put(INPUT_TOPIC_LEFT, left);

        final long firstTimestamp = time.milliseconds();
        long eventTimestamp = firstTimestamp;
        for (final Input<String> singleInputRecord : input) {
            testInputTopicMap.get(singleInputRecord.topic)
                .pipeInput(singleInputRecord.record.key, singleInputRecord.record.value, ++eventTimestamp);
        }

        // Only the last output record is compared; shift the expected (relative) timestamp to absolute time.
        final TestRecord<Long, String> updatedExpectedFinalResult =
            new TestRecord<>(expectedFinalResult.key(), expectedFinalResult.value(), null, firstTimestamp + expectedFinalResult.timestamp());
        final List<TestRecord<Long, String>> output = outputTopic.readRecordsToList();
        assertThat(output.get(output.size() - 1), equalTo(updatedExpectedFinalResult));

        if (storeName != null) {
            checkQueryableStore(storeName, updatedExpectedFinalResult, driver);
        }
    }
}
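The key convention in both overloads is that expected timestamps are offsets from the first event time, which the test adds back before asserting. A hedged, self-contained sketch of that final-record comparison is below; the pass-through topology, topic names, application id, and the concrete records are assumptions chosen for illustration, not taken from the Kafka test.

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.equalTo;

    import java.util.List;
    import java.util.Properties;

    import org.apache.kafka.common.serialization.LongDeserializer;
    import org.apache.kafka.common.serialization.LongSerializer;
    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.common.serialization.StringDeserializer;
    import org.apache.kafka.common.serialization.StringSerializer;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.StreamsConfig;
    import org.apache.kafka.streams.TestInputTopic;
    import org.apache.kafka.streams.TestOutputTopic;
    import org.apache.kafka.streams.TopologyTestDriver;
    import org.apache.kafka.streams.kstream.Consumed;
    import org.apache.kafka.streams.kstream.Produced;
    import org.apache.kafka.streams.test.TestRecord;

    public class FinalResultSketch {
        public static void main(final String[] args) {
            // Hypothetical pass-through topology; the real join tests build theirs in the abstract class.
            final StreamsBuilder builder = new StreamsBuilder();
            builder.stream("in", Consumed.with(Serdes.Long(), Serdes.String()))
                   .to("out", Produced.with(Serdes.Long(), Serdes.String()));

            final Properties config = new Properties();
            config.put(StreamsConfig.APPLICATION_ID_CONFIG, "final-result-sketch");
            config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

            try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
                final TestInputTopic<Long, String> in =
                    driver.createInputTopic("in", new LongSerializer(), new StringSerializer());
                final TestOutputTopic<Long, String> out =
                    driver.createOutputTopic("out", new LongDeserializer(), new StringDeserializer());

                // Mirror the test's timestamp convention: each record is piped at firstTimestamp + offset.
                final long firstTimestamp = 0L;
                in.pipeInput(1L, "a", firstTimestamp + 1);
                in.pipeInput(1L, "b", firstTimestamp + 2);

                // Compare only the last emitted record against an expected record whose
                // relative timestamp (offset 2) has been shifted by firstTimestamp.
                final TestRecord<Long, String> expectedFinalResult =
                    new TestRecord<>(1L, "b", null, firstTimestamp + 2);
                final List<TestRecord<Long, String>> output = out.readRecordsToList();
                assertThat(output.get(output.size() - 1), equalTo(expectedFinalResult));
                System.out.println("final record matched: " + output.get(output.size() - 1));
            }
        }
    }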