Usage of io.confluent.ksql.test.tools.TopicInfoCache.TopicInfo in the project ksql by confluentinc.
Class TestExecutor, method serialize.
/**
 * Converts a typed producer record into a raw {@code byte[]/byte[]} record using the
 * serializers registered for the record's topic.
 *
 * @param rec the record whose key and value should be serialized
 * @return a new record carrying the serialized key and value, with topic, partition,
 *     timestamp and headers copied from {@code rec}
 * @throws KsqlException if no {@link TopicInfo} is cached for the record's topic
 * @throws AssertionError if either the key or the value fails to serialize; the original
 *     exception is retained as the cause
 */
private ProducerRecord<byte[], byte[]> serialize(final ProducerRecord<?, ?> rec) {
  final String topic = rec.topic();

  final TopicInfo topicInfo = topicInfoCache.get(topic)
      .orElseThrow(() -> new KsqlException("No information found for topic: " + topic));

  final byte[] serializedKey;
  try {
    serializedKey = topicInfo.getKeySerializer().serialize(topic, rec.key());
  } catch (final Exception e) {
    // Surface the failing record in the assertion message to make test failures actionable.
    throw new AssertionError(
        "Failed to serialize key: " + e.getMessage() + System.lineSeparator() + "rec: " + rec,
        e);
  }

  final byte[] serializedValue;
  try {
    serializedValue = topicInfo.getValueSerializer().serialize(topic, rec.value());
  } catch (final Exception e) {
    throw new AssertionError(
        "Failed to serialize value: " + e.getMessage() + System.lineSeparator() + "rec: " + rec,
        e);
  }

  return new ProducerRecord<>(
      topic, rec.partition(), rec.timestamp(), serializedKey, serializedValue, rec.headers());
}
Usage of io.confluent.ksql.test.tools.TopicInfoCache.TopicInfo in the project ksql by confluentinc.
Class TestExecutor, method verifyOutput.
/**
 * Verifies the records produced to Kafka against the test case's expected output records.
 *
 * <p>Expected records are grouped by topic; for each such topic the actual records are read
 * back from Kafka, deserialized with that topic's configured deserializers, and compared.
 * Topics that received records but have no expectations in the test case are not checked here.
 *
 * @param testCase the test case whose expected output should be validated
 * @throws KsqlException if no {@link TopicInfo} is cached for an expected topic
 */
private void verifyOutput(final TestCase testCase) {
  // Replaced `size() == 0` with the idiomatic isEmpty(). A test with no input records
  // must have been driven by INSERT statements instead.
  final boolean ranWithInsertStatements = testCase.getInputRecords().isEmpty();

  final Map<String, List<Record>> expectedByTopic = testCase.getOutputRecords().stream()
      .collect(Collectors.groupingBy(Record::getTopicName));

  // Only read back topics we actually have expectations for.
  final Map<String, List<ProducerRecord<byte[], byte[]>>> actualByTopic =
      expectedByTopic.keySet().stream()
          .collect(Collectors.toMap(Function.identity(), kafka::readRecords));

  expectedByTopic.forEach((kafkaTopic, expectedRecords) -> {
    final TopicInfo topicInfo = topicInfoCache.get(kafkaTopic)
        .orElseThrow(() -> new KsqlException("No information found for topic: " + kafkaTopic));

    final List<ProducerRecord<?, ?>> actualRecords =
        actualByTopic.getOrDefault(kafkaTopic, ImmutableList.of()).stream()
            .map(rec -> deserialize(rec, topicInfo))
            .collect(Collectors.toList());

    if (validateResults) {
      validateTopicData(kafkaTopic, expectedRecords, actualRecords, ranWithInsertStatements);
    }
  });
}
Usage of io.confluent.ksql.test.tools.TopicInfoCache.TopicInfo in the project ksql by confluentinc.
Class RestTestExecutor, method verifyOutput.
/**
 * Verifies that each topic in the test case's expected output received matching records.
 *
 * <p>For every expected topic this waits (via {@code kafkaCluster.verifyAvailableRecords})
 * for the expected number of records, deserializing them with that topic's configured
 * deserializers, then compares each received record to its expected counterpart by
 * key, value, and timestamp.
 *
 * @param testCase the REST test case whose expected output should be validated
 * @throws KsqlException if no {@link TopicInfo} is cached for an expected topic
 */
private void verifyOutput(final RestTestCase testCase) {
  testCase.getOutputsByTopic().forEach((topicName, records) -> {
    final TopicInfo topicInfo = topicInfoCache.get(topicName)
        .orElseThrow(() -> new KsqlException("No information found for topic: " + topicName));

    final List<? extends ConsumerRecord<?, ?>> received =
        kafkaCluster.verifyAvailableRecords(
            topicName,
            records.size(),
            topicInfo.getKeyDeserializer(),
            topicInfo.getValueDeserializer());

    // Pair received and expected records positionally.
    int i = 0;
    while (i < records.size()) {
      compareKeyValueTimestamp(received.get(i), records.get(i));
      i++;
    }
  });
}
Aggregations