Use of org.apache.kafka.common.serialization.IntegerDeserializer in project kafka by apache.
The class KTableKTableForeignKeyJoinScenarioTest, method validateTopologyCanProcessData.
private void validateTopologyCanProcessData(final StreamsBuilder builder) {
    final Properties config = new Properties();
    final String safeTestName = safeUniqueTestName(getClass(), testName);
    // assumed: safeTestName feeds a unique application id (otherwise it would be unused here)
    config.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "app-" + safeTestName);
    config.setProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.IntegerSerde.class.getName());
    config.setProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
    config.setProperty(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());
    try (final TopologyTestDriver topologyTestDriver = new TopologyTestDriver(builder.build(), config)) {
        final TestInputTopic<Integer, String> aTopic =
            topologyTestDriver.createInputTopic("A", new IntegerSerializer(), new StringSerializer());
        final TestInputTopic<Integer, String> bTopic =
            topologyTestDriver.createInputTopic("B", new IntegerSerializer(), new StringSerializer());
        final TestOutputTopic<Integer, String> output =
            topologyTestDriver.createOutputTopic("output", new IntegerDeserializer(), new StringDeserializer());
        aTopic.pipeInput(1, "999-alpha");
        bTopic.pipeInput(999, "beta");
        final Map<Integer, String> x = output.readKeyValuesToMap();
        assertThat(x, is(Collections.singletonMap(1, "(999-alpha,(999-alpha,beta))")));
    }
}
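For context, one topology shape that satisfies the assertion above (a sketch, not the test's own builders, which callers of validateTopologyCanProcessData supply): table A is foreign-key-joined to table B on the numeric prefix of A's value, and A is then joined again to that result, producing the nested string "(999-alpha,(999-alpha,beta))" for key 1.

    final KTable<Integer, String> a = builder.table("A");
    final KTable<Integer, String> b = builder.table("B");
    // foreign-key join: "999-alpha" -> FK 999 -> pairs with B's "beta" as "(999-alpha,beta)"
    final KTable<Integer, String> fkJoined = a.join(
        b,
        value -> Integer.parseInt(value.split("-")[0]),
        (aValue, bValue) -> "(" + aValue + "," + bValue + ")");
    // primary-key join of A with the FK-join result: "(999-alpha,(999-alpha,beta))"
    a.join(fkJoined, (aValue, joined) -> "(" + aValue + "," + joined + ")")
        .toStream()
        .to("output");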
Use of org.apache.kafka.common.serialization.IntegerDeserializer in project kafka by apache.
The class KTableSourceTest, method testKTableSourceEmitOnChange.
// we have disabled KIP-557 until KAFKA-12508 can be properly addressed
@Ignore
@Test
public void testKTableSourceEmitOnChange() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    builder.table(topic1, Consumed.with(Serdes.String(), Serdes.Integer()), Materialized.as("store"))
        .toStream()
        .to("output");
    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, Integer> inputTopic =
            driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer());
        final TestOutputTopic<String, Integer> outputTopic =
            driver.createOutputTopic("output", new StringDeserializer(), new IntegerDeserializer());
        inputTopic.pipeInput("A", 1, 10L);
        inputTopic.pipeInput("B", 2, 11L);
        // same key and value as the record at t=10, so emit-on-change drops it
        inputTopic.pipeInput("A", 1, 12L);
        inputTopic.pipeInput("B", 3, 13L);
        // this record should be kept since this is out of order, so the timestamp
        // should be updated in this scenario
        inputTopic.pipeInput("A", 1, 9L);
        // exactly one record (the in-order duplicate at t=12) was skipped
        assertEquals(
            1.0,
            getMetricByName(driver.metrics(), "idempotent-update-skip-total", "stream-processor-node-metrics").metricValue());
        assertEquals(
            asList(
                new TestRecord<>("A", 1, Instant.ofEpochMilli(10L)),
                new TestRecord<>("B", 2, Instant.ofEpochMilli(11L)),
                new TestRecord<>("B", 3, Instant.ofEpochMilli(13L)),
                new TestRecord<>("A", 1, Instant.ofEpochMilli(9L))),
            outputTopic.readRecordsToList());
    }
}
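The props field comes from the test class's setup and is not shown in the excerpt. A minimal stand-in that would let the test run under TopologyTestDriver might look like this (the application id and bootstrap value are placeholders, not the test's actual fixture):

    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-source-test"); // hypothetical id
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9091");  // never contacted by the test driver
    props.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.IntegerSerde.class.getName());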
Use of org.apache.kafka.common.serialization.IntegerDeserializer in project kafka by apache.
The class KStreamSplitTest, method testKStreamSplit.
@Test
public void testKStreamSplit() {
    // all three branches are consumed via Branched.withConsumer(), so none of them
    // ends up in the returned map
    final Map<String, KStream<Integer, String>> branches = source.split()
        .branch(isEven, Branched.withConsumer(ks -> ks.to("x2")))
        .branch(isMultipleOfThree, Branched.withConsumer(ks -> ks.to("x3")))
        .branch(isMultipleOfFive, Branched.withConsumer(ks -> ks.to("x5")))
        .noDefaultBranch();
    assertEquals(0, branches.size());
    builder.build();
    withDriver(driver -> {
        final TestOutputTopic<Integer, String> x2 = driver.createOutputTopic("x2", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> x3 = driver.createOutputTopic("x3", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> x5 = driver.createOutputTopic("x5", new IntegerDeserializer(), new StringDeserializer());
        // branching is first-match-wins: keys 0 and 6 are even, so they land in x2, not x3
        assertEquals(Arrays.asList("V0", "V2", "V4", "V6"), x2.readValuesToList());
        assertEquals(Arrays.asList("V3"), x3.readValuesToList());
        assertEquals(Arrays.asList("V5"), x5.readValuesToList());
    });
}
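Here source, builder, the predicates, and withDriver are fields and helpers of the test class that the excerpt omits. A reconstruction consistent with the assertions in both split tests (hypothetical: the input keys -1 through 7 with values "V-1" through "V7" are inferred from the expected outputs, and props is a driver config like the one sketched earlier):

    private final StreamsBuilder builder = new StreamsBuilder();
    private final KStream<Integer, String> source =
        builder.stream("input", Consumed.with(Serdes.Integer(), Serdes.String()));

    private final Predicate<Integer, String> isEven = (key, value) -> key % 2 == 0;
    private final Predicate<Integer, String> isMultipleOfThree = (key, value) -> key % 3 == 0;
    private final Predicate<Integer, String> isMultipleOfFive = (key, value) -> key % 5 == 0;
    private final Predicate<Integer, String> isNegative = (key, value) -> key < 0;
    private final Predicate<Integer, String> isMultipleOfSeven = (key, value) -> key % 7 == 0;

    // builds the topology, pipes the fixed inputs through a test driver, and
    // hands the driver to the assertion callback
    private void withDriver(final java.util.function.Consumer<TopologyTestDriver> test) {
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            final TestInputTopic<Integer, String> input =
                driver.createInputTopic("input", new IntegerSerializer(), new StringSerializer());
            for (int key = -1; key <= 7; key++) {
                input.pipeInput(key, "V" + key);
            }
            test.accept(driver);
        }
    }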
Use of org.apache.kafka.common.serialization.IntegerDeserializer in project kafka by apache.
The class KStreamSplitTest, method testResultingMap.
@Test
public void testResultingMap() {
    final Map<String, KStream<Integer, String>> branches = source.split(Named.as("foo-"))
        .branch(isEven, Branched.as("bar"))
        .branch(isMultipleOfThree, Branched.withConsumer(ks -> { }))
        .branch(isMultipleOfFive, Branched.withFunction(ks -> null))
        .branch(isNegative, Branched.withFunction(ks -> ks))
        .branch(isMultipleOfSeven)
        .defaultBranch();
    assertEquals(4, branches.size());
    // direct the branched streams into different topics named with branch name
    for (final Map.Entry<String, KStream<Integer, String>> branch : branches.entrySet()) {
        branch.getValue().to(branch.getKey());
    }
    builder.build();
    withDriver(driver -> {
        final TestOutputTopic<Integer, String> even = driver.createOutputTopic("foo-bar", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> negative = driver.createOutputTopic("foo-4", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> x7 = driver.createOutputTopic("foo-5", new IntegerDeserializer(), new StringDeserializer());
        final TestOutputTopic<Integer, String> defaultBranch = driver.createOutputTopic("foo-0", new IntegerDeserializer(), new StringDeserializer());
        assertEquals(Arrays.asList("V0", "V2", "V4", "V6"), even.readValuesToList());
        assertEquals(Arrays.asList("V-1"), negative.readValuesToList());
        assertEquals(Arrays.asList("V7"), x7.readValuesToList());
        assertEquals(Arrays.asList("V1"), defaultBranch.readValuesToList());
    });
}
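The four map keys follow the branching rules from KIP-418: the split() name prefixes every branch name, an explicit Branched.as name is appended verbatim ("foo-bar"), an unnamed branch that stays in the map gets its one-based position in the chain ("foo-4" and "foo-5" for the fourth and fifth branches), and the default branch gets "0" ("foo-0"). A branch whose Branched consumes the stream or whose function returns null is excluded, which is why only four of the six branches appear. An equivalent check on the map itself (a hypothetical assertion, consistent with the topics verified above):

    assertEquals(
        new HashSet<>(Arrays.asList("foo-bar", "foo-4", "foo-5", "foo-0")),
        branches.keySet());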
Use of org.apache.kafka.common.serialization.IntegerDeserializer in project apache-kafka-on-k8s by banzaicloud.
The class EosTestDriver, method verifyMax.
private static void verifyMax(final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> inputPerTopicPerPartition,
                              final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> maxPerTopicPerPartition) {
    final StringDeserializer stringDeserializer = new StringDeserializer();
    final IntegerDeserializer integerDeserializer = new IntegerDeserializer();
    // running maximum observed so far for each key
    final HashMap<String, Integer> currentMaxPerKey = new HashMap<>();
    for (final Map.Entry<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords : maxPerTopicPerPartition.entrySet()) {
        final TopicPartition inputTopicPartition = new TopicPartition("repartition", partitionRecords.getKey().partition());
        final List<ConsumerRecord<byte[], byte[]>> partitionInput = inputPerTopicPerPartition.get(inputTopicPartition);
        final List<ConsumerRecord<byte[], byte[]>> partitionMax = partitionRecords.getValue();
        if (partitionInput.size() != partitionMax.size()) {
            throw new RuntimeException("Result verification failed: expected " + partitionInput.size()
                + " records for " + partitionRecords.getKey() + " but received " + partitionMax.size());
        }
        final Iterator<ConsumerRecord<byte[], byte[]>> inputRecords = partitionInput.iterator();
        for (final ConsumerRecord<byte[], byte[]> receivedRecord : partitionMax) {
            final ConsumerRecord<byte[], byte[]> input = inputRecords.next();
            final String receivedKey = stringDeserializer.deserialize(receivedRecord.topic(), receivedRecord.key());
            final int receivedValue = integerDeserializer.deserialize(receivedRecord.topic(), receivedRecord.value());
            final String key = stringDeserializer.deserialize(input.topic(), input.key());
            final int value = integerDeserializer.deserialize(input.topic(), input.value());
            // recompute the expected running max for this key from the raw input
            Integer max = currentMaxPerKey.get(key);
            if (max == null) {
                max = Integer.MIN_VALUE;
            }
            max = Math.max(max, value);
            currentMaxPerKey.put(key, max);
            if (!receivedKey.equals(key) || receivedValue != max.intValue()) {
                throw new RuntimeException("Result verification failed for " + receivedRecord
                    + " expected <" + key + "," + max + "> but was <" + receivedKey + "," + receivedValue + ">");
            }
        }
    }
}
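This verification leans on IntegerDeserializer exactly inverting IntegerSerializer's four-byte big-endian encoding, so values survive the trip through the byte[]-keyed consumer records. A minimal standalone round trip:

    final IntegerSerializer serializer = new IntegerSerializer();
    final IntegerDeserializer deserializer = new IntegerDeserializer();
    final byte[] bytes = serializer.serialize("any-topic", 42);               // four bytes, big-endian
    final Integer roundTripped = deserializer.deserialize("any-topic", bytes);
    assert roundTripped == 42;                                                // the encoding round-trips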