Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache: class GlobalStateTaskTest, method shouldThrowStreamsExceptionWhenValueDeserializationFails.
@Test
public void shouldThrowStreamsExceptionWhenValueDeserializationFails() {
    final byte[] key = new IntegerSerializer().serialize(topic2, 1);
    final byte[] recordValue = new LongSerializer().serialize(topic2, 10L);
    maybeDeserialize(globalStateTask, key, recordValue, true);
}
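The failure this test exercises comes down to payload size: LongSerializer emits an 8-byte value, while IntegerDeserializer accepts exactly 4 bytes, so value deserialization fails and the global state task is expected to surface it (here as a StreamsException via maybeDeserialize). A minimal standalone sketch of that size check, separate from the test above (the topic name is arbitrary):

import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.LongSerializer;

public class IntegerSizeMismatchSketch {
    public static void main(final String[] args) {
        // LongSerializer produces an 8-byte payload.
        final byte[] eightBytes = new LongSerializer().serialize("some-topic", 10L);
        try {
            // IntegerDeserializer insists on exactly 4 bytes, so this throws SerializationException.
            new IntegerDeserializer().deserialize("some-topic", eightBytes);
        } catch (final SerializationException expected) {
            System.out.println("rejected as expected: " + expected.getMessage());
        }
    }
}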
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache: class KTableKTableForeignKeyJoinScenarioTest, method validateTopologyCanProcessData.
private void validateTopologyCanProcessData(final StreamsBuilder builder) {
    final Properties config = new Properties();
    final String safeTestName = safeUniqueTestName(getClass(), testName);
    config.setProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.IntegerSerde.class.getName());
    config.setProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
    config.setProperty(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());

    try (final TopologyTestDriver topologyTestDriver = new TopologyTestDriver(builder.build(), config)) {
        final TestInputTopic<Integer, String> aTopic =
            topologyTestDriver.createInputTopic("A", new IntegerSerializer(), new StringSerializer());
        final TestInputTopic<Integer, String> bTopic =
            topologyTestDriver.createInputTopic("B", new IntegerSerializer(), new StringSerializer());
        final TestOutputTopic<Integer, String> output =
            topologyTestDriver.createOutputTopic("output", new IntegerDeserializer(), new StringDeserializer());

        aTopic.pipeInput(1, "999-alpha");
        bTopic.pipeInput(999, "beta");

        final Map<Integer, String> x = output.readKeyValuesToMap();
        assertThat(x, is(Collections.singletonMap(1, "(999-alpha,(999-alpha,beta))")));
    }
}
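The driver pattern above generalizes: pair IntegerSerializer/StringSerializer on the input topics with IntegerDeserializer/StringDeserializer on the output topic, and TopologyTestDriver handles the byte-level plumbing. A self-contained sketch with a trivial pass-through topology, where the topic names "in" and "out" and the config values are made up for illustration:

import java.util.Properties;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class PassThroughDriverSketch {
    public static void main(final String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        // Records flow from "in" to "out" unchanged.
        builder.stream("in", Consumed.with(Serdes.Integer(), Serdes.String()))
            .to("out", Produced.with(Serdes.Integer(), Serdes.String()));

        final Properties config = new Properties();
        config.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "pass-through-sketch");
        config.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
            final TestInputTopic<Integer, String> in =
                driver.createInputTopic("in", new IntegerSerializer(), new StringSerializer());
            final TestOutputTopic<Integer, String> out =
                driver.createOutputTopic("out", new IntegerDeserializer(), new StringDeserializer());
            in.pipeInput(1, "hello");
            System.out.println(out.readKeyValue()); // KeyValue(1, hello)
        }
    }
}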
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache: class KStreamTransformTest, method testTransform.
// Old PAPI. Needs to be migrated.
@SuppressWarnings("deprecation")
@Test
public void testTransform() {
    final StreamsBuilder builder = new StreamsBuilder();

    final TransformerSupplier<Number, Number, KeyValue<Integer, Integer>> transformerSupplier =
        () -> new Transformer<Number, Number, KeyValue<Integer, Integer>>() {
            private int total = 0;

            @Override
            public void init(final ProcessorContext context) {
                context.schedule(
                    Duration.ofMillis(1),
                    PunctuationType.WALL_CLOCK_TIME,
                    timestamp -> context.forward(-1, (int) timestamp, To.all().withTimestamp(timestamp)));
            }

            @Override
            public KeyValue<Integer, Integer> transform(final Number key, final Number value) {
                total += value.intValue();
                return KeyValue.pair(key.intValue() * 2, total);
            }

            @Override
            public void close() { }
        };

    final int[] expectedKeys = { 1, 10, 100, 1000 };
    final MockProcessorSupplier<Integer, Integer> processor = new MockProcessorSupplier<>();

    final KStream<Integer, Integer> stream = builder.stream(TOPIC_NAME, Consumed.with(Serdes.Integer(), Serdes.Integer()));
    stream.transform(transformerSupplier).process(processor);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), Instant.ofEpochMilli(0L))) {
        final TestInputTopic<Integer, Integer> inputTopic =
            driver.createInputTopic(TOPIC_NAME, new IntegerSerializer(), new IntegerSerializer());
        for (final int expectedKey : expectedKeys) {
            inputTopic.pipeInput(expectedKey, expectedKey * 10, expectedKey / 2L);
        }

        driver.advanceWallClockTime(Duration.ofMillis(2));
        driver.advanceWallClockTime(Duration.ofMillis(1));

        final KeyValueTimestamp[] expected = {
            new KeyValueTimestamp<>(2, 10, 0),
            new KeyValueTimestamp<>(20, 110, 5),
            new KeyValueTimestamp<>(200, 1110, 50),
            new KeyValueTimestamp<>(2000, 11110, 500),
            new KeyValueTimestamp<>(-1, 2, 2),
            new KeyValueTimestamp<>(-1, 3, 3)
        };

        assertEquals(expected.length, processor.theCapturedProcessor().processed().size());
        for (int i = 0; i < expected.length; i++) {
            assertEquals(expected[i], processor.theCapturedProcessor().processed().get(i));
        }
    }
}
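The comment above marks this as old Processor API code awaiting migration. As a hedged illustration only (assuming Kafka Streams 3.3+, where KStream#process accepts org.apache.kafka.streams.processor.api.ProcessorSupplier as the suggested replacement for transform), the same logic could be expressed roughly like this:

import java.time.Duration;
import org.apache.kafka.streams.processor.PunctuationType;
import org.apache.kafka.streams.processor.api.Processor;
import org.apache.kafka.streams.processor.api.ProcessorContext;
import org.apache.kafka.streams.processor.api.ProcessorSupplier;
import org.apache.kafka.streams.processor.api.Record;

public class MigratedTransformerSketch implements ProcessorSupplier<Number, Number, Integer, Integer> {

    @Override
    public Processor<Number, Number, Integer, Integer> get() {
        return new Processor<Number, Number, Integer, Integer>() {
            private ProcessorContext<Integer, Integer> context;
            private int total = 0;

            @Override
            public void init(final ProcessorContext<Integer, Integer> context) {
                this.context = context;
                // Same wall-clock punctuation as the old Transformer: forward (-1, timestamp) every 1 ms.
                context.schedule(Duration.ofMillis(1), PunctuationType.WALL_CLOCK_TIME,
                    timestamp -> context.forward(new Record<>(-1, (int) timestamp, timestamp)));
            }

            @Override
            public void process(final Record<Number, Number> record) {
                // Double the key and forward the running total, as transform() did above.
                total += record.value().intValue();
                context.forward(new Record<>(record.key().intValue() * 2, total, record.timestamp()));
            }
        };
    }
}

With such a supplier, stream.process(new MigratedTransformerSketch()) would take the place of stream.transform(transformerSupplier) in the test; this is a sketch of the migration path, not the project's actual rewrite.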
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache: class KTableFilterTest, method doTestSendingOldValue.
private void doTestSendingOldValue(final StreamsBuilder builder,
                                   final KTableImpl<String, Integer, Integer> table1,
                                   final KTableImpl<String, Integer, Integer> table2,
                                   final String topic1) {
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    final Topology topology = builder.build();
    topology.addProcessor("proc1", supplier, table1.name);
    topology.addProcessor("proc2", supplier, table2.name);

    final boolean parentSendOldVals = table1.sendingOldValueEnabled();

    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, Integer> inputTopic =
            driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("A", 1, 5L);
        inputTopic.pipeInput("B", 1, 10L);
        inputTopic.pipeInput("C", 1, 15L);

        final List<MockApiProcessor<String, Integer, Void, Void>> processors = supplier.capturedProcessors(2);
        final MockApiProcessor<String, Integer, Void, Void> table1Output = processors.get(0);
        final MockApiProcessor<String, Integer, Void, Void> table2Output = processors.get(1);
        table1Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(1, null), 5),
            new KeyValueTimestamp<>("B", new Change<>(1, null), 10),
            new KeyValueTimestamp<>("C", new Change<>(1, null), 15));
        table2Output.checkEmptyAndClearProcessResult();

        inputTopic.pipeInput("A", 2, 15L);
        inputTopic.pipeInput("B", 2, 8L);
        table1Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, parentSendOldVals ? 1 : null), 15),
            new KeyValueTimestamp<>("B", new Change<>(2, parentSendOldVals ? 1 : null), 8));
        table2Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
            new KeyValueTimestamp<>("B", new Change<>(2, null), 8));

        inputTopic.pipeInput("A", 3, 20L);
        table1Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(3, parentSendOldVals ? 2 : null), 20));
        table2Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, 2), 20));

        inputTopic.pipeInput("A", null, 10L);
        inputTopic.pipeInput("B", null, 20L);
        table1Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, parentSendOldVals ? 3 : null), 10),
            new KeyValueTimestamp<>("B", new Change<>(null, parentSendOldVals ? 2 : null), 20));
        table2Output.checkAndClearProcessResult(
            new KeyValueTimestamp<>("B", new Change<>(null, 2), 20));
    }
}
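For context, callers of this helper pass in a source KTable plus a derived KTable with old-value forwarding enabled, which is what makes table1Output see Change(newValue, oldValue) pairs. A hedged sketch of one plausible caller, where the topic name, the even-value filter predicate, and the enableSendingOldValues(true) call (the boolean-argument form in recent Kafka versions) are assumptions for illustration rather than a verbatim quote of KTableFilterTest:

// Hypothetical caller of doTestSendingOldValue, written in the style of a test method.
final StreamsBuilder builder = new StreamsBuilder();
final String topic1 = "topic1";
final Consumed<String, Integer> consumed = Consumed.with(Serdes.String(), Serdes.Integer());

final KTableImpl<String, Integer, Integer> table1 =
    (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed);
// Keep only even values; odd values never reach table2's output.
final KTableImpl<String, Integer, Integer> table2 =
    (KTableImpl<String, Integer, Integer>) table1.filter((key, value) -> (value % 2) == 0);

// Ask the filtered table to emit old values so downstream sees Change(newValue, oldValue) pairs.
table2.enableSendingOldValues(true);

doTestSendingOldValue(builder, table1, table2, topic1);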
Use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka by apache: class KTableFilterTest, method doTestNotSendingOldValue.
private void doTestNotSendingOldValue(final StreamsBuilder builder,
                                      final KTableImpl<String, Integer, Integer> table1,
                                      final KTableImpl<String, Integer, Integer> table2,
                                      final String topic1) {
    final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
    builder.build().addProcessor("proc1", supplier, table1.name);
    builder.build().addProcessor("proc2", supplier, table2.name);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final TestInputTopic<String, Integer> inputTopic =
            driver.createInputTopic(topic1, new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        inputTopic.pipeInput("A", 1, 5L);
        inputTopic.pipeInput("B", 1, 10L);
        inputTopic.pipeInput("C", 1, 15L);

        final List<MockApiProcessor<String, Integer, Void, Void>> processors = supplier.capturedProcessors(2);
        processors.get(0).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(1, null), 5),
            new KeyValueTimestamp<>("B", new Change<>(1, null), 10),
            new KeyValueTimestamp<>("C", new Change<>(1, null), 15));
        processors.get(1).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 5),
            new KeyValueTimestamp<>("B", new Change<>(null, null), 10),
            new KeyValueTimestamp<>("C", new Change<>(null, null), 15));

        inputTopic.pipeInput("A", 2, 15L);
        inputTopic.pipeInput("B", 2, 8L);
        processors.get(0).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
            new KeyValueTimestamp<>("B", new Change<>(2, null), 8));
        processors.get(1).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, null), 15),
            new KeyValueTimestamp<>("B", new Change<>(2, null), 8));

        inputTopic.pipeInput("A", 3, 20L);
        processors.get(0).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(3, null), 20));
        processors.get(1).checkAndClearProcessResult(new KeyValueTimestamp<>("A", new Change<>(null, null), 20));

        inputTopic.pipeInput("A", null, 10L);
        inputTopic.pipeInput("B", null, 20L);
        processors.get(0).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 10),
            new KeyValueTimestamp<>("B", new Change<>(null, null), 20));
        processors.get(1).checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 10),
            new KeyValueTimestamp<>("B", new Change<>(null, null), 20));
    }
}
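All of these examples lean on the same fixed wire format: IntegerSerializer writes the int as 4 bytes in big-endian order (ignoring the topic argument), and IntegerDeserializer reads those 4 bytes back. A quick standalone sketch:

import java.util.Arrays;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;

public class IntegerWireFormatSketch {
    public static void main(final String[] args) {
        final byte[] bytes = new IntegerSerializer().serialize("any-topic", 999);
        // 999 = 0x000003E7, so the big-endian encoding prints as [0, 0, 3, -25] (0xE7 as a signed byte).
        System.out.println(Arrays.toString(bytes));
        // Round-trips back to 999.
        System.out.println(new IntegerDeserializer().deserialize("any-topic", bytes));
    }
}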