use of org.apache.kafka.common.serialization.IntegerSerializer in project apache-kafka-on-k8s by banzaicloud.
the class GlobalStateTaskTest method shouldNotThrowStreamsExceptionWhenKeyDeserializationFailsWithSkipHandler.
@Test
public void shouldNotThrowStreamsExceptionWhenKeyDeserializationFailsWithSkipHandler() throws Exception {
    final GlobalStateUpdateTask globalStateTask2 =
            new GlobalStateUpdateTask(topology, context, stateMgr, new LogAndContinueExceptionHandler(), logContext);
    final byte[] key = new LongSerializer().serialize(topic2, 1L);
    final byte[] recordValue = new IntegerSerializer().serialize(topic2, 10);
    maybeDeserialize(globalStateTask2, key, recordValue, false);
}
use of org.apache.kafka.common.serialization.IntegerSerializer in project kafka-streams-examples by confluentinc.
the class SumLambdaIntegrationTest method shouldSumEvenNumbers.
@Test
public void shouldSumEvenNumbers() {
    final List<Integer> inputValues = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    final List<KeyValue<Integer, Integer>> expectedValues = Arrays.asList(
            new KeyValue<>(1, 1), new KeyValue<>(1, 4), new KeyValue<>(1, 9),
            new KeyValue<>(1, 16), new KeyValue<>(1, 25));
    //
    // Step 1: Configure and start the processor topology.
    //
    final Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "sum-lambda-integration-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy config"); // placeholder; TopologyTestDriver never connects to a broker
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass().getName());
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());
    try (final TopologyTestDriver topologyTestDriver =
            new TopologyTestDriver(SumLambdaExample.getTopology(), streamsConfiguration)) {
        //
        // Step 2: Set up input and output topics.
        //
        final TestInputTopic<Void, Integer> input = topologyTestDriver.createInputTopic(
                SumLambdaExample.NUMBERS_TOPIC, new IntegrationTestUtils.NothingSerde<>(), new IntegerSerializer());
        final TestOutputTopic<Integer, Integer> output = topologyTestDriver.createOutputTopic(
                SumLambdaExample.SUM_OF_ODD_NUMBERS_TOPIC, new IntegerDeserializer(), new IntegerDeserializer());
        //
        // Step 3: Produce some input data to the input topic.
        //
        input.pipeValueList(inputValues);
        //
        // Step 4: Verify the application's output data.
        //
        assertThat(output.readKeyValuesToList(), equalTo(expectedValues));
    }
}
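The same in-memory pattern works without the example project. A minimal, self-contained sketch against an invented doubling topology (the topology and topic names are made up for illustration; only kafka-streams and its test-utils artifact are assumed):
import java.util.Properties;

import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.TestOutputTopic;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class DoublingTopologySketch {
    public static void main(String[] args) {
        // A trivial topology: read integers, double them, write them back out.
        StreamsBuilder builder = new StreamsBuilder();
        builder.stream("numbers-in", Consumed.with(Serdes.Integer(), Serdes.Integer()))
               .mapValues(v -> v * 2)
               .to("numbers-out", Produced.with(Serdes.Integer(), Serdes.Integer()));

        Properties config = new Properties();
        config.put(StreamsConfig.APPLICATION_ID_CONFIG, "doubling-sketch");
        config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234"); // never contacted by the driver

        try (TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
            TestInputTopic<Integer, Integer> in = driver.createInputTopic(
                    "numbers-in", new IntegerSerializer(), new IntegerSerializer());
            TestOutputTopic<Integer, Integer> out = driver.createOutputTopic(
                    "numbers-out", new IntegerDeserializer(), new IntegerDeserializer());
            in.pipeInput(1, 21);
            System.out.println(out.readKeyValue()); // KeyValue(1, 42)
        }
    }
}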
use of org.apache.kafka.common.serialization.IntegerSerializer in project spring-cloud-stream by spring-cloud.
the class KafkaStreamsInteractiveQueryIntegrationTests method receiveAndValidateFoo.
private void receiveAndValidateFoo(ConfigurableApplicationContext context) {
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
    template.setDefaultTopic("foos");
    template.sendDefault("{\"id\":\"123\"}");
    ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
    assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
    ProductCountApplication.Foo foo = context.getBean(ProductCountApplication.Foo.class);
    assertThat(foo.getProductStock(123)).isEqualTo(1L);
    // Perform assertions on the HostInfo-related methods in InteractiveQueryService.
    InteractiveQueryService interactiveQueryService = context.getBean(InteractiveQueryService.class);
    HostInfo currentHostInfo = interactiveQueryService.getCurrentHostInfo();
    assertThat(currentHostInfo.host() + ":" + currentHostInfo.port()).isEqualTo(embeddedKafka.getBrokersAsString());
    final KeyQueryMetadata keyQueryMetadata =
            interactiveQueryService.getKeyQueryMetadata("prod-id-count-store", 123, new IntegerSerializer());
    final HostInfo activeHost = keyQueryMetadata.getActiveHost();
    assertThat(activeHost.host() + ":" + activeHost.port()).isEqualTo(embeddedKafka.getBrokersAsString());
    final KafkaStreams kafkaStreams =
            interactiveQueryService.getKafkaStreams("prod-id-count-store", 123, new IntegerSerializer());
    assertThat(kafkaStreams).isNotNull();
    assertThat(interactiveQueryService.getKafkaStreams("non-existent-store", 123, new IntegerSerializer())).isNull();
    HostInfo hostInfo = interactiveQueryService.getHostInfo("prod-id-count-store", 123, new IntegerSerializer());
    assertThat(hostInfo.host() + ":" + hostInfo.port()).isEqualTo(embeddedKafka.getBrokersAsString());
    assertThatThrownBy(() -> interactiveQueryService.getHostInfo("prod-id-count-store-foo", 123, new IntegerSerializer()))
            .isInstanceOf(IllegalStateException.class)
            .hasMessageContaining("Error when retrieving state store.");
    final List<HostInfo> hostInfos = interactiveQueryService.getAllHostsInfo("prod-id-count-store");
    assertThat(hostInfos.size()).isEqualTo(1);
    final HostInfo hostInfo1 = hostInfos.get(0);
    assertThat(hostInfo1.host() + ":" + hostInfo1.port()).isEqualTo(embeddedKafka.getBrokersAsString());
}
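For context, the Spring wrapper's key lookup boils down to the Kafka Streams API itself. A minimal sketch of that underlying call, assuming a running KafkaStreams instance is available in scope and using the same getActiveHost() accessor as the test above (queryMetadataForKey requires Kafka Streams 2.5 or later):
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyQueryMetadata;
import org.apache.kafka.streams.state.HostInfo;

public class KeyLocationSketch {
    // Resolve which application instance hosts the partition that owns this key.
    // Store name and key mirror the test above; the KafkaStreams instance must
    // come from a started topology.
    static String locateKey(KafkaStreams streams) {
        KeyQueryMetadata metadata =
                streams.queryMetadataForKey("prod-id-count-store", 123, new IntegerSerializer());
        HostInfo active = metadata.getActiveHost();
        return active.host() + ":" + active.port();
    }
}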
use of org.apache.kafka.common.serialization.IntegerSerializer in project jbpm-work-items by kiegroup.
the class KafkaWorkItemHandlerTest method buildKafkaWIHInteger.
private void buildKafkaWIHInteger(boolean autocomplete) {
    // MockProducer only invokes the serializers when a partition can be resolved;
    // that is needed to trigger cast exceptions in the incorrect-type tests.
    PartitionInfo partitionInfo = new PartitionInfo(TOPIC, 0, null, null, null);
    Cluster cluster = new Cluster(null, emptyList(), asList(partitionInfo), emptySet(), emptySet());
    mockProducerInteger = new MockProducer<>(cluster, autocomplete, new DefaultPartitioner(),
            new IntegerSerializer(), new IntegerSerializer());
    handler = new KafkaWorkItemHandler(new Properties(), mockProducerInteger);
}
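A minimal sketch of the same trick in isolation, with an invented topic and values, matching the Cluster-based MockProducer constructor used above:
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptySet;

import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.internals.DefaultPartitioner;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.serialization.IntegerSerializer;

public class MockProducerSketch {
    public static void main(String[] args) {
        // A one-partition cluster gives the DefaultPartitioner a partition to resolve,
        // which is what forces MockProducer to run both serializers on send().
        PartitionInfo partition = new PartitionInfo("numbers", 0, null, null, null);
        Cluster cluster = new Cluster(null, emptyList(), asList(partition), emptySet(), emptySet());
        MockProducer<Integer, Integer> producer = new MockProducer<>(
                cluster, true, new DefaultPartitioner(), new IntegerSerializer(), new IntegerSerializer());

        producer.send(new ProducerRecord<>("numbers", 1, 42));
        System.out.println(producer.history()); // the record, captured in memory only
    }
}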
use of org.apache.kafka.common.serialization.IntegerSerializer in project debezium by debezium.
the class KafkaClusterTest method shouldStartClusterAndAllowAsynchronousProductionAndAutomaticConsumersToUseIt.
@Test
@SkipLongRunning
public void shouldStartClusterAndAllowAsynchronousProductionAndAutomaticConsumersToUseIt() throws Exception {
    Testing.Debug.enable();
    final String topicName = "topicA";
    final CountDownLatch completion = new CountDownLatch(2);
    final int numMessages = 3;
    final AtomicLong messagesRead = new AtomicLong(0);
    // Start a cluster and create a topic ...
    cluster.addBrokers(1).startup();
    cluster.createTopics(topicName);
    // Consume messages asynchronously ...
    Stopwatch sw = Stopwatch.reusable().start();
    cluster.useTo().consumeIntegers(topicName, numMessages, 10, TimeUnit.SECONDS, completion::countDown, (key, value) -> {
        messagesRead.incrementAndGet();
        return true;
    });
    // Produce some messages interactively ...
    cluster.useTo().produce("manual", new StringSerializer(), new IntegerSerializer(), producer -> {
        producer.write(topicName, "key1", 1);
        producer.write(topicName, "key2", 2);
        producer.write(topicName, "key3", 3);
        completion.countDown();
    });
    // Wait for the consumer to complete ...
    if (completion.await(10, TimeUnit.SECONDS)) {
        sw.stop();
        Testing.debug("The consumer completed normally in " + sw.durations());
    } else {
        Testing.debug("Consumer did not complete normally");
    }
    assertThat(messagesRead.get()).isEqualTo(numMessages);
}
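Stripped of the Debezium test harness, the produce step is equivalent to a plain KafkaProducer built with the same serializer pair; a minimal sketch, assuming a broker reachable at localhost:9092:
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

public class ManualProduceSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        props.put(ProducerConfig.CLIENT_ID_CONFIG, "manual");

        // String keys, Integer values, mirroring the serializer pair in the test above.
        try (KafkaProducer<String, Integer> producer =
                new KafkaProducer<>(props, new StringSerializer(), new IntegerSerializer())) {
            producer.send(new ProducerRecord<>("topicA", "key1", 1));
            producer.send(new ProducerRecord<>("topicA", "key2", 2));
            producer.send(new ProducerRecord<>("topicA", "key3", 3));
        } // close() flushes the three records before returning
    }
}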