Usage example of kafka.tools.StreamsResetter from the project "kafka" by apache:
class ResetIntegrationTest, method cleanGlobal.
/**
 * Runs the {@link StreamsResetter} tool against the test application and asserts that it
 * exits successfully (exit code {@code 0}).
 *
 * @param intermediateUserTopic the intermediate topic to reset as well, or {@code null}
 *                              if the topology under test uses no intermediate user topic
 */
private void cleanGlobal(final String intermediateUserTopic) {
    final String[] parameters;
    if (intermediateUserTopic != null) {
        // Bug fix: pass the supplied parameter rather than the INTERMEDIATE_USER_TOPIC
        // constant — previously the parameter was only null-checked and then ignored.
        // Callers that pass the constant see identical behavior.
        parameters = new String[] {
            "--application-id", APP_ID + testNo,
            "--bootstrap-server", CLUSTER.bootstrapServers(),
            "--zookeeper", CLUSTER.zKConnectString(),
            "--input-topics", INPUT_TOPIC,
            "--intermediate-topics", intermediateUserTopic
        };
    } else {
        parameters = new String[] {
            "--application-id", APP_ID + testNo,
            "--bootstrap-server", CLUSTER.bootstrapServers(),
            "--zookeeper", CLUSTER.zKConnectString(),
            "--input-topics", INPUT_TOPIC
        };
    }
    // Short heartbeat/session timeouts so the resetter's internal consumer is detected
    // as gone quickly once the tool finishes.
    final Properties cleanUpConfig = new Properties();
    cleanUpConfig.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 100);
    cleanUpConfig.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "" + CLEANUP_CONSUMER_TIMEOUT);
    final int exitCode = new StreamsResetter().run(parameters, cleanUpConfig);
    Assert.assertEquals(0, exitCode);
}
Usage example of kafka.tools.StreamsResetter from the project "kafka-streams-examples" by confluentinc:
class ApplicationResetIntegrationTest, method shouldReprocess.
@Test
public void shouldReprocess() throws Exception {
    final List<String> inputValues = Arrays.asList("Hello World", "Hello Kafka Streams", "All streams lead to Kafka");
    final List<KeyValue<String, Long>> expectedResult = Arrays.asList(KeyValue.pair("Hello", 1L), KeyValue.pair("Hello", 2L), KeyValue.pair("All", 1L));
    //
    // Step 1: Configure and start the processor topology.
    //
    final Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // Disable record caching so output is emitted eagerly and deterministically.
    streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    // Use a temporary directory for storing state, which will be automatically removed after the test.
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getAbsolutePath());
    KafkaStreams streams = ApplicationResetExample.run(new String[0], streamsConfiguration);
    //
    // Step 2: Produce some input data to the input topic.
    //
    final Properties producerConfig = new Properties();
    producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
    producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
    producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputValues, producerConfig);
    //
    // Step 3: Verify the application's output data.
    //
    final Properties consumerConfig = new Properties();
    consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "application-reset-integration-test-standard-consumer-output-topic");
    consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class);
    final List<KeyValue<String, Long>> result = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, inputValues.size());
    assertThat(result).isEqualTo(expectedResult);
    streams.close();
    //
    // Step 4: Reset application.
    //
    // wait for application to be completely shut down
    final AdminClient adminClient = AdminClient.createSimplePlaintext(CLUSTER.bootstrapServers());
    while (!adminClient.describeConsumerGroup(applicationId, 0).consumers().get().isEmpty()) {
        Utils.sleep(50);
    }
    // reset application
    final int exitCode = new StreamsResetter().run(new String[] { "--application-id", applicationId, "--bootstrap-servers", CLUSTER.bootstrapServers(), "--zookeeper", CLUSTER.zookeeperConnect(), "--input-topics", inputTopic });
    Assert.assertEquals(0, exitCode);
    // wait for reset client to be completely closed
    while (!adminClient.describeConsumerGroup(applicationId, 0).consumers().get().isEmpty()) {
        Utils.sleep(50);
    }
    // Fix: close the admin client once it is no longer needed (previously leaked).
    adminClient.close();
    //
    // Step 5: Rerun application
    //
    streams = ApplicationResetExample.run(new String[] { "--reset" }, streamsConfiguration);
    //
    // Step 6: Verify the application's output data.
    //
    final List<KeyValue<String, Long>> resultRerun = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, inputValues.size());
    assertThat(resultRerun).isEqualTo(expectedResult);
    streams.close();
}
Usage example of kafka.tools.StreamsResetter from the project "apache-kafka-on-k8s" by banzaicloud:
class StreamsResetterTest, method invokeGetDateTimeMethod.
/**
 * Formats the current time with the given pattern and verifies that
 * {@code StreamsResetter.getDateTime} can parse the result back without
 * throwing a {@link ParseException}.
 *
 * @param format the date format used to render the timestamp under test
 * @throws ParseException if the resetter cannot parse the formatted timestamp
 */
private void invokeGetDateTimeMethod(final SimpleDateFormat format) throws ParseException {
    final String formattedNow = format.format(new Date());
    new StreamsResetter().getDateTime(formattedNow);
}
Usage example of kafka.tools.StreamsResetter from the project "apache-kafka-on-k8s" by banzaicloud:
class AbstractResetIntegrationTest, method shouldNotAllowToResetWhenIntermediateTopicAbsent.
/**
 * Verifies that the reset tool fails (exit code {@code 1}) when asked to reset
 * an intermediate topic that does not exist.
 */
public void shouldNotAllowToResetWhenIntermediateTopicAbsent() throws Exception {
    appID = testId + "-not-reset-without-intermediate-topic";
    final String[] args = {
        "--application-id", appID,
        "--bootstrap-servers", cluster.bootstrapServers(),
        "--intermediate-topics", NON_EXISTING_TOPIC,
        "--execute"
    };
    // Short heartbeat/session timeouts so the tool's consumer leaves the group quickly.
    final Properties resetterConfig = new Properties();
    resetterConfig.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 100);
    resetterConfig.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "" + CLEANUP_CONSUMER_TIMEOUT);
    Assert.assertEquals(1, new StreamsResetter().run(args, resetterConfig));
}
Usage example of kafka.tools.StreamsResetter from the project "apache-kafka-on-k8s" by banzaicloud:
class AbstractResetIntegrationTest, method shouldNotAllowToResetWhileStreamsIsRunning.
/**
 * Verifies that the reset tool refuses to run (exit code {@code 1}) while the
 * Streams application is still running.
 */
void shouldNotAllowToResetWhileStreamsIsRunning() throws Exception {
    appID = testId + "-not-reset-during-runtime";
    final String[] parameters = new String[] { "--application-id", appID, "--bootstrap-servers", cluster.bootstrapServers(), "--input-topics", NON_EXISTING_TOPIC, "--execute" };
    // Short heartbeat/session timeouts so the tool's consumer leaves the group quickly.
    final Properties cleanUpConfig = new Properties();
    cleanUpConfig.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 100);
    cleanUpConfig.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "" + CLEANUP_CONSUMER_TIMEOUT);
    streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, appID);
    // RUN
    streams = new KafkaStreams(setupTopologyWithoutIntermediateUserTopic(), streamsConfig);
    streams.start();
    try {
        final int exitCode = new StreamsResetter().run(parameters, cleanUpConfig);
        Assert.assertEquals(1, exitCode);
    } finally {
        // Fix: close the Streams instance even when the assertion fails, so a failing
        // test does not leak a running application into subsequent tests.
        streams.close();
    }
}
Aggregations