Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class KafkaStreamsTest, method testIllegalMetricsConfig.
@Test(expected = ConfigException.class)
public void testIllegalMetricsConfig() {
    final Properties props = new Properties();
    props.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "appId");
    props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    // "illegalConfig" is not a recognized recording level, so constructing KafkaStreams must throw
    props.setProperty(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, "illegalConfig");
    final KStreamBuilder builder = new KStreamBuilder();
    final KafkaStreams streams = new KafkaStreams(builder, props);
}
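For contrast, a minimal sketch of a legal metrics configuration: the valid values for METRICS_RECORDING_LEVEL_CONFIG are the names of org.apache.kafka.common.metrics.Sensor.RecordingLevel, such as "INFO" and "DEBUG" (the sketch assumes Sensor is imported and the same CLUSTER test fixture is available).

final Properties props = new Properties();
props.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "appId");
props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
// A recognized recording level, unlike "illegalConfig" above
props.setProperty(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, Sensor.RecordingLevel.DEBUG.name());
final KafkaStreams streams = new KafkaStreams(new KStreamBuilder(), props);  // constructs without throwing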
Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class FanoutIntegrationTest, method shouldFanoutTheInput.
@Test
public void shouldFanoutTheInput() throws Exception {
    final List<String> inputValues = Arrays.asList("Hello", "World");
    final List<String> expectedValuesForB = new ArrayList<>();
    final List<String> expectedValuesForC = new ArrayList<>();
    for (final String input : inputValues) {
        expectedValuesForB.add(input.toUpperCase(Locale.getDefault()));
        expectedValuesForC.add(input.toLowerCase(Locale.getDefault()));
    }
    //
    // Step 1: Configure and start the processor topology.
    //
    final KStreamBuilder builder = new KStreamBuilder();
    final Properties streamsConfiguration = new Properties();
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "fanout-integration-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, COMMIT_INTERVAL_MS);
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, cacheSizeBytes);
    // Fan out: stream2 and stream3 both read from the same parent stream1
    final KStream<byte[], String> stream1 = builder.stream(INPUT_TOPIC_A);
    final KStream<byte[], String> stream2 = stream1.mapValues(new ValueMapper<String, String>() {
        @Override
        public String apply(final String value) {
            return value.toUpperCase(Locale.getDefault());
        }
    });
    final KStream<byte[], String> stream3 = stream1.mapValues(new ValueMapper<String, String>() {
        @Override
        public String apply(final String value) {
            return value.toLowerCase(Locale.getDefault());
        }
    });
    stream2.to(OUTPUT_TOPIC_B);
    stream3.to(OUTPUT_TOPIC_C);
    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();
    //
    // Step 2: Produce some input data to the input topic.
    //
    final Properties producerConfig = new Properties();
    producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
    producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0);
    producerConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    producerConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    IntegrationTestUtils.produceValuesSynchronously(INPUT_TOPIC_A, inputValues, producerConfig, mockTime);
    //
    // Step 3: Verify the application's output data.
    //
    // Verify output topic B
    final Properties consumerConfigB = new Properties();
    consumerConfigB.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerConfigB.put(ConsumerConfig.GROUP_ID_CONFIG, "fanout-integration-test-standard-consumer-topicB");
    consumerConfigB.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerConfigB.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
    consumerConfigB.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    final List<String> actualValuesForB = IntegrationTestUtils.waitUntilMinValuesRecordsReceived(consumerConfigB, OUTPUT_TOPIC_B, inputValues.size());
    assertThat(actualValuesForB, equalTo(expectedValuesForB));
    // Verify output topic C
    final Properties consumerConfigC = new Properties();
    consumerConfigC.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    consumerConfigC.put(ConsumerConfig.GROUP_ID_CONFIG, "fanout-integration-test-standard-consumer-topicC");
    consumerConfigC.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    consumerConfigC.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
    consumerConfigC.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    final List<String> actualValuesForC = IntegrationTestUtils.waitUntilMinValuesRecordsReceived(consumerConfigC, OUTPUT_TOPIC_C, inputValues.size());
    streams.close();
    assertThat(actualValuesForC, equalTo(expectedValuesForC));
}
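The anonymous ValueMapper classes above predate Java 8 lambdas. As a minimal sketch, the same fan-out topology written against the newer StreamsBuilder API (which Kafka later introduced to replace the deprecated KStreamBuilder) might look like the following, reusing the topic constants and configuration from the test above:

final StreamsBuilder builder = new StreamsBuilder();
final KStream<byte[], String> source = builder.stream(INPUT_TOPIC_A);
// Two children of the same parent node: the fan-out
source.mapValues(v -> v.toUpperCase(Locale.getDefault())).to(OUTPUT_TOPIC_B);
source.mapValues(v -> v.toLowerCase(Locale.getDefault())).to(OUTPUT_TOPIC_C);
final KafkaStreams streams = new KafkaStreams(builder.build(), streamsConfiguration);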
Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class InternalTopicIntegrationTest, method shouldCompactTopicsForStateChangelogs.
@Test
public void shouldCompactTopicsForStateChangelogs() throws Exception {
    //
    // Step 1: Configure and start a simple word count topology
    //
    final Serde<String> stringSerde = Serdes.String();
    final Serde<Long> longSerde = Serdes.Long();
    final Properties streamsConfiguration = new Properties();
    // Step 3 below looks up the changelog via the applicationId field, which must equal this value
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "compact-topics-integration-test");
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> textLines = builder.stream(DEFAULT_INPUT_TOPIC);
    final KStream<String, Long> wordCounts = textLines.flatMapValues(new ValueMapper<String, Iterable<String>>() {
        @Override
        public Iterable<String> apply(final String value) {
            return Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"));
        }
    }).groupBy(MockKeyValueMapper.<String, String>SelectValueMapper()).count("Counts").toStream();
    wordCounts.to(stringSerde, longSerde, DEFAULT_OUTPUT_TOPIC);
    // Remove any state from previous test runs
    IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration);
    final KafkaStreams streams = new KafkaStreams(builder, streamsConfiguration);
    streams.start();
    //
    // Step 2: Produce some input data to the input topic.
    //
    produceData(Arrays.asList("hello", "world", "world", "hello world"));
    //
    // Step 3: Verify that the state changelog topics are compacted
    //
    streams.close();
    final Properties properties = getTopicConfigProperties(ProcessorStateManager.storeChangelogTopic(applicationId, "Counts"));
    assertEquals(LogConfig.Compact(), properties.getProperty(LogConfig.CleanupPolicyProp()));
}
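The changelog topic checked in Step 3 follows the standard Kafka Streams naming convention, so the lookup is equivalent to reading the config of a concretely named topic. A minimal sketch of that convention, assuming applicationId holds the value configured in Step 1:

// storeChangelogTopic(applicationId, storeName) resolves to "<application.id>-<storeName>-changelog"
final String changelogTopic = applicationId + "-Counts-changelog";
// e.g. "compact-topics-integration-test-Counts-changelog"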
Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class ResetIntegrationTest, method setupTopologyWithIntermediateUserTopic.
private KStreamBuilder setupTopologyWithIntermediateUserTopic(final String outputTopic2) {
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<Long, String> input = builder.stream(INPUT_TOPIC);
    // use map to trigger internal re-partitioning before groupByKey
    input.map(new KeyValueMapper<Long, String, KeyValue<Long, String>>() {
        @Override
        public KeyValue<Long, String> apply(final Long key, final String value) {
            return new KeyValue<>(key, value);
        }
    }).groupByKey().count("global-count").to(Serdes.Long(), Serdes.Long(), OUTPUT_TOPIC);
    input.through(INTERMEDIATE_USER_TOPIC)
        .groupByKey()
        .count(TimeWindows.of(35).advanceBy(10), "count")
        .toStream()
        .map(new KeyValueMapper<Windowed<Long>, Long, KeyValue<Long, Long>>() {
            @Override
            public KeyValue<Long, Long> apply(final Windowed<Long> key, final Long value) {
                return new KeyValue<>(key.window().start() + key.window().end(), value);
            }
        })
        .to(Serdes.Long(), Serdes.Long(), outputTopic2);
    return builder;
}
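TimeWindows.of(35).advanceBy(10) defines hopping windows 35 ms long that advance every 10 ms, so consecutive windows overlap and a single record is counted in ceil(35 / 10) = 4 windows. A minimal sketch of that windowed aggregation in isolation, assuming a KStream<Long, String> named input as above:

// Hopping windows: size 35 ms, advance interval 10 ms => overlapping windows
final TimeWindows hoppingWindows = TimeWindows.of(35).advanceBy(10);
// Old-style windowed count into a named store; yields one count per (key, window) pair
final KTable<Windowed<Long>, Long> counts = input.groupByKey().count(hoppingWindows, "count");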
Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class KafkaStreamsTest, method createKafkaStreams.
private KafkaStreams createKafkaStreams() {
    final Properties props = new Properties();
    props.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "appId");
    props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    final KStreamBuilder builder = new KStreamBuilder();
    return new KafkaStreams(builder, props);
}
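A typical lifecycle for the instance returned by this helper, as a minimal sketch (the try/finally wrapper is an assumption, not part of the original test class):

final KafkaStreams streams = createKafkaStreams();
try {
    streams.start();  // begin processing with the topology and config built above
} finally {
    streams.close();  // always release threads and local state handles
}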