
Example 6 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.

The class EosIntegrationTest, method runSimpleCopyTest.

private void runSimpleCopyTest(final int numberOfRestarts, final String inputTopic, final String throughTopic, final String outputTopic) throws Exception {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> input = builder.stream(inputTopic);
    KStream<Long, Long> output = input;
    if (throughTopic != null) {
        // route the stream through an intermediate topic when one is given
        output = input.through(throughTopic);
    }
    output.to(outputTopic);
    // restart the application repeatedly to exercise exactly-once recovery
    for (int i = 0; i < numberOfRestarts; ++i) {
        final KafkaStreams streams = new KafkaStreams(builder.build(), StreamsTestUtils.getStreamsConfig(applicationId, CLUSTER.bootstrapServers(), Serdes.LongSerde.class.getName(), Serdes.LongSerde.class.getName(), new Properties() {

            {
                put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), 1);
                put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);
            }
        }));
        try {
            streams.start();
            final List<KeyValue<Long, Long>> inputData = prepareData(i * 100, i * 100 + 10L, 0L, 1L);
            IntegrationTestUtils.produceKeyValuesSynchronously(inputTopic, inputData, TestUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class, LongSerializer.class), CLUSTER.time);
            // consume with read_committed isolation (set below) so that only
            // records from committed transactions are counted
            final List<KeyValue<Long, Long>> committedRecords = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(TestUtils.consumerConfig(CLUSTER.bootstrapServers(), CONSUMER_GROUP_ID, LongDeserializer.class, LongDeserializer.class, new Properties() {

                {
                    put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.name().toLowerCase(Locale.ROOT));
                }
            }), outputTopic, inputData.size());
            checkResultPerKey(committedRecords, inputData);
        } finally {
            streams.close();
        }
    }
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), KafkaStreams (org.apache.kafka.streams.KafkaStreams), KeyValue (org.apache.kafka.streams.KeyValue), LongSerializer (org.apache.kafka.common.serialization.LongSerializer), Serdes (org.apache.kafka.common.serialization.Serdes), LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer), Properties (java.util.Properties)
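
Note that KStream#through, used above, was deprecated in Kafka 2.6 in favor of KStream#repartition. Below is a minimal sketch of the same copy topology against a newer client, assuming Kafka Streams 2.6+; the method name and serde choices are illustrative, and Consumed, Produced, and Repartitioned come from org.apache.kafka.streams.kstream.

private Topology buildCopyTopology(final String inputTopic, final String throughTopic, final String outputTopic) {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> input = builder.stream(inputTopic, Consumed.with(Serdes.Long(), Serdes.Long()));
    KStream<Long, Long> output = input;
    if (throughTopic != null) {
        // repartition() routes through an internal topic that Streams creates and
        // manages itself, whereas through() required a pre-created topic; the name
        // given here only seeds the internal topic's name
        output = input.repartition(Repartitioned.<Long, Long>as(throughTopic)
                .withKeySerde(Serdes.Long())
                .withValueSerde(Serdes.Long()));
    }
    output.to(outputTopic, Produced.with(Serdes.Long(), Serdes.Long()));
    return builder.build();
}

One behavioral difference to keep in mind: Streams names and creates the repartition topic itself, so a test that inspects the intermediate topic directly would need adjusting.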

Example 7 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.

The class GlobalKTableIntegrationTest, method before.

@Before
public void before() throws InterruptedException {
    testNo++;
    builder = new StreamsBuilder();
    createTopics();
    streamsConfiguration = new Properties();
    final String applicationId = "globalTableTopic-table-test-" + testNo;
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    streamsConfiguration.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
    globalTable = builder.globalTable(
            globalTableTopic,
            Consumed.with(Serdes.Long(), Serdes.String()),
            Materialized.<Long, String, KeyValueStore<Bytes, byte[]>>as(globalStore)
                    .withKeySerde(Serdes.Long())
                    .withValueSerde(Serdes.String()));
    final Consumed<String, Long> stringLongConsumed = Consumed.with(Serdes.String(), Serdes.Long());
    stream = builder.stream(streamTopic, stringLongConsumed);
    // capture every key/value the topology emits so the tests can assert on them
    foreachAction = new ForeachAction<String, String>() {

        @Override
        public void apply(final String key, final String value) {
            results.put(key, value);
        }
    };
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), KeyValueStore (org.apache.kafka.streams.state.KeyValueStore), ReadOnlyKeyValueStore (org.apache.kafka.streams.state.ReadOnlyKeyValueStore), Properties (java.util.Properties), Before (org.junit.Before)
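
The before() method above only wires up the topology inputs; the tests in this class then join the stream against the global table. Below is a hedged sketch of that kind of join, reusing the fields initialized above; the key-selection and join logic are illustrative, and ValueJoiner comes from org.apache.kafka.streams.kstream.

    // each stream record's Long value is used as the lookup key into the
    // GlobalKTable<Long, String>; the joiner keeps the String the table returns
    final KStream<String, String> streamTableJoin = stream.join(globalTable, new KeyValueMapper<String, Long, Long>() {

        @Override
        public Long apply(final String key, final Long value) {
            return value;
        }
    }, new ValueJoiner<Long, String, String>() {

        @Override
        public String apply(final Long streamValue, final String tableValue) {
            return tableValue;
        }
    });
    // route every joined record into the capture map via the foreachAction above
    streamTableJoin.foreach(foreachAction);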

Example 8 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.

The class InternalTopicIntegrationTest, method shouldCompactAndDeleteTopicsForWindowStoreChangelogs.

@Test
public void shouldCompactAndDeleteTopicsForWindowStoreChangelogs() throws Exception {
    final String appID = APP_ID + "-compact-delete";
    streamsProp.put(StreamsConfig.APPLICATION_ID_CONFIG, appID);
    // 
    // Step 1: Configure and start a simple word count topology
    // 
    StreamsBuilder builder = new StreamsBuilder();
    KStream<String, String> textLines = builder.stream(DEFAULT_INPUT_TOPIC);
    // the changelog retention set via until(); asserted against the topic config below
    final int durationMs = 2000;
    textLines.flatMapValues(new ValueMapper<String, Iterable<String>>() {

        @Override
        public Iterable<String> apply(final String value) {
            return Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"));
        }
    }).groupBy(MockMapper.<String, String>selectValueMapper())
            .windowedBy(TimeWindows.of(1000).until(durationMs))
            .count(Materialized.<String, Long, WindowStore<org.apache.kafka.common.utils.Bytes, byte[]>>as("CountWindows"));
    KafkaStreams streams = new KafkaStreams(builder.build(), streamsProp);
    streams.start();
    // 
    // Step 2: Produce some input data to the input topic.
    // 
    produceData(Arrays.asList("hello", "world", "world", "hello world"));
    // 
    // Step 3: Verify the state changelog topic is configured with the compact,delete cleanup policy
    // 
    streams.close();
    final Properties properties = getTopicProperties(ProcessorStateManager.storeChangelogTopic(appID, "CountWindows"));
    final List<String> policies = Arrays.asList(properties.getProperty(LogConfig.CleanupPolicyProp()).split(","));
    assertEquals(2, policies.size());
    assertTrue(policies.contains(LogConfig.Compact()));
    assertTrue(policies.contains(LogConfig.Delete()));
    // retention should be 1 day + the window duration
    final long retention = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS) + durationMs;
    assertEquals(retention, Long.parseLong(properties.getProperty(LogConfig.RetentionMsProp())));
    final Properties repartitionProps = getTopicProperties(appID + "-CountWindows-repartition");
    assertEquals(LogConfig.Delete(), repartitionProps.getProperty(LogConfig.CleanupPolicyProp()));
    assertEquals(4, repartitionProps.size());
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), KafkaStreams (org.apache.kafka.streams.KafkaStreams), ValueMapper (org.apache.kafka.streams.kstream.ValueMapper), Properties (java.util.Properties), IntegrationTest (org.apache.kafka.test.IntegrationTest), Test (org.junit.Test)
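
The TimeWindows.of(long).until(long) calls used above belong to the old millisecond-based API; later releases moved window sizes to java.time.Duration and store retention onto Materialized. A minimal sketch of the equivalent windowed count, assuming Kafka Streams 3.0+ (Bytes is org.apache.kafka.common.utils.Bytes, and lambdas replace the anonymous classes):

    textLines.flatMapValues(value -> Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+")))
            .groupBy((key, word) -> word)
            .windowedBy(TimeWindows.ofSizeWithNoGrace(Duration.ofMillis(1000)))
            .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("CountWindows")
                    // withRetention() replaces until(); it must cover window size plus grace
                    .withRetention(Duration.ofMillis(2000)));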

Example 9 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.

The class KStreamAggregationDedupIntegrationTest, method before.

@Before
public void before() throws InterruptedException {
    testNo++;
    builder = new StreamsBuilder();
    createTopics();
    streamsConfiguration = new Properties();
    final String applicationId = "kgrouped-stream-test-" + testNo;
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, COMMIT_INTERVAL_MS);
    streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 10 * 1024 * 1024L);
    streamsConfiguration.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    KeyValueMapper<Integer, String, String> mapper = MockMapper.<Integer, String>selectValueMapper();
    stream = builder.stream(streamOneInput, Consumed.with(Serdes.Integer(), Serdes.String()));
    groupedStream = stream.groupBy(mapper, Serialized.with(Serdes.String(), Serdes.String()));
    reducer = new Reducer<String>() {

        @Override
        public String apply(String value1, String value2) {
            return value1 + ":" + value2;
        }
    };
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), Properties (java.util.Properties), Before (org.junit.Before)
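
On Kafka Streams 2.1+, the Serialized class used above is deprecated in favor of Grouped (org.apache.kafka.streams.kstream.Grouped), and Reducer is a functional interface, so the anonymous class collapses to a lambda. A minimal sketch of the equivalent setup; the output topic name is a placeholder, not taken from the test:

    groupedStream = stream.groupBy(mapper, Grouped.with(Serdes.String(), Serdes.String()));
    reducer = (value1, value2) -> value1 + ":" + value2;
    // a typical downstream use: reduce per key and write the result out
    groupedStream.reduce(reducer)
            .toStream()
            .to("output-topic", Produced.with(Serdes.String(), Serdes.String()));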

Example 10 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.

The class KStreamAggregationIntegrationTest, method before.

@Before
public void before() throws InterruptedException {
    testNo++;
    builder = new StreamsBuilder();
    createTopics();
    streamsConfiguration = new Properties();
    final String applicationId = "kgrouped-stream-test-" + testNo;
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    streamsConfiguration.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
    final KeyValueMapper<Integer, String, String> mapper = MockMapper.selectValueMapper();
    stream = builder.stream(streamOneInput, Consumed.with(Serdes.Integer(), Serdes.String()));
    groupedStream = stream.groupBy(mapper, Serialized.with(Serdes.String(), Serdes.String()));
    reducer = new Reducer<String>() {

        @Override
        public String apply(final String value1, final String value2) {
            return value1 + ":" + value2;
        }
    };
    initializer = new Initializer<Integer>() {

        @Override
        public Integer apply() {
            return 0;
        }
    };
    aggregator = new Aggregator<String, String, Integer>() {

        @Override
        public Integer apply(final String aggKey, final String value, final Integer aggregate) {
            return aggregate + value.length();
        }
    };
}
Also used: StreamsBuilder (org.apache.kafka.streams.StreamsBuilder), Properties (java.util.Properties), Before (org.junit.Before)
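
Reducer, Initializer, and Aggregator are all functional interfaces, so on Java 8+ the three anonymous classes above collapse to lambdas, which the tests then feed into an aggregation. A minimal sketch reusing the fields initialized above; the store name is illustrative, and Bytes is org.apache.kafka.common.utils.Bytes:

    initializer = () -> 0;
    aggregator = (aggKey, value, aggregate) -> aggregate + value.length();
    // count characters per selected key into a named, queryable KeyValueStore
    groupedStream.aggregate(initializer, aggregator,
            Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as("aggregated-stream-store")
                    .withValueSerde(Serdes.Integer()));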

Aggregations

StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 561 uses
Test (org.junit.Test): 430 uses
Properties (java.util.Properties): 238 uses
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 173 uses
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 171 uses
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 157 uses
KeyValue (org.apache.kafka.streams.KeyValue): 156 uses
Serdes (org.apache.kafka.common.serialization.Serdes): 134 uses
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 107 uses
IntegrationTest (org.apache.kafka.test.IntegrationTest): 104 uses
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 95 uses
Before (org.junit.Before): 93 uses
KStream (org.apache.kafka.streams.kstream.KStream): 89 uses
Topology (org.apache.kafka.streams.Topology): 88 uses
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 86 uses
Consumed (org.apache.kafka.streams.kstream.Consumed): 80 uses
Duration (java.time.Duration): 77 uses
KTable (org.apache.kafka.streams.kstream.KTable): 76 uses
KeyValueStore (org.apache.kafka.streams.state.KeyValueStore): 75 uses
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 75 uses