Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
From class EosIntegrationTest, method runSimpleCopyTest:
private void runSimpleCopyTest(final int numberOfRestarts,
                               final String inputTopic,
                               final String throughTopic,
                               final String outputTopic) throws Exception {
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<Long, Long> input = builder.stream(inputTopic);
    KStream<Long, Long> output = input;
    if (throughTopic != null) {
        output = input.through(throughTopic);
    }
    output.to(outputTopic);
    for (int i = 0; i < numberOfRestarts; ++i) {
        final KafkaStreams streams = new KafkaStreams(
            builder.build(),
            StreamsTestUtils.getStreamsConfig(
                applicationId,
                CLUSTER.bootstrapServers(),
                Serdes.LongSerde.class.getName(),
                Serdes.LongSerde.class.getName(),
                new Properties() {
                    {
                        put(StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_RECORDS_CONFIG), 1);
                        put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);
                    }
                }));
        try {
            streams.start();
            final List<KeyValue<Long, Long>> inputData = prepareData(i * 100, i * 100 + 10L, 0L, 1L);
            IntegrationTestUtils.produceKeyValuesSynchronously(
                inputTopic,
                inputData,
                TestUtils.producerConfig(CLUSTER.bootstrapServers(), LongSerializer.class, LongSerializer.class),
                CLUSTER.time);
            final List<KeyValue<Long, Long>> committedRecords =
                IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(
                    TestUtils.consumerConfig(
                        CLUSTER.bootstrapServers(),
                        CONSUMER_GROUP_ID,
                        LongDeserializer.class,
                        LongDeserializer.class,
                        new Properties() {
                            {
                                put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.name().toLowerCase(Locale.ROOT));
                            }
                        }),
                    outputTopic,
                    inputData.size());
            checkResultPerKey(committedRecords, inputData);
        } finally {
            streams.close();
        }
    }
}
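The StreamsTestUtils and IntegrationTestUtils helpers above hide the raw client configuration. As a point of reference, a minimal standalone sketch of the same pass-through topology with exactly-once processing enabled could look like the following; the topic names, application id, and bootstrap address are illustrative placeholders, not taken from the test:

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;

public class EosCopyExample {
    public static void main(final String[] args) {
        // Illustrative configuration; only PROCESSING_GUARANTEE_CONFIG mirrors the test.
        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "eos-copy-example");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.LongSerde.class.getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.LongSerde.class.getName());
        props.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE);

        // Copy records from one topic to another, exactly once.
        final StreamsBuilder builder = new StreamsBuilder();
        final KStream<Long, Long> input = builder.stream("input-topic");
        input.to("output-topic");

        final KafkaStreams streams = new KafkaStreams(builder.build(), props);
        streams.start();
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}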
Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
From class GlobalKTableIntegrationTest, method before:
@Before
public void before() throws InterruptedException {
    testNo++;
    builder = new StreamsBuilder();
    createTopics();
    streamsConfiguration = new Properties();
    final String applicationId = "globalTableTopic-table-test-" + testNo;
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    streamsConfiguration.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
    globalTable = builder.globalTable(
            globalTableTopic,
            Consumed.with(Serdes.Long(), Serdes.String()),
            Materialized.<Long, String, KeyValueStore<Bytes, byte[]>>as(globalStore)
                    .withKeySerde(Serdes.Long())
                    .withValueSerde(Serdes.String()));
    final Consumed<String, Long> stringLongConsumed = Consumed.with(Serdes.String(), Serdes.Long());
    stream = builder.stream(streamTopic, stringLongConsumed);
    foreachAction = new ForeachAction<String, String>() {
        @Override
        public void apply(final String key, final String value) {
            results.put(key, value);
        }
    };
}
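This before() method only wires up the global table, the source stream, and the capture action; the joins themselves are issued by the individual tests. A hedged sketch of how such a setup is typically used, assuming the field types shown above (the value joiner and the choice of the stream's value as the lookup key are illustrative, not copied from the test class):

// Illustrative join: look up each stream record's Long value in the global
// table and capture the joined result via foreachAction.
stream.join(
        globalTable,
        new KeyValueMapper<String, Long, Long>() {
            @Override
            public Long apply(final String key, final Long value) {
                // Use the stream record's value as the global-table key.
                return value;
            }
        },
        new ValueJoiner<Long, String, String>() {
            @Override
            public String apply(final Long streamValue, final String globalValue) {
                return streamValue + "+" + globalValue;
            }
        })
    .foreach(foreachAction);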
Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
From class InternalTopicIntegrationTest, method shouldCompactAndDeleteTopicsForWindowStoreChangelogs:
@Test
public void shouldCompactAndDeleteTopicsForWindowStoreChangelogs() throws Exception {
    final String appID = APP_ID + "-compact-delete";
    streamsProp.put(StreamsConfig.APPLICATION_ID_CONFIG, appID);
    //
    // Step 1: Configure and start a simple word count topology
    //
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<String, String> textLines = builder.stream(DEFAULT_INPUT_TOPIC);
    final int durationMs = 2000;
    textLines.flatMapValues(new ValueMapper<String, Iterable<String>>() {
        @Override
        public Iterable<String> apply(final String value) {
            return Arrays.asList(value.toLowerCase(Locale.getDefault()).split("\\W+"));
        }
    }).groupBy(MockMapper.<String, String>selectValueMapper())
      .windowedBy(TimeWindows.of(1000).until(durationMs))
      .count(Materialized.<String, Long, WindowStore<org.apache.kafka.common.utils.Bytes, byte[]>>as("CountWindows"));
    final KafkaStreams streams = new KafkaStreams(builder.build(), streamsProp);
    streams.start();
    //
    // Step 2: Produce some input data to the input topic.
    //
    produceData(Arrays.asList("hello", "world", "world", "hello world"));
    //
    // Step 3: Verify that the window store changelog topic uses both the
    // compact and delete cleanup policies
    //
    streams.close();
    final Properties properties = getTopicProperties(ProcessorStateManager.storeChangelogTopic(appID, "CountWindows"));
    final List<String> policies = Arrays.asList(properties.getProperty(LogConfig.CleanupPolicyProp()).split(","));
    assertEquals(2, policies.size());
    assertTrue(policies.contains(LogConfig.Compact()));
    assertTrue(policies.contains(LogConfig.Delete()));
    // retention should be 1 day + the window duration
    final long retention = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS) + durationMs;
    assertEquals(retention, Long.parseLong(properties.getProperty(LogConfig.RetentionMsProp())));
    final Properties repartitionProps = getTopicProperties(appID + "-CountWindows-repartition");
    assertEquals(LogConfig.Delete(), repartitionProps.getProperty(LogConfig.CleanupPolicyProp()));
    assertEquals(4, repartitionProps.size());
}
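The retention assertion holds because Kafka Streams sets retention.ms on a windowed store's changelog topic to the window maintain period (until(durationMs), i.e. 2,000 ms here) plus an extra safety margin that defaults to one day: 86,400,000 ms + 2,000 ms = 86,402,000 ms. That margin is configurable; a hedged sketch of overriding it for the topology above (the one-hour value is illustrative):

// Illustrative: shrink the extra changelog retention margin from the default
// 24 hours (86,400,000 ms) to 1 hour, so the changelog topic would get
// retention.ms = maintainMs + 3,600,000 instead of maintainMs + 86,400,000.
streamsProp.put(StreamsConfig.WINDOW_STORE_CHANGE_LOG_ADDITIONAL_RETENTION_MS_CONFIG,
        TimeUnit.HOURS.toMillis(1));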
Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
From class KStreamAggregationDedupIntegrationTest, method before:
@Before
public void before() throws InterruptedException {
    testNo++;
    builder = new StreamsBuilder();
    createTopics();
    streamsConfiguration = new Properties();
    final String applicationId = "kgrouped-stream-test-" + testNo;
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, COMMIT_INTERVAL_MS);
    streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 10 * 1024 * 1024L);
    streamsConfiguration.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    final KeyValueMapper<Integer, String, String> mapper = MockMapper.<Integer, String>selectValueMapper();
    stream = builder.stream(streamOneInput, Consumed.with(Serdes.Integer(), Serdes.String()));
    groupedStream = stream.groupBy(mapper, Serialized.with(Serdes.String(), Serdes.String()));
    reducer = new Reducer<String>() {
        @Override
        public String apply(final String value1, final String value2) {
            return value1 + ":" + value2;
        }
    };
}
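The grouped stream and reducer built here are exercised by the individual tests; a hedged sketch of the typical continuation, with an illustrative store name and output topic:

// Illustrative continuation: maintain a running ":"-joined concatenation per
// key and publish updates to an output topic.
groupedStream
        .reduce(reducer, Materialized.<String, String, KeyValueStore<org.apache.kafka.common.utils.Bytes, byte[]>>as("reduce-by-key"))
        .toStream()
        .to("reduce-output-topic", Produced.with(Serdes.String(), Serdes.String()));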
Use of org.apache.kafka.streams.StreamsBuilder in project apache-kafka-on-k8s by banzaicloud.
From class KStreamAggregationIntegrationTest, method before:
@Before
public void before() throws InterruptedException {
    testNo++;
    builder = new StreamsBuilder();
    createTopics();
    streamsConfiguration = new Properties();
    final String applicationId = "kgrouped-stream-test-" + testNo;
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
    streamsConfiguration.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    streamsConfiguration.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true);
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
    final KeyValueMapper<Integer, String, String> mapper = MockMapper.selectValueMapper();
    stream = builder.stream(streamOneInput, Consumed.with(Serdes.Integer(), Serdes.String()));
    groupedStream = stream.groupBy(mapper, Serialized.with(Serdes.String(), Serdes.String()));
    reducer = new Reducer<String>() {
        @Override
        public String apply(final String value1, final String value2) {
            return value1 + ":" + value2;
        }
    };
    initializer = new Initializer<Integer>() {
        @Override
        public Integer apply() {
            return 0;
        }
    };
    aggregator = new Aggregator<String, String, Integer>() {
        @Override
        public Integer apply(final String aggKey, final String value, final Integer aggregate) {
            return aggregate + value.length();
        }
    };
}
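The initializer and aggregator are likewise consumed by the individual tests; together they total the character count of all values seen per key. A hedged sketch of how they plug into an aggregation (the store and topic names are illustrative):

// Illustrative continuation: aggregate the total character count per key and
// publish updates to an output topic.
groupedStream
        .aggregate(initializer, aggregator,
                Materialized.<String, Integer, KeyValueStore<org.apache.kafka.common.utils.Bytes, byte[]>>as("agg-by-key")
                        .withValueSerde(Serdes.Integer()))
        .toStream()
        .to("aggregate-output-topic", Produced.with(Serdes.String(), Serdes.Integer()));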