Use of org.apache.kafka.streams.kstream.ValueMapper in project apache-kafka-on-k8s by banzaicloud.
The class StreamsStandByReplicaTest, method main.
public static void main(final String[] args) throws IOException {
    System.out.println("StreamsTest instance started");

    final String kafka = args.length > 0 ? args[0] : "localhost:9092";
    final String propFileName = args.length > 1 ? args[1] : null;
    final String additionalConfigs = args.length > 2 ? args[2] : null;

    final Serde<String> stringSerde = Serdes.String();

    final Properties streamsProperties = Utils.loadProps(propFileName);
    streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-streams-standby-tasks");
    streamsProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
    streamsProperties.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100);
    streamsProperties.put(StreamsConfig.NUM_STANDBY_REPLICAS_CONFIG, 1);
    streamsProperties.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    streamsProperties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsProperties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsProperties.put(StreamsConfig.producerPrefix(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG), true);

    if (additionalConfigs == null) {
        System.err.println("additional configs are not provided");
        System.err.flush();
        System.exit(1);
    }

    final Map<String, String> updated = SystemTestUtil.parseConfigs(additionalConfigs);
    System.out.println("Updating configs with " + updated);

    final String sourceTopic = updated.remove("sourceTopic");
    final String sinkTopic1 = updated.remove("sinkTopic1");
    final String sinkTopic2 = updated.remove("sinkTopic2");
    if (sourceTopic == null || sinkTopic1 == null || sinkTopic2 == null) {
        System.err.println(String.format(
            "one or more required topics null sourceTopic[%s], sinkTopic1[%s], sinkTopic2[%s]",
            sourceTopic, sinkTopic1, sinkTopic2));
        System.err.flush();
        System.exit(1);
    }

    streamsProperties.putAll(updated);

    if (!confirmCorrectConfigs(streamsProperties)) {
        System.err.println(String.format(
            "ERROR: Did not have all required configs expected to contain %s, %s, %s, %s",
            StreamsConfig.consumerPrefix(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG),
            StreamsConfig.producerPrefix(ProducerConfig.RETRIES_CONFIG),
            StreamsConfig.producerPrefix(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG),
            StreamsConfig.producerPrefix(ProducerConfig.MAX_BLOCK_MS_CONFIG)));
        System.exit(1);
    }

    final StreamsBuilder builder = new StreamsBuilder();

    final String inMemoryStoreName = "in-memory-store";
    final String persistentMemoryStoreName = "persistent-memory-store";
    final KeyValueBytesStoreSupplier inMemoryStoreSupplier = Stores.inMemoryKeyValueStore(inMemoryStoreName);
    final KeyValueBytesStoreSupplier persistentStoreSupplier = Stores.persistentKeyValueStore(persistentMemoryStoreName);

    final KStream<String, String> inputStream = builder.stream(sourceTopic, Consumed.with(stringSerde, stringSerde));

    final ValueMapper<Long, String> countMapper = new ValueMapper<Long, String>() {

        @Override
        public String apply(final Long value) {
            return value.toString();
        }
    };

    inputStream.groupByKey()
        .count(Materialized.<String, Long>as(inMemoryStoreSupplier))
        .toStream()
        .mapValues(countMapper)
        .to(sinkTopic1, Produced.with(stringSerde, stringSerde));

    inputStream.groupByKey()
        .count(Materialized.<String, Long>as(persistentStoreSupplier))
        .toStream()
        .mapValues(countMapper)
        .to(sinkTopic2, Produced.with(stringSerde, stringSerde));

    final KafkaStreams streams = new KafkaStreams(builder.build(), streamsProperties);

    streams.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {

        @Override
        public void uncaughtException(final Thread t, final Throwable e) {
            System.err.println("FATAL: An unexpected exception " + e);
            e.printStackTrace(System.err);
            System.err.flush();
            shutdown(streams);
        }
    });

    streams.setStateListener(new KafkaStreams.StateListener() {

        @Override
        public void onChange(final KafkaStreams.State newState, final KafkaStreams.State oldState) {
            if (newState == KafkaStreams.State.RUNNING && oldState == KafkaStreams.State.REBALANCING) {
                final Set<ThreadMetadata> threadMetadata = streams.localThreadsMetadata();
                for (final ThreadMetadata threadMetadatum : threadMetadata) {
                    System.out.println("ACTIVE_TASKS:" + threadMetadatum.activeTasks().size()
                        + " STANDBY_TASKS:" + threadMetadatum.standbyTasks().size());
                }
            }
        }
    });

    System.out.println("Start Kafka Streams");
    streams.start();

    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

        @Override
        public void run() {
            shutdown(streams);
            System.out.println("Shut down streams now");
        }
    }));
}
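ValueMapper declares a single apply(V) method, so the anonymous countMapper above can be collapsed into a lambda or method reference. A minimal self-contained sketch (the class name is illustrative):

import org.apache.kafka.streams.kstream.ValueMapper;

public class ValueMapperLambdaSketch {

    public static void main(final String[] args) {
        // Same behavior as the anonymous countMapper class above, as a lambda ...
        final ValueMapper<Long, String> countMapper = value -> value.toString();
        // ... or as a method reference:
        final ValueMapper<Long, String> countMapperRef = Object::toString;

        System.out.println(countMapper.apply(42L));   // prints "42"
        System.out.println(countMapperRef.apply(7L)); // prints "7"
    }
}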
Use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.
The class QueryableStateIntegrationTest, method shouldBeAbleToQueryMapValuesAfterFilterState.
@Test
public void shouldBeAbleToQueryMapValuesAfterFilterState() throws Exception {
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

    final StreamsBuilder builder = new StreamsBuilder();
    final String[] keys = { "hello", "goodbye", "welcome", "go", "kafka" };
    final Set<KeyValue<String, String>> batch1 = new HashSet<>(Arrays.asList(
        new KeyValue<>(keys[0], "1"),
        new KeyValue<>(keys[1], "1"),
        new KeyValue<>(keys[2], "3"),
        new KeyValue<>(keys[3], "5"),
        new KeyValue<>(keys[4], "2")));
    final Set<KeyValue<String, Long>> expectedBatch1 =
        new HashSet<>(Collections.singleton(new KeyValue<>(keys[4], 2L)));

    IntegrationTestUtils.produceKeyValuesSynchronously(
        streamOne,
        batch1,
        TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()),
        mockTime);

    final Predicate<String, String> filterPredicate = (key, value) -> key.contains("kafka");
    final KTable<String, String> t1 = builder.table(streamOne);
    final KTable<String, String> t2 = t1.filter(filterPredicate, Materialized.as("queryFilter"));
    final KTable<String, Long> t3 = t2.mapValues(
        (ValueMapper<String, Long>) Long::valueOf,
        Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("queryMapValues").withValueSerde(Serdes.Long()));
    t3.toStream().to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

    kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
    startKafkaStreamsAndWaitForRunningState(kafkaStreams);
    waitUntilAtLeastNumRecordProcessed(outputTopic, 1);

    final ReadOnlyKeyValueStore<String, Long> myMapStore =
        IntegrationTestUtils.getStore("queryMapValues", kafkaStreams, keyValueStore());
    for (final KeyValue<String, Long> expectedEntry : expectedBatch1) {
        assertEquals(expectedEntry.value, myMapStore.get(expectedEntry.key));
    }
    for (final KeyValue<String, String> batchEntry : batch1) {
        final KeyValue<String, Long> batchEntryMapValue =
            new KeyValue<>(batchEntry.key, Long.valueOf(batchEntry.value));
        if (!expectedBatch1.contains(batchEntryMapValue)) {
            assertNull(myMapStore.get(batchEntry.key));
        }
    }
}
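IntegrationTestUtils.getStore is a test helper; the lookup it performs corresponds roughly to the plain interactive-query API below. A sketch, assuming the KafkaStreams instance has reached RUNNING (the helper additionally retries while the store is still unavailable during rebalancing); StoreQueryParameters and QueryableStoreTypes come from org.apache.kafka.streams:

// Roughly what the getStore helper resolves to (a sketch; the helper also
// retries on InvalidStateStoreException while the store is still migrating):
final ReadOnlyKeyValueStore<String, Long> store = kafkaStreams.store(
    StoreQueryParameters.fromNameAndType("queryMapValues", QueryableStoreTypes.keyValueStore()));
final Long kafkaCount = store.get("kafka"); // 2L: the only entry surviving the filter, per expectedBatch1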
Use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.
The class QueryableStateIntegrationTest, method createCountStream.
/**
 * Creates a typical word count topology.
 */
private KafkaStreams createCountStream(final String inputTopic,
                                       final String outputTopic,
                                       final String windowOutputTopic,
                                       final String storeName,
                                       final String windowStoreName,
                                       final Properties streamsConfiguration) {
    final StreamsBuilder builder = new StreamsBuilder();
    final Serde<String> stringSerde = Serdes.String();
    final KStream<String, String> textLines = builder.stream(inputTopic, Consumed.with(stringSerde, stringSerde));

    final KGroupedStream<String, String> groupedByWord = textLines
        .flatMapValues((ValueMapper<String, Iterable<String>>) value -> Arrays.asList(value.split("\\W+")))
        .groupBy(MockMapper.selectValueMapper());

    // Create a state store for the all-time word count.
    groupedByWord.count(Materialized.as(storeName + "-" + inputTopic))
        .toStream()
        .to(outputTopic, Produced.with(Serdes.String(), Serdes.Long()));

    // Create a windowed state store holding the word count per one-minute window.
    groupedByWord.windowedBy(TimeWindows.of(ofMillis(WINDOW_SIZE)))
        .count(Materialized.as(windowStoreName + "-" + inputTopic))
        .toStream((key, value) -> key.key())
        .to(windowOutputTopic, Produced.with(Serdes.String(), Serdes.Long()));

    return new KafkaStreams(builder.build(), streamsConfiguration);
}
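The inline (ValueMapper<String, Iterable<String>>) cast pins the lambda's target type to the ValueMapper overload of flatMapValues, which is what makes this snippet a ValueMapper usage. An explicitly typed local variable expresses the same thing; a sketch with unchanged behavior:

// Same topology step, with the lambda's target type made explicit via a
// local variable instead of an inline cast:
final ValueMapper<String, Iterable<String>> splitIntoWords =
    value -> Arrays.asList(value.split("\\W+"));
final KGroupedStream<String, String> groupedByWord =
    textLines.flatMapValues(splitIntoWords).groupBy(MockMapper.selectValueMapper());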
Use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.
The class KTableKTableLeftJoinTest, method shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions.
/**
 * This test was written to reproduce https://issues.apache.org/jira/browse/KAFKA-4492.
 * It is based on a fairly complicated join used by the developer who reported the bug.
 * Before the fix, this topology triggered an IllegalStateException.
 */
@Test
public void shouldNotThrowIllegalStateExceptionWhenMultiCacheEvictions() {
    final String agg = "agg";
    final String tableOne = "tableOne";
    final String tableTwo = "tableTwo";
    final String tableThree = "tableThree";
    final String tableFour = "tableFour";
    final String tableFive = "tableFive";
    final String tableSix = "tableSix";
    final String[] inputs = { agg, tableOne, tableTwo, tableThree, tableFour, tableFive, tableSix };

    final StreamsBuilder builder = new StreamsBuilder();
    final Consumed<Long, String> consumed = Consumed.with(Serdes.Long(), Serdes.String());

    final KTable<Long, String> aggTable = builder
        .table(agg, consumed, Materialized.as(Stores.inMemoryKeyValueStore("agg-base-store")))
        .groupBy(KeyValue::new, Grouped.with(Serdes.Long(), Serdes.String()))
        .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_ADDER,
            Materialized.as(Stores.inMemoryKeyValueStore("agg-store")));

    final KTable<Long, String> one = builder.table(tableOne, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableOne-base-store")));
    final KTable<Long, String> two = builder.table(tableTwo, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableTwo-base-store")));
    final KTable<Long, String> three = builder.table(tableThree, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableThree-base-store")));
    final KTable<Long, String> four = builder.table(tableFour, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFour-base-store")));
    final KTable<Long, String> five = builder.table(tableFive, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableFive-base-store")));
    final KTable<Long, String> six = builder.table(tableSix, consumed, Materialized.as(Stores.inMemoryKeyValueStore("tableSix-base-store")));

    final ValueMapper<String, String> mapper = value -> value.toUpperCase(Locale.ROOT);
    final KTable<Long, String> seven = one.mapValues(mapper);
    final KTable<Long, String> eight = six.leftJoin(seven, MockValueJoiner.TOSTRING_JOINER);

    aggTable.leftJoin(one, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(two, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(three, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(four, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(five, MockValueJoiner.TOSTRING_JOINER)
        .leftJoin(eight, MockValueJoiner.TOSTRING_JOINER)
        .mapValues(mapper);

    try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
        final String[] values = {
            "a", "AA", "BBB", "CCCC", "DD", "EEEEEEEE", "F", "GGGGGGGGGGGGGGG",
            "HHH", "IIIIIIIIII", "J", "KK", "LLLL", "MMMMMMMMMMMMMMMMMMMMMM",
            "NNNNN", "O", "P", "QQQQQ", "R", "SSSS", "T", "UU", "VVVVVVVVVVVVVVVVVVV"
        };
        TestInputTopic<Long, String> inputTopic;
        final Random random = new Random();
        // Pump randomized records through every input so cache evictions interleave.
        for (int i = 0; i < 1000; i++) {
            for (final String input : inputs) {
                final Long key = (long) random.nextInt(1000);
                final String value = values[random.nextInt(values.length)];
                inputTopic = driver.createInputTopic(input, Serdes.Long().serializer(), Serdes.String().serializer());
                inputTopic.pipeInput(key, value);
            }
        }
    }
}
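The props field handed to TopologyTestDriver is defined elsewhere in the test class. A minimal sketch of a configuration that would satisfy this topology; the application.id and bootstrap.servers values are placeholder assumptions (the driver never connects to a broker), and the exact required keys vary by Kafka version:

// Assumed test configuration (not the test's actual field); the default
// serdes match the Long keys and String values used above.
final Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ktable-ktable-left-join-test");
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9091");
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Long().getClass());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());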
Use of org.apache.kafka.streams.kstream.ValueMapper in project kafka by apache.
The class KTableMapValuesTest, method testSendingOldValue.
@Test
public void testSendingOldValue() throws IOException {
    final KStreamBuilder builder = new KStreamBuilder();
    final String topic1 = "topic1";

    final KTableImpl<String, String, String> table1 =
        (KTableImpl<String, String, String>) builder.table(stringSerde, stringSerde, topic1, "anyStoreName");
    final KTableImpl<String, String, Integer> table2 =
        (KTableImpl<String, String, Integer>) table1.mapValues(new ValueMapper<String, Integer>() {

            @Override
            public Integer apply(final String value) {
                return Integer.valueOf(value);
            }
        });
    table2.enableSendingOldValues();

    final MockProcessorSupplier<String, Integer> proc = new MockProcessorSupplier<>();
    builder.addProcessor("proc", proc, table2.name);

    driver = new KStreamTestDriver(builder, stateDir, null, null);
    assertTrue(table1.sendingOldValueEnabled());
    assertTrue(table2.sendingOldValueEnabled());

    // Expected results are encoded as key:(newValue<-oldValue); the first
    // update for each key carries no old value, hence "<-null".
    driver.process(topic1, "A", "01");
    driver.process(topic1, "B", "01");
    driver.process(topic1, "C", "01");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(1<-null)", "B:(1<-null)", "C:(1<-null)");

    driver.process(topic1, "A", "02");
    driver.process(topic1, "B", "02");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(2<-1)", "B:(2<-1)");

    driver.process(topic1, "A", "03");
    driver.flushState();
    proc.checkAndClearProcessResult("A:(3<-2)");

    // A null value is a tombstone: the new value is null and the old value is forwarded.
    driver.process(topic1, "A", null);
    driver.flushState();
    proc.checkAndClearProcessResult("A:(null<-3)");
}
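This snippet predates the lambda-friendly DSL (note the deprecated KStreamBuilder). On a current DSL the same parsing mapper can be a method reference; a minimal sketch, not the test's actual code:

// Equivalent mapper as a method reference (a sketch against the current DSL;
// Integer::valueOf parses the string value exactly as the anonymous class above):
final ValueMapper<String, Integer> parseValue = Integer::valueOf;
final KTable<String, Integer> mapped = table1.mapValues(parseValue);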