Example usage of org.apache.kafka.streams.kstream.Consumed from the Apache Kafka project:
class KTableMapValuesTest, method shouldNotEnableSendingOldValuesOnParentIfMapValuesMaterialized.
@Test
public void shouldNotEnableSendingOldValuesOnParentIfMapValuesMaterialized() {
    // A materialized mapValues() owns its own state store, so turning on
    // old-value forwarding downstream must NOT force the parent to emit old values.
    final StreamsBuilder streamsBuilder = new StreamsBuilder();
    final String inputTopic = "topic1";

    final KTableImpl<String, String, String> parentTable =
        (KTableImpl<String, String, String>) streamsBuilder.table(inputTopic, consumed);
    final KTableImpl<String, String, Integer> mappedTable =
        (KTableImpl<String, String, Integer>) parentTable.mapValues(
            Integer::valueOf,
            Materialized.<String, Integer, KeyValueStore<Bytes, byte[]>>as("bob")
                .withValueSerde(Serdes.Integer()));

    mappedTable.enableSendingOldValues(true);

    // Parent stays unchanged; only the materialized child sends old values.
    assertThat(parentTable.sendingOldValueEnabled(), is(false));
    assertThat(mappedTable.sendingOldValueEnabled(), is(true));

    testSendingOldValues(streamsBuilder, inputTopic, mappedTable);
}
Example usage of org.apache.kafka.streams.kstream.Consumed from the Apache Kafka project:
class KTableMapValuesTest, method shouldEnableSendingOldValuesOnParentIfMapValuesNotMaterialized.
@Test
public void shouldEnableSendingOldValuesOnParentIfMapValuesNotMaterialized() {
    // Without its own materialization, mapValues() cannot reconstruct old values
    // itself, so enabling old-value forwarding must propagate up to the parent.
    final StreamsBuilder streamsBuilder = new StreamsBuilder();
    final String inputTopic = "topic1";

    final KTableImpl<String, String, String> parentTable =
        (KTableImpl<String, String, String>) streamsBuilder.table(inputTopic, consumed);
    final KTableImpl<String, String, Integer> mappedTable =
        (KTableImpl<String, String, Integer>) parentTable.mapValues(Integer::valueOf);

    mappedTable.enableSendingOldValues(true);

    // Both parent and child now forward old values.
    assertThat(parentTable.sendingOldValueEnabled(), is(true));
    assertThat(mappedTable.sendingOldValueEnabled(), is(true));

    testSendingOldValues(streamsBuilder, inputTopic, mappedTable);
}
Example usage of org.apache.kafka.streams.kstream.Consumed from the Apache Kafka project:
class KTableMapValuesTest, method testNotSendingOldValue.
@Test
public void testNotSendingOldValue() {
    // With old-value forwarding disabled (the default), every downstream Change
    // record must carry only the new value; the old value slot stays null.
    final StreamsBuilder streamsBuilder = new StreamsBuilder();
    final String inputTopic = "topic1";

    final KTableImpl<String, String, String> sourceTable =
        (KTableImpl<String, String, String>) streamsBuilder.table(inputTopic, consumed);
    final KTableImpl<String, String, Integer> mappedTable =
        (KTableImpl<String, String, Integer>) sourceTable.mapValues(Integer::valueOf);

    final MockApiProcessorSupplier<String, Integer, Void, Void> processorSupplier =
        new MockApiProcessorSupplier<>();
    final Topology topology = streamsBuilder.build().addProcessor("proc", processorSupplier, mappedTable.name);

    try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
        final TestInputTopic<String, String> input = driver.createInputTopic(
            inputTopic, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
        final MockApiProcessor<String, Integer, Void, Void> processor =
            processorSupplier.theCapturedProcessor();

        // Neither table should be forwarding old values.
        assertFalse(sourceTable.sendingOldValueEnabled());
        assertFalse(mappedTable.sendingOldValueEnabled());

        // Initial inserts: old value is null because the keys are new.
        input.pipeInput("A", "01", 5L);
        input.pipeInput("B", "01", 10L);
        input.pipeInput("C", "01", 15L);
        processor.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(1, null), 5),
            new KeyValueTimestamp<>("B", new Change<>(1, null), 10),
            new KeyValueTimestamp<>("C", new Change<>(1, null), 15));

        // Updates: old value is still null since forwarding is disabled.
        input.pipeInput("A", "02", 10L);
        input.pipeInput("B", "02", 8L);
        processor.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(2, null), 10),
            new KeyValueTimestamp<>("B", new Change<>(2, null), 8));

        input.pipeInput("A", "03", 20L);
        processor.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(3, null), 20));

        // Tombstone: both new and old value are null.
        input.pipeInput("A", (String) null, 30L);
        processor.checkAndClearProcessResult(
            new KeyValueTimestamp<>("A", new Change<>(null, null), 30));
    }
}
Example usage of org.apache.kafka.streams.kstream.Consumed from the Apache Kafka project:
class KTableFilterTest, method shouldSkipNullToRepartitionOnMaterialization.
@Test
public void shouldSkipNullToRepartitionOnMaterialization() {
    // Do not explicitly set enableSendingOldValues. Let a further downstream
    // stateful operator (the reduce) trigger it instead.
    final StreamsBuilder streamsBuilder = new StreamsBuilder();
    final String inputTopic = "topic1";
    final Consumed<String, String> stringConsumed = Consumed.with(Serdes.String(), Serdes.String());

    final KTableImpl<String, String, String> sourceTable =
        (KTableImpl<String, String, String>) streamsBuilder.table(inputTopic, stringConsumed);
    // Materialized filter followed by a repartitioning groupBy + reduce.
    final KTableImpl<String, String, String> resultTable =
        (KTableImpl<String, String, String>) sourceTable
            .filter((key, value) -> value.equalsIgnoreCase("accept"), Materialized.as("store2"))
            .groupBy(MockMapper.noOpKeyValueMapper())
            .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_REMOVER, Materialized.as("mock-result"));

    doTestSkipNullOnMaterialization(streamsBuilder, sourceTable, resultTable, inputTopic);
}
Example usage of org.apache.kafka.streams.kstream.Consumed from the Apache Kafka project:
class KTableFilterTest, method shouldSkipNullToRepartitionWithoutMaterialization.
@Test
public void shouldSkipNullToRepartitionWithoutMaterialization() {
    // Do not explicitly set enableSendingOldValues. Let a further downstream
    // stateful operator (the reduce) trigger it instead.
    final StreamsBuilder streamsBuilder = new StreamsBuilder();
    final String inputTopic = "topic1";
    final Consumed<String, String> stringConsumed = Consumed.with(Serdes.String(), Serdes.String());

    final KTableImpl<String, String, String> sourceTable =
        (KTableImpl<String, String, String>) streamsBuilder.table(inputTopic, stringConsumed);
    // Non-materialized filter followed by a repartitioning groupBy + reduce.
    final KTableImpl<String, String, String> resultTable =
        (KTableImpl<String, String, String>) sourceTable
            .filter((key, value) -> value.equalsIgnoreCase("accept"))
            .groupBy(MockMapper.noOpKeyValueMapper())
            .reduce(MockReducer.STRING_ADDER, MockReducer.STRING_REMOVER);

    doTestSkipNullOnMaterialization(streamsBuilder, sourceTable, resultTable, inputTopic);
}
Aggregations