Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class KStreamImplTest, the method testToWithNullValueSerdeDoesntNPE:
@Test
public void testToWithNullValueSerdeDoesntNPE() {
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> inputStream = builder.stream(stringSerde, stringSerde, "input");
    // A null value serde must fall back to the configured default instead of throwing.
    inputStream.to(stringSerde, null, "output");
}
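Passing null for a serde makes Streams fall back to the default serde from the application configuration, which is why the call above must not throw. A minimal sketch of the defaults that call falls back to, assuming the pre-1.0 StreamsConfig keys that KStreamBuilder-era applications use (the test fixture configures something similar, not shown in this snippet):

final Properties props = new Properties();
// KEY_SERDE_CLASS_CONFIG / VALUE_SERDE_CLASS_CONFIG are the pre-1.0 default-serde keys.
props.setProperty(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.setProperty(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
// With these defaults in place, to(stringSerde, null, "output") serializes values
// with the default string serde rather than throwing a NullPointerException.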
Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class KStreamKStreamJoinTest, the method testAsymetricWindowingAfter:
@Test
public void testAsymetricWindowingAfter() throws Exception {
    long time = 1000L;
    final KStreamBuilder builder = new KStreamBuilder();
    final int[] expectedKeys = new int[] {0, 1, 2, 3};

    final MockProcessorSupplier<Integer, String> processor = new MockProcessorSupplier<>();
    final KStream<Integer, String> stream1 = builder.stream(intSerde, stringSerde, topic1);
    final KStream<Integer, String> stream2 = builder.stream(intSerde, stringSerde, topic2);
    // Asymmetric window: a stream1 record at timestamp t1 joins stream2 records
    // with timestamps in [t1, t1 + 100].
    final KStream<Integer, String> joined = stream1.join(stream2, MockValueJoiner.TOSTRING_JOINER, JoinWindows.of(0).after(100), intSerde, stringSerde, stringSerde);
    joined.process(processor);

    final Collection<Set<String>> copartitionGroups = builder.copartitionGroups();
    assertEquals(1, copartitionGroups.size());
    assertEquals(new HashSet<>(Arrays.asList(topic1, topic2)), copartitionGroups.iterator().next());

    driver = new KStreamTestDriver(builder, stateDir);

    // Push four records into stream1 at timestamps 1000..1003; nothing to join with yet.
    for (int i = 0; i < expectedKeys.length; i++) {
        setRecordContext(time + i, topic1);
        driver.process(topic1, expectedKeys[i], "X" + expectedKeys[i]);
    }
    processor.checkAndClearProcessResult();

    // At timestamp 999, one tick before the earliest window opens, nothing joins.
    time = 1000L - 1L;
    setRecordContext(time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult();

    // From 1000 to 1003, one more stream1 record falls into the window each tick.
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0");

    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1");

    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2");

    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

    // At timestamp 1100, all four stream1 records are still within their 100 ms window.
    time = 1000L + 100L;
    setRecordContext(time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("0:X0+YY0", "1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

    // From 1101 onward, the windows close one by one, oldest stream1 record first.
    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("1:X1+YY1", "2:X2+YY2", "3:X3+YY3");

    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("2:X2+YY2", "3:X3+YY3");

    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult("3:X3+YY3");

    setRecordContext(++time, topic2);
    for (int expectedKey : expectedKeys) {
        driver.process(topic2, expectedKey, "YY" + expectedKey);
    }
    processor.checkAndClearProcessResult();
}
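The expected results follow from the asymmetric window bounds: with JoinWindows.of(0).after(100), a stream1 record at timestamp t1 joins a stream2 record at timestamp t2 only if t1 <= t2 <= t1 + 100. A minimal standalone sketch of that predicate (a hypothetical helper for illustration, not part of the Kafka API):

// Hypothetical helper mirroring JoinWindows.of(0).after(100) for the stream1/stream2 pair above.
static boolean inWindow(final long t1, final long t2) {
    final long before = 0L;   // JoinWindows.of(0): no tolerance before t1
    final long after = 100L;  // .after(100): up to 100 ms after t1
    return t1 - before <= t2 && t2 <= t1 + after;
}

For example, the stream2 batch at timestamp 999 matches none of the stream1 records (timestamps 1000 to 1003), the batch at 1100 still matches all four, and the batch at 1104 matches none.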
Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class KafkaStreamsTest, the method shouldReturnFalseOnCloseWhenThreadsHaventTerminated:
@Test
public void shouldReturnFalseOnCloseWhenThreadsHaventTerminated() throws Exception {
    final AtomicBoolean keepRunning = new AtomicBoolean(true);
    try {
        final Properties props = new Properties();
        props.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "appId");
        props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        final KStreamBuilder builder = new KStreamBuilder();
        final CountDownLatch latch = new CountDownLatch(1);
        final String topic = "input";
        CLUSTER.createTopic(topic);
        // Block the processing thread inside foreach() so close() cannot finish in time.
        builder.stream(Serdes.String(), Serdes.String(), topic).foreach(new ForeachAction<String, String>() {
            @Override
            public void apply(final String key, final String value) {
                try {
                    latch.countDown();
                    while (keepRunning.get()) {
                        Thread.sleep(10);
                    }
                } catch (final InterruptedException e) {
                    // no-op
                }
            }
        });
        final KafkaStreams streams = new KafkaStreams(builder, props);
        streams.start();
        IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp(
            topic,
            Collections.singletonList(new KeyValue<>("A", "A")),
            TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()),
            System.currentTimeMillis());
        assertTrue("Timed out waiting to receive single message", latch.await(30, TimeUnit.SECONDS));
        // The stream thread is still spinning in foreach(), so a 10 ms close must fail.
        assertFalse(streams.close(10, TimeUnit.MILLISECONDS));
    } finally {
        // Stop the blocked thread so it doesn't interfere with other tests.
        keepRunning.set(false);
    }
}
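The close(long, TimeUnit) overload returns false when the stream threads fail to terminate within the timeout, which is exactly what the blocking foreach action forces here. A minimal sketch of how a caller might react to that return value (assumed application code, not part of the test):

// Assumed follow-up: unblock processing first, then retry with a longer timeout.
// close(long, TimeUnit) returns true once all stream threads have terminated.
keepRunning.set(false);
if (!streams.close(30, TimeUnit.SECONDS)) {
    // Threads still alive after 30 s: log it and treat the instance as leaked.
}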
Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class KafkaStreamsTest, the method testCannotCleanupWhileRunning:
@Test(expected = IllegalStateException.class)
public void testCannotCleanupWhileRunning() throws Exception {
    final Properties props = new Properties();
    props.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "testCannotCleanupWhileRunning");
    props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    final KStreamBuilder builder = new KStreamBuilder();
    final KafkaStreams streams = new KafkaStreams(builder, props);
    streams.start();
    try {
        streams.cleanUp();
    } catch (final IllegalStateException e) {
        Assert.assertEquals("Cannot clean up while running.", e.getMessage());
        throw e;
    } finally {
        streams.close();
    }
}
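cleanUp() removes the application's local state directory, so it is only allowed while no threads are using that state, i.e. before start() or after close(). A minimal sketch of the supported orderings (assumed usage, mirroring the exception message above):

final KafkaStreams streams = new KafkaStreams(builder, props);
streams.cleanUp();   // legal: the instance is in CREATED state, nothing runs yet
streams.start();
// calling cleanUp() here would throw IllegalStateException("Cannot clean up while running.")
streams.close();
streams.cleanUp();   // legal again: the instance is in NOT_RUNNING state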
Use of org.apache.kafka.streams.kstream.KStreamBuilder in project kafka by apache.
From the class KafkaStreamsTest, the method testInitializesAndDestroysMetricsReporters:
@Test
public void testInitializesAndDestroysMetricsReporters() throws Exception {
    final int oldInitCount = MockMetricsReporter.INIT_COUNT.get();
    final KStreamBuilder builder = new KStreamBuilder();
    final KafkaStreams streams = new KafkaStreams(builder, props);
    final int newInitCount = MockMetricsReporter.INIT_COUNT.get();
    final int initDiff = newInitCount - oldInitCount;
    assertTrue("construction should initialize at least one reporter", initDiff > 0);

    final StateListenerStub stateListener = new StateListenerStub();
    streams.setStateListener(stateListener);
    Assert.assertEquals(KafkaStreams.State.CREATED, streams.state());
    Assert.assertEquals(0, stateListener.numChanges);

    streams.start();
    Assert.assertEquals(KafkaStreams.State.RUNNING, streams.state());
    Assert.assertEquals(1, stateListener.numChanges);
    Assert.assertEquals(KafkaStreams.State.CREATED, stateListener.oldState);
    Assert.assertEquals(KafkaStreams.State.RUNNING, stateListener.newState);
    Assert.assertEquals(1L, stateListener.mapStates.get(KafkaStreams.State.RUNNING).longValue());

    final int oldCloseCount = MockMetricsReporter.CLOSE_COUNT.get();
    streams.close();
    // Every reporter initialized on construction must be closed exactly once.
    assertEquals(oldCloseCount + initDiff, MockMetricsReporter.CLOSE_COUNT.get());
    Assert.assertEquals(KafkaStreams.State.NOT_RUNNING, streams.state());
    Assert.assertEquals(1L, stateListener.mapStates.get(KafkaStreams.State.RUNNING).longValue());
    Assert.assertEquals(1L, stateListener.mapStates.get(KafkaStreams.State.NOT_RUNNING).longValue());
}
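The init and close counters only move because props registers MockMetricsReporter with the client; the test class sets that up in its fixture, which is not shown here. A minimal sketch of that wiring, assuming a local broker address:

final Properties props = new Properties();
props.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "appId");
props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
// Each listed reporter class is instantiated and init() is invoked during
// KafkaStreams construction; close() is invoked on it during shutdown.
props.setProperty(StreamsConfig.METRIC_REPORTER_CLASSES_CONFIG, MockMetricsReporter.class.getName());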