Usage example of org.apache.kafka.common.serialization.Serdes.String in the Apache Kafka project:
class KTableSuppressProcessorTest, method finalResultsSuppressionShouldBufferAndEmitAtGraceExpiration.
@Test
public void finalResultsSuppressionShouldBufferAndEmitAtGraceExpiration() {
    // Final-results suppression with a 1 ms grace period: a windowed result may only be
    // emitted once stream time has advanced past (window end + grace).
    final Harness<Windowed<String>, Long> harness =
        new Harness<>(finalResults(ofMillis(1L)), timeWindowedSerdeFrom(String.class, 1L), Long());
    final MockInternalNewProcessorContext<Windowed<String>, Change<Long>> context = harness.context;

    // First record: window [99, 100), arrives at stream time 99. It must be buffered.
    final long windowStart = 99L;
    final long recordTime = 99L;
    final long windowEnd = 100L;
    context.setRecordMetadata("topic", 0, 0);
    context.setTimestamp(recordTime);
    final Windowed<String> key = new Windowed<>("hey", new TimeWindow(windowStart, windowEnd));
    final Change<Long> value = ARBITRARY_CHANGE;
    harness.processor.process(new Record<>(key, value, recordTime));
    assertThat(context.forwarded(), hasSize(0));

    // Although the stream time is now 100, we have to wait 1 ms after the window *end* before we
    // emit "hey", so we don't emit yet.
    final long windowStart2 = 100L;
    final long recordTime2 = 100L;
    final long windowEnd2 = 101L;
    context.setRecordMetadata("topic", 0, 1);
    context.setTimestamp(recordTime2);
    harness.processor.process(new Record<>(new Windowed<>("dummyKey1", new TimeWindow(windowStart2, windowEnd2)), ARBITRARY_CHANGE, recordTime2));
    assertThat(context.forwarded(), hasSize(0));

    // Ok, now it's time to emit "hey": stream time reaches 101 == windowEnd (100) + grace (1).
    final long windowStart3 = 101L;
    final long recordTime3 = 101L;
    final long windowEnd3 = 102L;
    // Fixed copy-paste defect: the third record is at offset 2, not a repeat of offset 1.
    context.setRecordMetadata("topic", 0, 2);
    context.setTimestamp(recordTime3);
    harness.processor.process(new Record<>(new Windowed<>("dummyKey2", new TimeWindow(windowStart3, windowEnd3)), ARBITRARY_CHANGE, recordTime3));

    // Exactly the first buffered record should now have been forwarded, unchanged.
    assertThat(context.forwarded(), hasSize(1));
    final MockProcessorContext.CapturedForward capturedForward = context.forwarded().get(0);
    assertThat(capturedForward.record(), is(new Record<>(key, value, recordTime)));
}
Usage example of org.apache.kafka.common.serialization.Serdes.String in the Apache Kafka project:
class KTableSuppressProcessorTest, method windowedZeroTimeLimitShouldImmediatelyEmit.
@Test
public void windowedZeroTimeLimitShouldImmediatelyEmit() {
    // A time-limit suppression of ZERO buffers nothing: windowed records pass straight through.
    final Harness<Windowed<String>, Long> harness =
        new Harness<>(untilTimeLimit(ZERO, unbounded()), timeWindowedSerdeFrom(String.class, 100L), Long());
    final MockInternalNewProcessorContext<Windowed<String>, Change<Long>> context = harness.context;

    final long recordTime = ARBITRARY_LONG;
    context.setRecordMetadata("", 0, 0L);
    context.setTimestamp(recordTime);

    final Windowed<String> windowedKey = new Windowed<>("hey", new TimeWindow(0L, 100L));
    final Change<Long> change = ARBITRARY_CHANGE;
    harness.processor.process(new Record<>(windowedKey, change, recordTime));

    // The record must be forwarded immediately and unchanged.
    assertThat(context.forwarded(), hasSize(1));
    final MockProcessorContext.CapturedForward forwarded = context.forwarded().get(0);
    assertThat(forwarded.record(), is(new Record<>(windowedKey, change, recordTime)));
}
Usage example of org.apache.kafka.common.serialization.Serdes.String in the Apache Kafka project:
class KTableSuppressProcessorTest, method suppressShouldEmitWhenOverRecordCapacity.
@Test
public void suppressShouldEmitWhenOverRecordCapacity() {
    // The buffer is bounded to a single record, so inserting a second one must evict
    // (and therefore emit) the first, even though the time limit is nowhere near expiring.
    final Harness<String, Long> harness =
        new Harness<>(untilTimeLimit(Duration.ofDays(100), maxRecords(1)), String(), Long());
    final MockInternalNewProcessorContext<String, Change<Long>> context = harness.context;

    final long baseTime = 100L;
    final Change<Long> change = new Change<>(null, ARBITRARY_LONG);

    context.setRecordMetadata("", 0, 0L);
    context.setTimestamp(baseTime);
    harness.processor.process(new Record<>("hey", change, baseTime));

    context.setRecordMetadata("", 0, 1L);
    context.setTimestamp(baseTime + 1);
    harness.processor.process(new Record<>("dummyKey", change, baseTime + 1));

    // Only the evicted (oldest) record should have been forwarded.
    assertThat(context.forwarded(), hasSize(1));
    final MockProcessorContext.CapturedForward forwarded = context.forwarded().get(0);
    assertThat(forwarded.record(), is(new Record<>("hey", change, baseTime)));
}
Usage example of org.apache.kafka.common.serialization.Serdes.String in the Apache Kafka project:
class KTableSuppressProcessorTest, method finalResultsWithZeroGraceAtWindowEndShouldImmediatelyEmit.
@Test
public void finalResultsWithZeroGraceAtWindowEndShouldImmediatelyEmit() {
    // With a zero grace period, a record arriving exactly at its window end (stream time 100
    // == window end 100) is already final, so suppression forwards it without buffering.
    final Harness<Windowed<String>, Long> harness =
        new Harness<>(finalResults(ofMillis(0L)), timeWindowedSerdeFrom(String.class, 100L), Long());
    final MockInternalNewProcessorContext<Windowed<String>, Change<Long>> context = harness.context;

    final long recordTime = 100L;
    context.setRecordMetadata("", 0, 0L);
    context.setTimestamp(recordTime);

    final Windowed<String> windowedKey = new Windowed<>("hey", new TimeWindow(0, 100L));
    final Change<Long> change = ARBITRARY_CHANGE;
    harness.processor.process(new Record<>(windowedKey, change, recordTime));

    // The record must come out immediately and unchanged.
    assertThat(context.forwarded(), hasSize(1));
    final MockProcessorContext.CapturedForward forwarded = context.forwarded().get(0);
    assertThat(forwarded.record(), is(new Record<>(windowedKey, change, recordTime)));
}
Usage example of org.apache.kafka.common.serialization.Serdes.String in the Apache Kafka project:
class KTableSuppressProcessorTest, method suppressShouldEmitWhenOverByteCapacity.
@Test
public void suppressShouldEmitWhenOverByteCapacity() {
    // The buffer is bounded to 60 bytes; buffering a second record pushes it over the
    // limit, which must evict (emit) the oldest record despite the huge time limit.
    final Harness<String, Long> harness =
        new Harness<>(untilTimeLimit(Duration.ofDays(100), maxBytes(60L)), String(), Long());
    final MockInternalNewProcessorContext<String, Change<Long>> context = harness.context;

    final long baseTime = 100L;
    final Change<Long> change = new Change<>(null, ARBITRARY_LONG);

    context.setRecordMetadata("", 0, 0L);
    context.setTimestamp(baseTime);
    harness.processor.process(new Record<>("hey", change, baseTime));

    context.setRecordMetadata("", 0, 1L);
    context.setTimestamp(baseTime + 1);
    harness.processor.process(new Record<>("dummyKey", change, baseTime + 1));

    // Only the evicted (oldest) record should have been forwarded.
    assertThat(context.forwarded(), hasSize(1));
    final MockProcessorContext.CapturedForward forwarded = context.forwarded().get(0);
    assertThat(forwarded.record(), is(new Record<>("hey", change, baseTime)));
}
Aggregations