Example use of org.apache.beam.runners.local.StructuralKey in the Apache Beam project.
From class WatermarkManagerTest, method inputWatermarkDuplicates:
@Test
public void inputWatermarkDuplicates() {
  // A mocked upstream watermark that we advance manually between refreshes.
  Watermark upstream = Mockito.mock(Watermark.class);
  AppliedPTransformInputWatermark inputWatermark =
      new AppliedPTransformInputWatermark("underTest", ImmutableList.of(upstream), update -> {});

  // Initial refresh: the input watermark should track the upstream value.
  when(upstream.get()).thenReturn(new Instant(0));
  inputWatermark.refresh();
  assertEquals(new Instant(0), inputWatermark.get());

  // Set the same timer id twice in one update; the later set wins.
  StructuralKey<String> timerKey = StructuralKey.of("key", StringUtf8Coder.of());
  TimerData firstTimer =
      TimerData.of("a", StateNamespaces.global(), new Instant(100), new Instant(100), TimeDomain.EVENT_TIME);
  TimerData secondTimer =
      TimerData.of("a", StateNamespaces.global(), new Instant(200), new Instant(200), TimeDomain.EVENT_TIME);
  TimerUpdate duplicateSet =
      TimerUpdate.builder(timerKey).setTimer(firstTimer).setTimer(secondTimer).build();
  inputWatermark.updateTimers(duplicateSet);

  // Only the most recent set for the duplicate id is observable.
  assertEquals(secondTimer.getTimestamp(), inputWatermark.getEarliestTimerTimestamp());

  // Advance the upstream watermark; pending timers do not hold the input watermark.
  when(upstream.get()).thenReturn(new Instant(1000));
  inputWatermark.refresh();
  assertEquals(new Instant(1000), inputWatermark.get());

  // The fired event-time timers contain only the surviving (second) timer.
  Map<StructuralKey<?>, List<TimerData>> firedTimers = inputWatermark.extractFiredEventTimeTimers();
  List<TimerData> firedForKey = firedTimers.get(timerKey);
  assertNotNull(firedForKey);
  assertThat(firedForKey, contains(secondTimer));

  // Mark the fired timers complete; afterwards no timer holds remain.
  inputWatermark.updateTimers(TimerUpdate.builder(timerKey).withCompletedTimers(firedForKey).build());
  assertEquals(BoundedWindow.TIMESTAMP_MAX_VALUE, inputWatermark.getEarliestTimerTimestamp());

  // A second extraction yields nothing left to fire.
  firedTimers = inputWatermark.extractFiredEventTimeTimers();
  assertThat(firedTimers.entrySet(), empty());
}
Example use of org.apache.beam.runners.local.StructuralKey in the Apache Beam project.
From class ParDoEvaluator, method create:
/**
 * Builds a {@link ParDoEvaluator} for the given application: wires up the output manager,
 * side-input reader, and per-output coders, then delegates to the runner-based factory overload.
 */
public static <InputT, OutputT> ParDoEvaluator<InputT> create(
    EvaluationContext evaluationContext,
    PipelineOptions options,
    DirectStepContext stepContext,
    AppliedPTransform<?, ?, ?> application,
    Coder<InputT> inputCoder,
    WindowingStrategy<?, ? extends BoundedWindow> windowingStrategy,
    DoFn<InputT, OutputT> fn,
    StructuralKey<?> key,
    List<PCollectionView<?>> sideInputs,
    TupleTag<OutputT> mainOutputTag,
    List<TupleTag<?>> additionalOutputTags,
    Map<TupleTag<?>, PCollection<?>> outputs,
    DoFnSchemaInformation doFnSchemaInformation,
    Map<String, PCollectionView<?>> sideInputMapping,
    DoFnRunnerFactory<InputT, OutputT> runnerFactory) {
  // Bundle output manager routes produced elements to the correct output PCollection.
  BundleOutputManager bundleOutputManager = createOutputManager(evaluationContext, key, outputs);
  ReadyCheckingSideInputReader sideInputReader =
      evaluationContext.createSideInputReader(sideInputs);
  // Map each output tag to the coder of its PCollection.
  Map<TupleTag<?>, Coder<?>> outputCoders =
      outputs.entrySet().stream()
          .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().getCoder()));
  PushbackSideInputDoFnRunner<InputT, OutputT> doFnRunner =
      runnerFactory.createRunner(
          options,
          fn,
          sideInputs,
          sideInputReader,
          bundleOutputManager,
          mainOutputTag,
          additionalOutputTags,
          stepContext,
          inputCoder,
          outputCoders,
          windowingStrategy,
          doFnSchemaInformation,
          sideInputMapping);
  return create(doFnRunner, stepContext, application, bundleOutputManager);
}
Aggregations