Use of org.apache.beam.runners.core.DoFnRunner in project beam by apache.
The class BatchGroupAlsoByWindowReshuffleDoFnTest, method makeRunner.
private static <K, InputT, OutputT, W extends BoundedWindow>
    DoFnRunner<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> makeRunner(
        GroupAlsoByWindowDoFnFactory<K, InputT, OutputT> fnFactory,
        WindowingStrategy<?, W> windowingStrategy,
        TupleTag<KV<K, OutputT>> outputTag,
        DoFnRunners.OutputManager outputManager) {
  final StepContext stepContext = new TestStepContext(STEP_NAME);
  StateInternalsFactory<K> stateInternalsFactory = key -> stepContext.stateInternals();
  BatchGroupAlsoByWindowFn<K, InputT, OutputT> fn =
      fnFactory.forStrategy(windowingStrategy, stateInternalsFactory);
  return new GroupAlsoByWindowFnRunner<>(
      PipelineOptionsFactory.create(),
      fn,
      NullSideInputReader.empty(),
      outputManager,
      outputTag,
      stepContext);
}
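As a hedged sketch of how such a runner might be driven in a test (not taken from the original test; the factory, output capture, and element values below are placeholders):

// Illustrative only: `fnFactory` is a placeholder for a concrete
// GroupAlsoByWindowDoFnFactory implementation supplied by the real test.
GroupAlsoByWindowDoFnFactory<String, Long, Long> fnFactory = null; // placeholder
List<WindowedValue<?>> outputs = new ArrayList<>();
DoFnRunners.OutputManager outputManager =
    new DoFnRunners.OutputManager() {
      @Override
      public <T> void output(TupleTag<T> tag, WindowedValue<T> value) {
        outputs.add(value); // collect outputs so the test can assert on them
      }
    };
TupleTag<KV<String, Long>> outputTag = new TupleTag<>("output");

DoFnRunner<KV<String, Iterable<WindowedValue<Long>>>, KV<String, Long>> runner =
    makeRunner(fnFactory, WindowingStrategy.globalDefault(), outputTag, outputManager);

// Every bundle is bracketed by startBundle() and finishBundle().
runner.startBundle();
runner.processElement(
    WindowedValue.valueInGlobalWindow(
        KV.<String, Iterable<WindowedValue<Long>>>of(
            "key",
            Arrays.asList(
                WindowedValue.valueInGlobalWindow(1L),
                WindowedValue.valueInGlobalWindow(2L)))));
runner.finishBundle();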
Use of org.apache.beam.runners.core.DoFnRunner in project beam by apache.
The class DoFnOperatorTest, method testWatermarkUpdateAfterWatermarkHoldRelease.
@Test
public void testWatermarkUpdateAfterWatermarkHoldRelease() throws Exception {
  Coder<WindowedValue<KV<String, String>>> coder =
      WindowedValue.getValueOnlyCoder(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()));
  TupleTag<KV<String, String>> outputTag = new TupleTag<>("main-output");
  List<Long> emittedWatermarkHolds = new ArrayList<>();
  KeySelector<WindowedValue<KV<String, String>>, ByteBuffer> keySelector =
      e -> FlinkKeyUtils.encodeKey(e.getValue().getKey(), StringUtf8Coder.of());

  DoFnOperator<KV<String, String>, KV<String, String>> doFnOperator =
      new DoFnOperator<KV<String, String>, KV<String, String>>(
          new IdentityDoFn<>(),
          "stepName",
          coder,
          Collections.emptyMap(),
          outputTag,
          Collections.emptyList(),
          new DoFnOperator.MultiOutputOutputManagerFactory<>(
              outputTag, coder, new SerializablePipelineOptions(FlinkPipelineOptions.defaults())),
          WindowingStrategy.globalDefault(),
          new HashMap<>(), /* side-input mapping */
          Collections.emptyList(), /* side inputs */
          FlinkPipelineOptions.defaults(),
          StringUtf8Coder.of(),
          keySelector,
          DoFnSchemaInformation.create(),
          Collections.emptyMap()) {

        @Override
        protected DoFnRunner<KV<String, String>, KV<String, String>> createWrappingDoFnRunner(
            DoFnRunner<KV<String, String>, KV<String, String>> wrappedRunner,
            StepContext stepContext) {
          StateNamespace namespace =
              StateNamespaces.window(GlobalWindow.Coder.INSTANCE, GlobalWindow.INSTANCE);
          StateTag<WatermarkHoldState> holdTag =
              StateTags.watermarkStateInternal("hold", TimestampCombiner.LATEST);
          WatermarkHoldState holdState = stepContext.stateInternals().state(namespace, holdTag);
          TimerInternals timerInternals = stepContext.timerInternals();

          return new DoFnRunner<KV<String, String>, KV<String, String>>() {

            @Override
            public void startBundle() {
              wrappedRunner.startBundle();
            }

            @Override
            public void processElement(WindowedValue<KV<String, String>> elem) {
              wrappedRunner.processElement(elem);
              holdState.add(elem.getTimestamp());
              timerInternals.setTimer(
                  namespace,
                  "timer",
                  "family",
                  elem.getTimestamp().plus(Duration.millis(1)),
                  elem.getTimestamp().plus(Duration.millis(1)),
                  TimeDomain.EVENT_TIME);
              timerInternals.setTimer(
                  namespace,
                  "cleanup",
                  "",
                  GlobalWindow.INSTANCE.maxTimestamp(),
                  GlobalWindow.INSTANCE.maxTimestamp(),
                  TimeDomain.EVENT_TIME);
            }

            @Override
            public <KeyT> void onTimer(
                String timerId,
                String timerFamilyId,
                KeyT key,
                BoundedWindow window,
                Instant timestamp,
                Instant outputTimestamp,
                TimeDomain timeDomain) {
              if ("cleanup".equals(timerId)) {
                holdState.clear();
              } else {
                holdState.add(outputTimestamp);
              }
            }

            @Override
            public void finishBundle() {
              wrappedRunner.finishBundle();
            }

            @Override
            public <KeyT> void onWindowExpiration(
                BoundedWindow window, Instant timestamp, KeyT key) {
              wrappedRunner.onWindowExpiration(window, timestamp, key);
            }

            @Override
            public DoFn<KV<String, String>, KV<String, String>> getFn() {
              return doFn;
            }
          };
        }

        @Override
        void emitWatermarkIfHoldChanged(long currentWatermarkHold) {
          emittedWatermarkHolds.add(keyedStateInternals.minWatermarkHoldMs());
        }
      };

  OneInputStreamOperatorTestHarness<
          WindowedValue<KV<String, String>>, WindowedValue<KV<String, String>>>
      testHarness =
          new KeyedOneInputStreamOperatorTestHarness<>(
              doFnOperator,
              keySelector,
              new CoderTypeInformation<>(
                  FlinkKeyUtils.ByteBufferCoder.of(), FlinkPipelineOptions.defaults()));
  testHarness.setup();
  Instant now = Instant.now();
  testHarness.open();

  // process first element, set hold to `now`, setup timer for `now + 1`
  testHarness.processElement(
      new StreamRecord<>(
          WindowedValue.timestampedValueInGlobalWindow(KV.of("Key", "Hello"), now)));
  assertThat(emittedWatermarkHolds, is(equalTo(Collections.singletonList(now.getMillis()))));
  // Fire the timer; its callback moves the hold to `now + 1` (the timer's output timestamp).
  testHarness.processWatermark(now.getMillis() + 2);
  assertThat(
      emittedWatermarkHolds,
      is(equalTo(Arrays.asList(now.getMillis(), now.getMillis() + 1))));

  // Process the second element and verify that the changed hold was emitted.
  testHarness.processElement(
      new StreamRecord<>(
          WindowedValue.timestampedValueInGlobalWindow(
              KV.of("Key", "Hello"), now.plus(Duration.millis(2)))));
  assertThat(
      emittedWatermarkHolds,
      is(equalTo(Arrays.asList(now.getMillis(), now.getMillis() + 1, now.getMillis() + 2))));

  // Advance the watermark past the end of the global window so the cleanup timer fires.
  testHarness.processWatermark(
      GlobalWindow.INSTANCE.maxTimestamp().plus(Duration.millis(1)).getMillis());
  testHarness.processWatermark(BoundedWindow.TIMESTAMP_MAX_VALUE.getMillis());
  testHarness.close();
}
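The override of createWrappingDoFnRunner above layers hold and timer bookkeeping on top of plain delegation. As a minimal sketch of that delegation pattern (the class name is illustrative, not part of the Beam API; the methods mirror exactly those implemented in the anonymous runner above), a wrapper that forwards every DoFnRunner call to the wrapped runner looks like this:

// Illustrative forwarding wrapper: every call is passed straight to the delegate.
class ForwardingDoFnRunner<InputT, OutputT> implements DoFnRunner<InputT, OutputT> {
  private final DoFnRunner<InputT, OutputT> delegate;

  ForwardingDoFnRunner(DoFnRunner<InputT, OutputT> delegate) {
    this.delegate = delegate;
  }

  @Override
  public void startBundle() {
    delegate.startBundle();
  }

  @Override
  public void processElement(WindowedValue<InputT> elem) {
    delegate.processElement(elem);
  }

  @Override
  public <KeyT> void onTimer(
      String timerId,
      String timerFamilyId,
      KeyT key,
      BoundedWindow window,
      Instant timestamp,
      Instant outputTimestamp,
      TimeDomain timeDomain) {
    delegate.onTimer(timerId, timerFamilyId, key, window, timestamp, outputTimestamp, timeDomain);
  }

  @Override
  public void finishBundle() {
    delegate.finishBundle();
  }

  @Override
  public <KeyT> void onWindowExpiration(BoundedWindow window, Instant timestamp, KeyT key) {
    delegate.onWindowExpiration(window, timestamp, key);
  }

  @Override
  public DoFn<InputT, OutputT> getFn() {
    return delegate.getFn();
  }
}

A custom wrapper, like the one in the test, only needs to override the calls it cares about and forward the rest.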
Use of org.apache.beam.runners.core.DoFnRunner in project beam by apache.
The class DoFnOperator, method open.
@Override
public void open() throws Exception {
  // WindowDoFnOperator needs to use state and timers to obtain its DoFn, so it must
  // wait until StateInternals and TimerInternals are ready. open() is called after
  // initializeState().
  this.doFn = getDoFn();
  FlinkPipelineOptions options = serializedOptions.get().as(FlinkPipelineOptions.class);
  doFnInvoker = DoFnInvokers.tryInvokeSetupFor(doFn, options);

  StepContext stepContext = new FlinkStepContext();
  doFnRunner =
      DoFnRunners.simpleRunner(
          options,
          doFn,
          sideInputReader,
          outputManager,
          mainOutputTag,
          additionalOutputTags,
          stepContext,
          getInputCoder(),
          outputCoders,
          windowingStrategy,
          doFnSchemaInformation,
          sideInputMapping);

  if (requiresStableInput) {
    // Put this directly in front of the root FnRunner, before any additional wrappers.
    doFnRunner =
        bufferingDoFnRunner =
            BufferingDoFnRunner.create(
                doFnRunner,
                "stable-input-buffer",
                windowedInputCoder,
                windowingStrategy.getWindowFn().windowCoder(),
                getOperatorStateBackend(),
                getKeyedStateBackend(),
                options.getNumConcurrentCheckpoints(),
                serializedOptions);
  }
  doFnRunner = createWrappingDoFnRunner(doFnRunner, stepContext);
  earlyBindStateIfNeeded();

  if (!options.getDisableMetrics()) {
    flinkMetricContainer = new FlinkMetricContainer(getRuntimeContext());
    doFnRunner = new DoFnRunnerWithMetricsUpdate<>(stepName, doFnRunner, flinkMetricContainer);
    String checkpointMetricNamespace = options.getReportCheckpointDuration();
    if (checkpointMetricNamespace != null) {
      MetricName checkpointMetric =
          MetricName.named(checkpointMetricNamespace, "checkpoint_duration");
      checkpointStats =
          new CheckpointStats(
              () ->
                  flinkMetricContainer
                      .getMetricsContainer(stepName)
                      .getDistribution(checkpointMetric));
    }
  }

  elementCount = 0L;
  lastFinishBundleTime = getProcessingTimeService().getCurrentProcessingTime();

  // Schedule a timer that periodically checks whether the current bundle has timed out
  // and, if so, finishes it.
  long bundleCheckPeriod = Math.max(maxBundleTimeMills / 2, 1);
  checkFinishBundleTimer =
      getProcessingTimeService()
          .scheduleAtFixedRate(
              timestamp -> checkInvokeFinishBundleByTime(), bundleCheckPeriod, bundleCheckPeriod);

  if (doFn instanceof SplittableParDoViaKeyedWorkItems.ProcessFn) {
    pushbackDoFnRunner =
        new ProcessFnRunner<>((DoFnRunner) doFnRunner, sideInputs, sideInputHandler);
  } else {
    pushbackDoFnRunner =
        SimplePushbackSideInputDoFnRunner.create(doFnRunner, sideInputs, sideInputHandler);
  }

  bundleFinalizer = new InMemoryBundleFinalizer();
  pendingFinalizations = new LinkedHashMap<>();
}
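The wrappers assembled in open() are all driven by FlinkPipelineOptions. As a hedged sketch, a pipeline could opt into the metrics and checkpoint-duration behavior seen above; the setter names are assumed to mirror the getters called in open() under the standard PipelineOptions bean convention and should be checked against FlinkPipelineOptions.

// Assumed setters corresponding to the getters used in open(); verify before relying on them.
FlinkPipelineOptions options = FlinkPipelineOptions.defaults();
options.setDisableMetrics(false);             // keeps DoFnRunnerWithMetricsUpdate in the chain
options.setReportCheckpointDuration("beam");  // also reports a "checkpoint_duration" distribution
options.setNumConcurrentCheckpoints(1);       // bounds the stable-input buffering, if required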
Use of org.apache.beam.runners.core.DoFnRunner in project beam by apache.
The class BufferingDoFnRunnerTest, method createBufferingDoFnRunner.
private static BufferingDoFnRunner createBufferingDoFnRunner(
    int concurrentCheckpoints,
    List<BufferingDoFnRunner.CheckpointIdentifier> notYetAcknowledgeCheckpoints)
    throws Exception {
  DoFnRunner doFnRunner = Mockito.mock(DoFnRunner.class);
  OperatorStateBackend operatorStateBackend = Mockito.mock(OperatorStateBackend.class);

  // Setup not yet acknowledged checkpoint union list state
  ListState unionListState = Mockito.mock(ListState.class);
  Mockito.when(operatorStateBackend.getUnionListState(Mockito.any())).thenReturn(unionListState);
  Mockito.when(unionListState.get()).thenReturn(notYetAcknowledgeCheckpoints);

  // Setup buffer list state
  Mockito.when(operatorStateBackend.getListState(Mockito.any()))
      .thenReturn(Mockito.mock(ListState.class));

  return BufferingDoFnRunner.create(
      doFnRunner,
      "stable-input",
      StringUtf8Coder.of(),
      WindowedValue.getFullCoder(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE),
      operatorStateBackend,
      null,
      concurrentCheckpoints,
      new SerializablePipelineOptions(FlinkPipelineOptions.defaults()));
}
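Hypothetically, a test could call this helper both for a fresh start and for a restore-style scenario; since the union list state is mocked to return the supplied identifiers, the second argument stands in for checkpoints that were never acknowledged. The variable names and values below are placeholders, not assertions from the original test.

// Fresh start: nothing to restore, at most one concurrent checkpoint in flight.
BufferingDoFnRunner freshRunner = createBufferingDoFnRunner(1, Collections.emptyList());

// Restore path: whatever identifiers are placed in this list are handed back by the
// mocked union list state (left empty in this sketch).
List<BufferingDoFnRunner.CheckpointIdentifier> restored = new ArrayList<>();
BufferingDoFnRunner restoredRunner = createBufferingDoFnRunner(2, restored);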
Use of org.apache.beam.runners.core.DoFnRunner in project beam by apache.
The class GroupAlsoByWindowsParDoFn, method createRunner.
/**
* Composes and returns a {@link DoFnRunner} based on the parameters.
*
* <p>A {@code SimpleOldDoFnRunner} executes the {@link GroupAlsoByWindowFn}.
*
* <p>A {@link LateDataDroppingDoFnRunner} handles late data dropping for a {@link
* StreamingGroupAlsoByWindowViaWindowSetFn}.
*
* <p>A {@link StreamingSideInputDoFnRunner} handles streaming side inputs.
*
* <p>A {@link StreamingKeyedWorkItemSideInputDoFnRunner} handles streaming side inputs for a
* {@link StreamingGroupAlsoByWindowViaWindowSetFn}.
*/
private DoFnRunner<InputT, KV<K, Iterable<V>>> createRunner() {
  OutputManager outputManager =
      new OutputManager() {
        @Override
        public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
          checkState(
              tag.equals(mainOutputTag),
              "Must only output to main output tag (%s), but was %s",
              tag,
              mainOutputTag);
          try {
            receiver.process(output);
          } catch (Throwable t) {
            throw new RuntimeException(t);
          }
        }
      };

  boolean hasStreamingSideInput =
      options.as(StreamingOptions.class).isStreaming() && !sideInputReader.isEmpty();

  DoFnRunner<InputT, KV<K, Iterable<V>>> basicRunner =
      new GroupAlsoByWindowFnRunner<>(
          options, doFn, sideInputReader, outputManager, mainOutputTag, stepContext);

  if (doFn instanceof StreamingGroupAlsoByWindowViaWindowSetFn) {
    DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>> streamingGABWRunner =
        (DoFnRunner<KeyedWorkItem<K, V>, KV<K, Iterable<V>>>) basicRunner;
    if (hasStreamingSideInput) {
      @SuppressWarnings("unchecked")
      WindmillKeyedWorkItem.FakeKeyedWorkItemCoder<K, V> keyedWorkItemCoder =
          (WindmillKeyedWorkItem.FakeKeyedWorkItemCoder<K, V>) inputCoder;
      StreamingSideInputFetcher<V, W> sideInputFetcher =
          new StreamingSideInputFetcher<>(
              sideInputViews,
              keyedWorkItemCoder.getElementCoder(),
              windowingStrategy,
              (StreamingModeExecutionContext.StreamingModeStepContext) stepContext);
      streamingGABWRunner =
          new StreamingKeyedWorkItemSideInputDoFnRunner<>(
              streamingGABWRunner,
              keyedWorkItemCoder.getKeyCoder(),
              sideInputFetcher,
              stepContext);
    }
    return (DoFnRunner<InputT, KV<K, Iterable<V>>>)
        DoFnRunners.<K, V, Iterable<V>, W>lateDataDroppingRunner(
            streamingGABWRunner, stepContext.timerInternals(), windowingStrategy);
  } else {
    if (hasStreamingSideInput) {
      return new StreamingSideInputDoFnRunner<>(
          basicRunner,
          new StreamingSideInputFetcher<>(
              sideInputViews,
              inputCoder,
              windowingStrategy,
              (StreamingModeExecutionContext.StreamingModeStepContext) stepContext));
    } else {
      return basicRunner;
    }
  }
}
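The anonymous OutputManager above forwards each output straight to a receiver and rejects anything that is not the main output tag. As a minimal sketch (the class name is illustrative, not part of the Beam API), an implementation of the same DoFnRunners.OutputManager interface that instead collects outputs for later inspection, as a test might use, could look like this:

// Illustrative only: collects every output so a test can assert on it afterwards.
class CollectingOutputManager implements DoFnRunners.OutputManager {
  final List<WindowedValue<?>> collected = new ArrayList<>();

  @Override
  public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
    collected.add(output);
  }
}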