Use of org.apache.beam.runners.core.construction.SerializablePipelineOptions in project beam by apache.
From the class BufferingDoFnRunnerTest, method createBufferingDoFnRunner:
private static BufferingDoFnRunner createBufferingDoFnRunner(
    int concurrentCheckpoints,
    List<BufferingDoFnRunner.CheckpointIdentifier> notYetAcknowledgeCheckpoints)
    throws Exception {
  DoFnRunner doFnRunner = Mockito.mock(DoFnRunner.class);
  OperatorStateBackend operatorStateBackend = Mockito.mock(OperatorStateBackend.class);

  // Set up the union list state holding the not-yet-acknowledged checkpoints.
  ListState unionListState = Mockito.mock(ListState.class);
  Mockito.when(operatorStateBackend.getUnionListState(Mockito.any())).thenReturn(unionListState);
  Mockito.when(unionListState.get()).thenReturn(notYetAcknowledgeCheckpoints);

  // Set up the buffer list state.
  Mockito.when(operatorStateBackend.getListState(Mockito.any()))
      .thenReturn(Mockito.mock(ListState.class));

  return BufferingDoFnRunner.create(
      doFnRunner,
      "stable-input",
      StringUtf8Coder.of(),
      WindowedValue.getFullCoder(VarIntCoder.of(), GlobalWindow.Coder.INSTANCE),
      operatorStateBackend,
      null,
      concurrentCheckpoints,
      new SerializablePipelineOptions(FlinkPipelineOptions.defaults()));
}
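The wrapper constructed on the last line exists because PipelineOptions itself is not Serializable; SerializablePipelineOptions snapshots the options in a form that survives Java serialization so a runner can ship them to workers. Below is a minimal sketch of that round trip; the OptionsRoundTrip class name is illustrative, while get(), as(), and getParallelism() are the Beam accessors used to unwrap and read the options:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
import org.apache.beam.runners.flink.FlinkPipelineOptions;

public class OptionsRoundTrip {
  public static void main(String[] args) throws Exception {
    // Wrap the options; PipelineOptions itself is not Serializable.
    SerializablePipelineOptions wrapped =
        new SerializablePipelineOptions(FlinkPipelineOptions.defaults());

    // Round-trip through Java serialization, as a runner does when
    // sending the options to a worker.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
      out.writeObject(wrapped);
    }
    SerializablePipelineOptions recovered;
    try (ObjectInputStream in =
        new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
      recovered = (SerializablePipelineOptions) in.readObject();
    }

    // Unwrap back to a typed view on the receiving side.
    FlinkPipelineOptions options = recovered.get().as(FlinkPipelineOptions.class);
    System.out.println(options.getParallelism());
  }
}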
Use of org.apache.beam.runners.core.construction.SerializablePipelineOptions in project beam by apache.
From the class StreamingTransformTranslator, method combineGrouped:
private static <K, InputT, OutputT>
    TransformEvaluator<Combine.GroupedValues<K, InputT, OutputT>> combineGrouped() {
  return new TransformEvaluator<Combine.GroupedValues<K, InputT, OutputT>>() {

    @Override
    public void evaluate(
        final Combine.GroupedValues<K, InputT, OutputT> transform, EvaluationContext context) {
      // Get the applied combine function and the input's windowing strategy.
      PCollection<? extends KV<K, ? extends Iterable<InputT>>> input = context.getInput(transform);
      final WindowingStrategy<?, ?> windowingStrategy = input.getWindowingStrategy();

      @SuppressWarnings("unchecked")
      final CombineWithContext.CombineFnWithContext<InputT, ?, OutputT> fn =
          (CombineWithContext.CombineFnWithContext<InputT, ?, OutputT>)
              CombineFnUtil.toFnWithContext(transform.getFn());

      @SuppressWarnings("unchecked")
      UnboundedDataset<KV<K, Iterable<InputT>>> unboundedDataset =
          (UnboundedDataset<KV<K, Iterable<InputT>>>) context.borrowDataset(transform);
      JavaDStream<WindowedValue<KV<K, Iterable<InputT>>>> dStream = unboundedDataset.getDStream();

      // Capture only serializable state as final locals before entering the Spark closure.
      final SerializablePipelineOptions options = context.getSerializableOptions();
      final SparkPCollectionView pviews = context.getPViews();

      JavaDStream<WindowedValue<KV<K, OutputT>>> outStream =
          dStream.transform(
              rdd -> {
                SparkCombineFn<KV<K, InputT>, InputT, ?, OutputT> combineFnWithContext =
                    SparkCombineFn.keyed(
                        fn,
                        options,
                        TranslationUtils.getSideInputs(
                            transform.getSideInputs(),
                            new JavaSparkContext(rdd.context()),
                            pviews),
                        windowingStrategy);
                return rdd.map(new TranslationUtils.CombineGroupedValues<>(combineFnWithContext));
              });

      context.putDataset(
          transform, new UnboundedDataset<>(outStream, unboundedDataset.getStreamSources()));
    }

    @Override
    public String toNativeString() {
      return "map(new <fn>())";
    }
  };
}
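Note the pattern around getSerializableOptions(): options and pviews are pulled into final locals before the dStream.transform lambda, so the closure that Spark serializes carries only the SerializablePipelineOptions wrapper rather than the whole EvaluationContext. A hedged sketch of the same pattern in isolation; OptionsAwareFn and process are illustrative names, not Beam API:

import java.io.Serializable;
import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
import org.apache.beam.sdk.options.PipelineOptions;

// Illustrative sketch: a function shipped to Spark executors should capture
// the SerializablePipelineOptions wrapper, never a raw PipelineOptions
// instance (PipelineOptions does not implement Serializable).
class OptionsAwareFn implements Serializable {

  private final SerializablePipelineOptions options;

  OptionsAwareFn(SerializablePipelineOptions options) {
    this.options = options;
  }

  void process() {
    // The wrapped options are deserialized when accessed on the executor side.
    PipelineOptions opts = options.get();
    // ... read configuration via opts.as(...), as SparkCombineFn.keyed does ...
  }
}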