Use of org.apache.flink.streaming.api.transformations.LegacySinkTransformation in project flink by apache.
The class DataStreamSink, method forSinkFunction.
static <T> DataStreamSink<T> forSinkFunction(
        DataStream<T> inputStream, SinkFunction<T> sinkFunction) {
    // Wrap the user SinkFunction in a StreamSink operator.
    StreamSink<T> sinkOperator = new StreamSink<>(sinkFunction);
    final StreamExecutionEnvironment executionEnvironment = inputStream.getExecutionEnvironment();
    // Build the legacy sink transformation on top of the input transformation.
    PhysicalTransformation<T> transformation =
            new LegacySinkTransformation<>(
                    inputStream.getTransformation(), "Unnamed", sinkOperator, executionEnvironment.getParallelism());
    // Register the sink so it is translated into the StreamGraph at execution time.
    executionEnvironment.addOperator(transformation);
    return new DataStreamSink<>(transformation);
}
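For context, a minimal self-contained sketch of how this factory is typically reached: in recent Flink versions, DataStream#addSink(SinkFunction) delegates to DataStreamSink.forSinkFunction, which wires the user function into a LegacySinkTransformation as shown above. The PrintSinkFunction, the sink name, and the job name below are illustrative choices, not part of the snippet.

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;

public class LegacySinkExample {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStream<String> source = env.fromElements("a", "b", "c");

        // addSink(SinkFunction) is the public entry point that ends up in
        // DataStreamSink.forSinkFunction, wrapping the function in a StreamSink
        // operator and a LegacySinkTransformation.
        DataStreamSink<String> sink = source.addSink(new PrintSinkFunction<>());
        sink.name("print-sink");

        env.execute("legacy-sink-example");
    }
}

The factory keeps the default name "Unnamed" and the environment's parallelism; both can be overridden later through the returned DataStreamSink, which simply mutates the same transformation object.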
Use of org.apache.flink.streaming.api.transformations.LegacySinkTransformation in project flink by apache.
The class CommonExecSink, method createSinkFunctionTransformation.
private Transformation<?> createSinkFunctionTransformation(
        SinkFunction<RowData> sinkFunction, StreamExecutionEnvironment env,
        Transformation<RowData> inputTransformation, int rowtimeFieldIndex,
        TransformationMetadata transformationMetadata, int sinkParallelism) {
    final SinkOperator operator = new SinkOperator(env.clean(sinkFunction), rowtimeFieldIndex);
    // If the sink wants to know its input type, push the resolved type info into it.
    if (sinkFunction instanceof InputTypeConfigurable) {
        ((InputTypeConfigurable) sinkFunction).setInputType(getInputTypeInfo(), env.getConfig());
    }
    final Transformation<?> transformation =
            new LegacySinkTransformation<>(
                    inputTransformation, transformationMetadata.getName(), SimpleOperatorFactory.of(operator), sinkParallelism);
    // Apply the planner-provided name/uid metadata to the new transformation.
    transformationMetadata.fill(transformation);
    return transformation;
}
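As a hedged illustration of the InputTypeConfigurable branch above: a SinkFunction can opt in to receiving the resolved input type before it is wrapped in the LegacySinkTransformation. The class name TypeAwareRowDataSink below is made up for the sketch; only the Flink interfaces are real.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.InputTypeConfigurable;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.table.data.RowData;

public class TypeAwareRowDataSink extends RichSinkFunction<RowData>
        implements InputTypeConfigurable {

    private TypeInformation<?> inputType;

    @Override
    public void setInputType(TypeInformation<?> type, ExecutionConfig executionConfig) {
        // Invoked by the planner (the instanceof branch above) before the
        // LegacySinkTransformation is built.
        this.inputType = type;
    }

    @Override
    public void invoke(RowData value, Context context) {
        // Write the row somewhere; printing is only for illustration.
        System.out.println("Received row of type " + inputType + ": " + value);
    }
}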
Use of org.apache.flink.streaming.api.transformations.LegacySinkTransformation in project flink by apache.
The class PythonOperatorChainingOptimizer, method replaceInput.
private static void replaceInput(
        Transformation<?> transformation, Transformation<?> oldInput, Transformation<?> newInput) {
    try {
        if (transformation instanceof OneInputTransformation
                || transformation instanceof FeedbackTransformation
                || transformation instanceof SideOutputTransformation
                || transformation instanceof ReduceTransformation
                || transformation instanceof SinkTransformation
                || transformation instanceof LegacySinkTransformation
                || transformation instanceof TimestampsAndWatermarksTransformation
                || transformation instanceof PartitionTransformation) {
            // Single-input transformations keep their upstream in a private "input" field.
            final Field inputField = transformation.getClass().getDeclaredField("input");
            inputField.setAccessible(true);
            inputField.set(transformation, newInput);
        } else if (transformation instanceof TwoInputTransformation) {
            final Field inputField;
            if (((TwoInputTransformation<?, ?, ?>) transformation).getInput1() == oldInput) {
                inputField = transformation.getClass().getDeclaredField("input1");
            } else {
                inputField = transformation.getClass().getDeclaredField("input2");
            }
            inputField.setAccessible(true);
            inputField.set(transformation, newInput);
        } else if (transformation instanceof UnionTransformation
                || transformation instanceof AbstractMultipleInputTransformation) {
            // Multi-input transformations store their upstreams in a private "inputs" list.
            final Field inputsField = transformation.getClass().getDeclaredField("inputs");
            inputsField.setAccessible(true);
            List<Transformation<?>> newInputs = Lists.newArrayList();
            newInputs.addAll(transformation.getInputs());
            newInputs.remove(oldInput);
            newInputs.add(newInput);
            inputsField.set(transformation, newInputs);
        } else if (transformation instanceof AbstractBroadcastStateTransformation) {
            final Field inputField;
            if (((AbstractBroadcastStateTransformation<?, ?, ?>) transformation).getRegularInput() == oldInput) {
                inputField = transformation.getClass().getDeclaredField("regularInput");
            } else {
                inputField = transformation.getClass().getDeclaredField("broadcastInput");
            }
            inputField.setAccessible(true);
            inputField.set(transformation, newInput);
        } else {
            throw new RuntimeException("Unsupported transformation: " + transformation);
        }
    } catch (NoSuchFieldException | IllegalAccessException e) {
        // This should never happen: the field names are known at compile time.
        throw new RuntimeException(e);
    }
}
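The reflection trick above is plain Java, so a standalone sketch (with a made-up Node class standing in for Flink's Transformation hierarchy) shows the pattern the optimizer relies on for the LegacySinkTransformation branch: look up the private input field by name, make it accessible, and overwrite it.

import java.lang.reflect.Field;

public class ReplaceInputSketch {

    static class Node {
        private Node input;          // mirrors the private "input" field of a single-input transformation
        private final String name;
        Node(String name, Node input) { this.name = name; this.input = input; }
        @Override public String toString() { return name + " <- " + (input == null ? "none" : input.name); }
    }

    static void replaceInput(Node node, Node newInput) {
        try {
            Field inputField = Node.class.getDeclaredField("input");
            inputField.setAccessible(true);
            inputField.set(node, newInput);
        } catch (NoSuchFieldException | IllegalAccessException e) {
            // Mirrors the original: the field name is known at compile time, so this is unexpected.
            throw new RuntimeException(e);
        }
    }

    public static void main(String[] args) {
        Node source = new Node("source", null);
        Node chained = new Node("chained-python-operator", null);
        Node sink = new Node("sink", source);

        System.out.println(sink);       // sink <- source
        replaceInput(sink, chained);
        System.out.println(sink);       // sink <- chained-python-operator
    }
}

The optimizer goes through reflection because the input fields of the various Transformation subclasses are private and have no public setters, so rewiring after chaining Python operators has no API-level alternative.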
Use of org.apache.flink.streaming.api.transformations.LegacySinkTransformation in project flink by apache.
The class LegacySinkTransformationTranslator, method translateInternal.
private Collection<Integer> translateInternal(
        final LegacySinkTransformation<IN> transformation, final Context context) {
    checkNotNull(transformation);
    checkNotNull(context);
    final StreamGraph streamGraph = context.getStreamGraph();
    final String slotSharingGroup = context.getSlotSharingGroup();
    final int transformationId = transformation.getId();
    final ExecutionConfig executionConfig = streamGraph.getExecutionConfig();

    final List<Transformation<?>> parentTransformations = transformation.getInputs();
    checkState(
            parentTransformations.size() == 1,
            "Expected exactly one input transformation but found " + parentTransformations.size());
    final Transformation<?> input = parentTransformations.get(0);

    // Add the sink node to the StreamGraph; the output type is null because a sink emits no records.
    streamGraph.addSink(
            transformationId,
            slotSharingGroup,
            transformation.getCoLocationGroupKey(),
            transformation.getOperatorFactory(),
            input.getOutputType(),
            null,
            "Sink: " + transformation.getName());

    // Use the transformation's parallelism if set, otherwise fall back to the ExecutionConfig.
    final int parallelism =
            transformation.getParallelism() != ExecutionConfig.PARALLELISM_DEFAULT
                    ? transformation.getParallelism()
                    : executionConfig.getParallelism();
    streamGraph.setParallelism(transformationId, parallelism);
    streamGraph.setMaxParallelism(transformationId, transformation.getMaxParallelism());

    // Connect the sink to every stream node produced by its single input transformation.
    for (Integer inputId : context.getStreamNodeIds(input)) {
        streamGraph.addEdge(inputId, transformationId, 0);
    }

    if (transformation.getStateKeySelector() != null) {
        TypeSerializer<?> keySerializer = transformation.getStateKeyType().createSerializer(executionConfig);
        streamGraph.setOneInputStateKey(transformationId, transformation.getStateKeySelector(), keySerializer);
    }
    return Collections.emptyList();
}
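To observe the translator's effect end to end, a hedged sketch below builds a tiny job with a SinkFunction-based sink and prints the JSON execution plan; the sink node created by translateInternal should appear there with the "Sink: " prefix and the resolved parallelism. The sink name and the parallelism value are arbitrary example choices.

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;

public class SinkTranslationSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        env.fromElements(1, 2, 3)
                .addSink(new PrintSinkFunction<>())
                .name("numbers");   // translated into a StreamGraph node named "Sink: numbers"

        // The JSON plan includes the sink node added by translateInternal, with the
        // parallelism resolved from the transformation or, if unset, the ExecutionConfig.
        System.out.println(env.getExecutionPlan());
    }
}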