Use of org.apache.flink.streaming.api.transformations.OneInputTransformation in project flink by apache.
The class PythonOperatorChainingOptimizer, method replaceInput.
private static void replaceInput(
        Transformation<?> transformation,
        Transformation<?> oldInput,
        Transformation<?> newInput) {
    try {
        if (transformation instanceof OneInputTransformation
                || transformation instanceof FeedbackTransformation
                || transformation instanceof SideOutputTransformation
                || transformation instanceof ReduceTransformation
                || transformation instanceof SinkTransformation
                || transformation instanceof LegacySinkTransformation
                || transformation instanceof TimestampsAndWatermarksTransformation
                || transformation instanceof PartitionTransformation) {
            final Field inputField = transformation.getClass().getDeclaredField("input");
            inputField.setAccessible(true);
            inputField.set(transformation, newInput);
        } else if (transformation instanceof TwoInputTransformation) {
            final Field inputField;
            if (((TwoInputTransformation<?, ?, ?>) transformation).getInput1() == oldInput) {
                inputField = transformation.getClass().getDeclaredField("input1");
            } else {
                inputField = transformation.getClass().getDeclaredField("input2");
            }
            inputField.setAccessible(true);
            inputField.set(transformation, newInput);
        } else if (transformation instanceof UnionTransformation
                || transformation instanceof AbstractMultipleInputTransformation) {
            final Field inputsField = transformation.getClass().getDeclaredField("inputs");
            inputsField.setAccessible(true);
            List<Transformation<?>> newInputs = Lists.newArrayList();
            newInputs.addAll(transformation.getInputs());
            newInputs.remove(oldInput);
            newInputs.add(newInput);
            inputsField.set(transformation, newInputs);
        } else if (transformation instanceof AbstractBroadcastStateTransformation) {
            final Field inputField;
            if (((AbstractBroadcastStateTransformation<?, ?, ?>) transformation).getRegularInput()
                    == oldInput) {
                inputField = transformation.getClass().getDeclaredField("regularInput");
            } else {
                inputField = transformation.getClass().getDeclaredField("broadcastInput");
            }
            inputField.setAccessible(true);
            inputField.set(transformation, newInput);
        } else {
            throw new RuntimeException("Unsupported transformation: " + transformation);
        }
    } catch (NoSuchFieldException | IllegalAccessException e) {
        // This should never happen.
        throw new RuntimeException(e);
    }
}
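The reflective rewiring above depends on the private field names of the concrete transformation classes ("input", "input1", "inputs", and so on). A minimal, self-contained toy sketch of the same pattern follows; the Node class and ReplaceInputSketch names are invented for illustration and are not Flink code.

import java.lang.reflect.Field;

public class ReplaceInputSketch {

    // Hypothetical stand-in for a transformation with a single private input reference.
    static final class Node {
        private Node input;
        private final String name;

        Node(String name, Node input) {
            this.name = name;
            this.input = input;
        }

        @Override
        public String toString() {
            return name + "(input=" + (input == null ? "none" : input.name) + ")";
        }
    }

    static void replaceInput(Node node, Node newInput) throws Exception {
        // Same pattern as the optimizer: look up the declared field by name,
        // make it accessible, and overwrite the reference in place.
        Field inputField = node.getClass().getDeclaredField("input");
        inputField.setAccessible(true);
        inputField.set(node, newInput);
    }

    public static void main(String[] args) throws Exception {
        Node a = new Node("a", null);
        Node b = new Node("b", null);
        Node c = new Node("c", a);
        System.out.println(c); // prints c(input=a)
        replaceInput(c, b);
        System.out.println(c); // prints c(input=b)
    }
}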
Use of org.apache.flink.streaming.api.transformations.OneInputTransformation in project beam by apache.
The class FlinkStreamingTransformTranslatorsTest, method readSourceTranslatorUnboundedWithMaxParallelism.
@Test
public void readSourceTranslatorUnboundedWithMaxParallelism() {
    final int maxParallelism = 6;
    final int parallelism = 2;
    SplittableParDo.PrimitiveUnboundedRead<String> transform =
        new SplittableParDo.PrimitiveUnboundedRead<>(Read.from(new TestUnboundedSource()));
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(parallelism);
    env.setMaxParallelism(maxParallelism);
    OneInputTransformation<?, ?> sourceTransform =
        (OneInputTransformation) applyReadSourceTransform(transform, PCollection.IsBounded.UNBOUNDED, env);
    UnboundedSourceWrapper source =
        (UnboundedSourceWrapper)
            SourceTransformationCompat.getOperator(StreamSources.getOnlyInput(sourceTransform))
                .getUserFunction();
    assertEquals(maxParallelism, source.getSplitSources().size());
}
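The Beam test above goes through translator helpers (applyReadSourceTransform, SourceTransformationCompat, StreamSources) that are internal to that test suite. A minimal Flink-only sketch of the same inspection idea is shown below; the pipeline (an identity map over fromElements) and the class name InspectTransformationSketch are made up for illustration. The cast works here because map() is translated into a OneInputTransformation; other operations may produce different transformation types.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.transformations.OneInputTransformation;

public class InspectTransformationSketch {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);
        env.setMaxParallelism(6);

        // Identity map; returns() is needed because the lambda's output type
        // cannot be extracted automatically.
        DataStream<String> mapped =
            env.fromElements("a", "b").map(value -> value).returns(Types.STRING);

        // Pull out the underlying transformation and inspect it.
        OneInputTransformation<?, ?> transform =
            (OneInputTransformation<?, ?>) mapped.getTransformation();
        System.out.println(transform.getName() + ", parallelism=" + transform.getParallelism());
    }
}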
Use of org.apache.flink.streaming.api.transformations.OneInputTransformation in project flink by apache.
The class StreamingJobGraphGeneratorTest, method testOperatorCoordinatorAddedToJobVertex.
@Test
public void testOperatorCoordinatorAddedToJobVertex() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Integer> stream =
        env.fromSource(
            new MockSource(Boundedness.BOUNDED, 1),
            WatermarkStrategy.noWatermarks(),
            "TestingSource");
    OneInputTransformation<Integer, Integer> resultTransform =
        new OneInputTransformation<Integer, Integer>(
            stream.getTransformation(),
            "AnyName",
            new CoordinatedTransformOperatorFactory(),
            BasicTypeInfo.INT_TYPE_INFO,
            env.getParallelism());
    new TestingSingleOutputStreamOperator<>(env, resultTransform).print();
    JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
    assertEquals(2, jobGraph.getVerticesAsArray()[0].getOperatorCoordinators().size());
}
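CoordinatedTransformOperatorFactory and TestingSingleOutputStreamOperator are helpers internal to this test. The constructor call itself can be reproduced outside the test with Flink's own operator classes; a rough sketch follows, assuming SimpleOperatorFactory and StreamMap (internal Flink APIs) are acceptable for experimentation, with the class name ManualTransformationSketch and the name "ManualMap" invented here.

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.operators.SimpleOperatorFactory;
import org.apache.flink.streaming.api.operators.StreamMap;
import org.apache.flink.streaming.api.transformations.OneInputTransformation;

public class ManualTransformationSketch {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<Integer> stream = env.fromElements(1, 2, 3);

        // Wrap a plain StreamMap operator; the test instead supplies a factory
        // whose operator exposes an OperatorCoordinator.Provider.
        OneInputTransformation<Integer, Integer> manual =
            new OneInputTransformation<>(
                stream.getTransformation(),
                "ManualMap",
                SimpleOperatorFactory.of(new StreamMap<Integer, Integer>(value -> value + 1)),
                BasicTypeInfo.INT_TYPE_INFO,
                env.getParallelism());

        System.out.println(manual.getName() + " -> " + manual.getOutputType());
    }
}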
Use of org.apache.flink.streaming.api.transformations.OneInputTransformation in project flink by apache.
The class WindowTranslationTest, method testApplyEventTime.
// ------------------------------------------------------------------------
// Apply Translation Tests
// ------------------------------------------------------------------------
@Test
@SuppressWarnings("rawtypes")
public void testApplyEventTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Tuple2<String, Integer>> source =
        env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));
    DataStream<Tuple2<String, Integer>> window1 =
        source.keyBy(new TupleKeySelector())
            .window(TumblingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS)))
            .apply(
                new WindowFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, String, TimeWindow>() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public void apply(
                            String key,
                            TimeWindow window,
                            Iterable<Tuple2<String, Integer>> values,
                            Collector<Tuple2<String, Integer>> out)
                            throws Exception {
                        for (Tuple2<String, Integer> in : values) {
                            out.collect(in);
                        }
                    }
                });
    OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>> transform =
        (OneInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
        (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof EventTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof TumblingEventTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof ListStateDescriptor);
    processElementAndEnsureOutput(
        winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
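The test refers to a TupleKeySelector helper defined elsewhere in WindowTranslationTest. A plausible minimal sketch, assuming it simply keys by the tuple's first field (org.apache.flink.api.java.functions.KeySelector is assumed to be imported):

private static class TupleKeySelector implements KeySelector<Tuple2<String, Integer>, String> {
    private static final long serialVersionUID = 1L;

    @Override
    public String getKey(Tuple2<String, Integer> value) throws Exception {
        // Key each record by the String in field f0.
        return value.f0;
    }
}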
Use of org.apache.flink.streaming.api.transformations.OneInputTransformation in project flink by apache.
The class WindowTranslationTest, method testAggregateProcessingTime.
@Test
public void testAggregateProcessingTime() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Tuple3<String, String, Integer>> source =
        env.fromElements(Tuple3.of("hello", "hallo", 1), Tuple3.of("hello", "hallo", 2));
    DataStream<Integer> window1 =
        source.keyBy(new Tuple3KeySelector())
            .window(SlidingProcessingTimeWindows.of(
                Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
            .aggregate(new DummyAggregationFunction());
    final OneInputTransformation<Tuple3<String, String, Integer>, Integer> transform =
        (OneInputTransformation<Tuple3<String, String, Integer>, Integer>) window1.getTransformation();
    final OneInputStreamOperator<Tuple3<String, String, Integer>, Integer> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple3<String, String, Integer>, ?, ?, ?> winOperator =
        (WindowOperator<String, Tuple3<String, String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingProcessingTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof AggregatingStateDescriptor);
    processElementAndEnsureOutput(
        winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple3<>("hello", "hallo", 1));
}
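This test likewise relies on Tuple3KeySelector and DummyAggregationFunction helpers from the same test class. A minimal sketch of an aggregator with a compatible shape follows (an assumption; the real helper may differ), summing the Integer field so the windowed stream produces Integer results (org.apache.flink.api.common.functions.AggregateFunction is assumed to be imported):

private static class DummyAggregationFunction
        implements AggregateFunction<Tuple3<String, String, Integer>, Integer, Integer> {

    @Override
    public Integer createAccumulator() {
        return 0;
    }

    @Override
    public Integer add(Tuple3<String, String, Integer> value, Integer accumulator) {
        // Accumulate the Integer field f2.
        return accumulator + value.f2;
    }

    @Override
    public Integer getResult(Integer accumulator) {
        return accumulator;
    }

    @Override
    public Integer merge(Integer a, Integer b) {
        return a + b;
    }
}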