Use of org.apache.flink.table.runtime.operators.multipleinput.input.InputSpec in project flink by apache.
The class BatchExecMultipleInput, method translateToPlanInternal.
@Override
protected Transformation<RowData> translateToPlanInternal(PlannerBase planner, ExecNodeConfig config) {
    // Translate every input edge first; the resulting transformations become the
    // inputs of the generated multiple-input transformation.
    final List<Transformation<?>> inputTransforms = new ArrayList<>();
    for (ExecEdge inputEdge : getInputEdges()) {
        inputTransforms.add(inputEdge.translateToPlan(planner));
    }
    final Transformation<?> outputTransform = rootNode.translateToPlan(planner);
    // One read order per input, taken from the input properties' priorities.
    // (mapToInt with a method reference avoids the Integer boxing round-trip of
    // map(...).mapToInt(i -> i).)
    final int[] readOrders =
            getInputProperties().stream().mapToInt(InputProperty::getPriority).toArray();
    // Wrap the member operators of the sub-graph into TableOperatorWrappers and
    // collect the (input transformation, InputSpec) pairs describing each input.
    final TableOperatorWrapperGenerator generator =
            new TableOperatorWrapperGenerator(inputTransforms, outputTransform, readOrders);
    generator.generate();
    final List<Pair<Transformation<?>, InputSpec>> inputTransformAndInputSpecPairs =
            generator.getInputTransformAndInputSpecPairs();
    final MultipleInputTransformation<RowData> multipleInputTransform =
            new MultipleInputTransformation<>(
                    createTransformationName(config),
                    new BatchMultipleInputStreamOperatorFactory(
                            inputTransformAndInputSpecPairs.stream()
                                    .map(Pair::getValue)
                                    .collect(Collectors.toList()),
                            generator.getHeadWrappers(),
                            generator.getTailWrapper()),
                    InternalTypeInfo.of(getOutputType()),
                    generator.getParallelism());
    multipleInputTransform.setDescription(createTransformationDescription(config));
    // Register the inputs in the same order the generator assigned their input ids.
    inputTransformAndInputSpecPairs.forEach(input -> multipleInputTransform.addInput(input.getKey()));
    if (generator.getMaxParallelism() > 0) {
        multipleInputTransform.setMaxParallelism(generator.getMaxParallelism());
    }
    // set resources
    multipleInputTransform.setResources(generator.getMinResources(), generator.getPreferredResources());
    final int memoryWeight = generator.getManagedMemoryWeight();
    // Shift by 20 scales the weight by 2^20; presumably the weight is in
    // mebibytes and this converts it to bytes — TODO confirm against ExecNodeUtil.
    final long memoryBytes = (long) memoryWeight << 20;
    ExecNodeUtil.setManagedMemoryWeight(multipleInputTransform, memoryBytes);
    // set chaining strategy for source chaining
    multipleInputTransform.setChainingStrategy(ChainingStrategy.HEAD_WITH_SOURCES);
    return multipleInputTransform;
}
Use of org.apache.flink.table.runtime.operators.multipleinput.input.InputSpec in project flink by apache.
The class TableOperatorWrapperGeneratorTest, method testComplex.
/**
 * Test for a complex sub-graph in a multiple input node.
 *
 * <pre>
 *
 * source1 source2
 * | |
 * agg1 agg2
 * \ /
 * source4 source5 join1 source3
 * \ / \ /
 * join3 join2
 * \ /
 * join4
 *
 * </pre>
 */
@Test
public void testComplex() {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
Transformation<RowData> source1 = createSource(env, "source1");
Transformation<RowData> source2 = createSource(env, "source2");
Transformation<RowData> source3 = createSource(env, "source3");
Transformation<RowData> source4 = createSource(env, "source4");
Transformation<RowData> source5 = createSource(env, "source5");
// Each operator declares a distinct managed-memory weight (1..6) so the
// expected per-wrapper fractions below (x / 21) are easy to trace.
OneInputTransformation<RowData, RowData> agg1 = createOneInputTransform(source1, "agg1", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType())));
agg1.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 1);
OneInputTransformation<RowData, RowData> agg2 = createOneInputTransform(source2, "agg2", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType())));
agg2.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 2);
TwoInputTransformation<RowData, RowData, RowData> join1 = createTwoInputTransform(agg1, agg2, "join1", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType())));
join1.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 3);
TwoInputTransformation<RowData, RowData, RowData> join2 = createTwoInputTransform(join1, source3, "join2", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType())));
join2.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 4);
TwoInputTransformation<RowData, RowData, RowData> join3 = createTwoInputTransform(source4, source5, "join3", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType())));
join3.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 5);
TwoInputTransformation<RowData, RowData, RowData> join4 = createTwoInputTransform(join2, join3, "join4", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType())));
join4.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 6);
// The read-order array is positional: source1..source5 get priorities
// {2, 3, 4, 0, 1} respectively.
TableOperatorWrapperGenerator generator = new TableOperatorWrapperGenerator(Arrays.asList(source1, source2, source3, source4, source5), join4, new int[] { 2, 3, 4, 0, 1 });
generator.generate();
// Expected wrapper graph. 21 is the sum of all declared weights
// (1+2+3+4+5+6); each wrapper carries its own weight as a fraction of that.
TableOperatorWrapper<?> aggWrapper1 = createWrapper(agg1, 3, 1.0 / 21);
TableOperatorWrapper<?> aggWrapper2 = createWrapper(agg2, 4, 2.0 / 21);
TableOperatorWrapper<?> joinWrapper1 = createWrapper(join1, 2, 3.0 / 21);
joinWrapper1.addInput(aggWrapper1, 1);
joinWrapper1.addInput(aggWrapper2, 2);
TableOperatorWrapper<?> joinWrapper2 = createWrapper(join2, 1, 4.0 / 21);
joinWrapper2.addInput(joinWrapper1, 1);
TableOperatorWrapper<?> joinWrapper3 = createWrapper(join3, 5, 5.0 / 21);
TableOperatorWrapper<?> outputWrapper = createWrapper(join4, 0, 6.0 / 21);
outputWrapper.addInput(joinWrapper2, 1);
outputWrapper.addInput(joinWrapper3, 2);
// Each InputSpec is (inputId 1-based in source order, read order, consuming
// wrapper, input id on the consuming operator).
assertEquals(Arrays.asList(Pair.of(source1, new InputSpec(1, 2, aggWrapper1, 1)), Pair.of(source2, new InputSpec(2, 3, aggWrapper2, 1)), Pair.of(source3, new InputSpec(3, 4, joinWrapper2, 2)), Pair.of(source4, new InputSpec(4, 0, joinWrapper3, 1)), Pair.of(source5, new InputSpec(5, 1, joinWrapper3, 2))), generator.getInputTransformAndInputSpecPairs());
assertEquals(outputWrapper, generator.getTailWrapper());
assertEquals(21, generator.getManagedMemoryWeight());
assertEquals(10, generator.getParallelism());
assertEquals(-1, generator.getMaxParallelism());
assertEquals(ResourceSpec.UNKNOWN, generator.getMinResources());
assertEquals(ResourceSpec.UNKNOWN, generator.getPreferredResources());
}
Use of org.apache.flink.table.runtime.operators.multipleinput.input.InputSpec in project flink by apache.
The class BatchMultipleInputStreamOperator, method endInput.
@Override
public void endInput(int inputId) throws Exception {
    // First record the finished input in the selection handler, then forward
    // the end-of-input notification to the wrapped operator consuming it.
    inputSelectionHandler.endInput(inputId);
    final InputSpec spec = inputSpecMap.get(inputId);
    spec.getOutput().endOperatorInput(spec.getOutputOpInputId());
}
Use of org.apache.flink.table.runtime.operators.multipleinput.input.InputSpec in project flink by apache.
The class TableOperatorWrapperGenerator, method processInput.
private void processInput(Transformation<?> input, int inputIdx, TableOperatorWrapper<?> outputWrapper, int outputOpInputId) {
    // Input ids are 1-based and assigned in the order inputs are processed.
    final int nextInputId = inputTransformAndInputSpecPairs.size() + 1;
    inputTransformAndInputSpecPairs.add(
            Pair.of(
                    input,
                    new InputSpec(nextInputId, readOrders[inputIdx], outputWrapper, outputOpInputId)));
}
Use of org.apache.flink.table.runtime.operators.multipleinput.input.InputSpec in project flink by apache.
The class TableOperatorWrapperGeneratorTest, method testSimple.
/**
 * Test for a simple sub-graph in a multiple input node.
 *
 * <pre>
 *
 * source1 source2
 * | |
 * agg1 agg2
 * \ /
 * join
 *
 * </pre>
 */
@Test
public void testSimple() {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// Build the transformation graph: two sources feeding two aggregations,
// whose outputs are joined.
Transformation<RowData> source1 = createSource(env, "source1");
Transformation<RowData> source2 = createSource(env, "source2");
OneInputTransformation<RowData, RowData> aggTransform1 = createOneInputTransform(source1, "agg1", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType())));
OneInputTransformation<RowData, RowData> aggTransform2 = createOneInputTransform(source2, "agg2", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType())));
TwoInputTransformation<RowData, RowData, RowData> joinTransform = createTwoInputTransform(aggTransform1, aggTransform2, "join", InternalTypeInfo.of(RowType.of(DataTypes.STRING().getLogicalType(), DataTypes.STRING().getLogicalType())));
// Distinct managed-memory weights (1, 2, 3) make the expected fractions
// below (x / 6) easy to trace.
aggTransform1.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 1);
aggTransform2.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 2);
joinTransform.declareManagedMemoryUseCaseAtOperatorScope(ManagedMemoryUseCase.OPERATOR, 3);
// Read orders are positional: source1 -> 1, source2 -> 0.
TableOperatorWrapperGenerator generator = new TableOperatorWrapperGenerator(Arrays.asList(source1, source2), joinTransform, new int[] { 1, 0 });
generator.generate();
// Expected wrapper graph; 6 is the total declared weight (1 + 2 + 3).
TableOperatorWrapper<?> expectedAggWrapper1 = createWrapper(aggTransform1, 1, 1.0 / 6);
TableOperatorWrapper<?> expectedAggWrapper2 = createWrapper(aggTransform2, 2, 2.0 / 6);
TableOperatorWrapper<?> expectedJoinWrapper = createWrapper(joinTransform, 0, 3.0 / 6);
expectedJoinWrapper.addInput(expectedAggWrapper1, 1);
expectedJoinWrapper.addInput(expectedAggWrapper2, 2);
// Each InputSpec is (1-based input id, read order, consuming wrapper,
// input id on the consuming operator).
assertEquals(Arrays.asList(Pair.of(source1, new InputSpec(1, 1, expectedAggWrapper1, 1)), Pair.of(source2, new InputSpec(2, 0, expectedAggWrapper2, 1))), generator.getInputTransformAndInputSpecPairs());
assertEquals(expectedJoinWrapper, generator.getTailWrapper());
assertEquals(6, generator.getManagedMemoryWeight());
assertEquals(10, generator.getParallelism());
assertEquals(-1, generator.getMaxParallelism());
assertEquals(ResourceSpec.UNKNOWN, generator.getMinResources());
assertEquals(ResourceSpec.UNKNOWN, generator.getPreferredResources());
}
Aggregations