Use of org.apache.flink.api.dag.Transformation in project flink by apache.
The class StreamExecSink, method translateToPlanInternal:
@SuppressWarnings("unchecked")
@Override
protected Transformation<Object> translateToPlanInternal(
        PlannerBase planner, ExecNodeConfig config) {
    final ExecEdge inputEdge = getInputEdges().get(0);
    final Transformation<RowData> inputTransform =
            (Transformation<RowData>) inputEdge.translateToPlan(planner);
    final RowType inputRowType = (RowType) inputEdge.getOutputType();
    final DynamicTableSink tableSink = tableSinkSpec.getTableSink(planner.getFlinkContext());
    final boolean isCollectSink = tableSink instanceof CollectDynamicSink;

    // Collect the indices of all rowtime attribute columns in the input row type.
    final List<Integer> rowtimeFieldIndices = new ArrayList<>();
    for (int i = 0; i < inputRowType.getFieldCount(); ++i) {
        if (TypeCheckUtils.isRowTime(inputRowType.getTypeAt(i))) {
            rowtimeFieldIndices.add(i);
        }
    }

    // A regular sink accepts at most one rowtime column; only the collect sink is exempt.
    final int rowtimeFieldIndex;
    if (rowtimeFieldIndices.size() > 1 && !isCollectSink) {
        throw new TableException(
                String.format(
                        "The query contains more than one rowtime attribute column [%s] "
                                + "for writing into table '%s'.\n"
                                + "Please select the column that should be used as the event-time timestamp "
                                + "for the table sink by casting all other columns to regular TIMESTAMP or TIMESTAMP_LTZ.",
                        rowtimeFieldIndices.stream()
                                .map(i -> inputRowType.getFieldNames().get(i))
                                .collect(Collectors.joining(", ")),
                        tableSinkSpec.getContextResolvedTable().getIdentifier().asSummaryString()));
    } else if (rowtimeFieldIndices.size() == 1) {
        rowtimeFieldIndex = rowtimeFieldIndices.get(0);
    } else {
        rowtimeFieldIndex = -1;
    }

    return createSinkTransformation(
            planner.getExecEnv(),
            config,
            inputTransform,
            tableSink,
            rowtimeFieldIndex,
            upsertMaterialize);
}
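The exception above also spells out the user-side fix: keep one rowtime column and cast the rest to a regular timestamp type before writing. A minimal sketch of that fix with the Table API, assuming a table t with two rowtime columns rt1 and rt2 and a sink my_sink (all names hypothetical, not from the Flink sources):

import static org.apache.flink.table.api.Expressions.$;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;

// Keep rt1 as the event-time timestamp; demote rt2 to a plain TIMESTAMP(3)
// so the sink sees only one rowtime attribute column.
static void writeWithSingleRowtime(Table t) {
    Table resolved = t.select(
            $("a"),
            $("rt1"),
            $("rt2").cast(DataTypes.TIMESTAMP(3)));
    resolved.executeInsert("my_sink"); // hypothetical sink name
}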
Use of org.apache.flink.api.dag.Transformation in project flink by apache.
The class StreamExecTemporalJoin, method translateToPlanInternal:
@Override
@SuppressWarnings("unchecked")
protected Transformation<RowData> translateToPlanInternal(
        PlannerBase planner, ExecNodeConfig config) {
    ExecEdge leftInputEdge = getInputEdges().get(0);
    ExecEdge rightInputEdge = getInputEdges().get(1);
    RowType leftInputType = (RowType) leftInputEdge.getOutputType();
    RowType rightInputType = (RowType) rightInputEdge.getOutputType();
    JoinUtil.validateJoinSpec(joinSpec, leftInputType, rightInputType, true);

    // Temporal table function joins support only INNER JOIN; temporal table
    // joins additionally support LEFT JOIN.
    FlinkJoinType joinType = joinSpec.getJoinType();
    if (isTemporalFunctionJoin) {
        if (joinType != FlinkJoinType.INNER) {
            throw new ValidationException(
                    "Temporal table function join currently only supports INNER JOIN, but was "
                            + joinType + " JOIN.");
        }
    } else {
        if (joinType != FlinkJoinType.LEFT && joinType != FlinkJoinType.INNER) {
            throw new TableException(
                    "Temporal table join currently only supports INNER JOIN and LEFT JOIN, but was "
                            + joinType + " JOIN.");
        }
    }

    RowType returnType = (RowType) getOutputType();
    TwoInputStreamOperator<RowData, RowData, RowData> joinOperator =
            getJoinOperator(config, leftInputType, rightInputType);
    Transformation<RowData> leftTransform =
            (Transformation<RowData>) leftInputEdge.translateToPlan(planner);
    Transformation<RowData> rightTransform =
            (Transformation<RowData>) rightInputEdge.translateToPlan(planner);
    TwoInputTransformation<RowData, RowData, RowData> ret =
            ExecNodeUtil.createTwoInputTransformation(
                    leftTransform,
                    rightTransform,
                    createTransformationMeta(TEMPORAL_JOIN_TRANSFORMATION, config),
                    joinOperator,
                    InternalTypeInfo.of(returnType),
                    leftTransform.getParallelism());

    // Set the key type and selectors so the join operator runs on keyed state.
    RowDataKeySelector leftKeySelector = getLeftKeySelector(leftInputType);
    RowDataKeySelector rightKeySelector = getRightKeySelector(rightInputType);
    ret.setStateKeySelectors(leftKeySelector, rightKeySelector);
    ret.setStateKeyType(leftKeySelector.getProducedType());
    return ret;
}
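The setStateKeySelectors/setStateKeyType calls are the plan-level counterpart of keying both inputs of a connected stream. A minimal sketch of the same keyed two-input pattern in the DataStream API, assuming two hypothetical streams of (key, value) tuples (this is an illustration of the pattern, not the planner's own code):

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.util.Collector;

// left and right are hypothetical streams; f0 is the join key on both sides.
static SingleOutputStreamOperator<String> keyedTwoInput(
        DataStream<Tuple2<Long, String>> left, DataStream<Tuple2<Long, String>> right) {
    return left.connect(right)
            // One key selector per input, analogous to setStateKeySelectors(...).
            .keyBy(l -> l.f0, r -> r.f0, Types.LONG)
            .process(new CoProcessFunction<Tuple2<Long, String>, Tuple2<Long, String>, String>() {
                @Override
                public void processElement1(
                        Tuple2<Long, String> value, Context ctx, Collector<String> out) {
                    out.collect("left: " + value.f1);
                }

                @Override
                public void processElement2(
                        Tuple2<Long, String> value, Context ctx, Collector<String> out) {
                    out.collect("right: " + value.f1);
                }
            });
}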
Use of org.apache.flink.api.dag.Transformation in project flink by apache.
The class DefaultExecutorTest, method testJobName:
@Test
public void testJobName() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    final Executor executor = new DefaultExecutor(env);
    final List<Transformation<?>> dummyTransformations =
            Collections.singletonList(
                    env.fromElements(1, 2, 3)
                            .addSink(new DiscardingSink<>())
                            .getTransformation());

    final Configuration configuration = new Configuration();
    configuration.set(PipelineOptions.NAME, "Custom Name");

    // default
    testJobName(
            executor.createPipeline(dummyTransformations, new Configuration(), "Default Name"),
            "Default Name");
    // Table API specific
    testJobName(
            executor.createPipeline(dummyTransformations, configuration, "Default Name"),
            "Custom Name");
    // DataStream API specific
    env.configure(configuration);
    testJobName(
            executor.createPipeline(dummyTransformations, new Configuration(), "Default Name"),
            "Custom Name");
}
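The test delegates to a testJobName assertion helper that this excerpt does not show. A minimal sketch of such a helper, assuming the Pipeline produced by DefaultExecutor.createPipeline is a StreamGraph whose job name can be read back (a plausible reconstruction, not quoted from the Flink sources):

import static org.junit.Assert.assertEquals;

import org.apache.flink.api.dag.Pipeline;
import org.apache.flink.streaming.api.graph.StreamGraph;

// Hypothetical reconstruction of the helper used above: the job name should be
// the default name unless pipeline.name was set in the effective configuration.
private static void testJobName(Pipeline pipeline, String expectedJobName) {
    assertEquals(expectedJobName, ((StreamGraph) pipeline).getJobName());
}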