Search in sources :

Example 6 with ExecNodeConfig

Usage of org.apache.flink.table.planner.plan.nodes.exec.ExecNodeConfig in the Apache Flink project.

The following is the translateToPlanInternal method of the class StreamExecDataStreamScan.

@SuppressWarnings("unchecked")
@Override
protected Transformation<RowData> translateToPlanInternal(PlannerBase planner, ExecNodeConfig config) {
    // The raw transformation produced by the wrapped DataStream.
    final Transformation<?> inputTransform = dataStream.getTransformation();
    final Optional<RexNode> rowtimeExpr = getRowtimeExpression(planner.getRelBuilder());

    // Fast path: no rowtime expression and no type conversion needed — the
    // source transformation can be reused directly as a RowData stream.
    if (!rowtimeExpr.isPresent() && !ScanUtil.needsConversion(sourceType)) {
        return (Transformation<RowData>) inputTransform;
    }

    // Code snippets used by the generated operator to stash and clear the
    // current element while a time-attribute field is being extracted.
    final String extractElement;
    final String resetElement;
    if (ScanUtil.hasTimeAttributeField(fieldIndexes)) {
        final String elementTerm = OperatorCodeGenerator.ELEMENT();
        extractElement = String.format("ctx.%s = %s;", elementTerm, elementTerm);
        resetElement = String.format("ctx.%s = null;", elementTerm);
    } else {
        extractElement = "";
        resetElement = "";
    }

    final CodeGeneratorContext codegenCtx =
            new CodeGeneratorContext(config.getTableConfig())
                    .setOperatorBaseClass(TableStreamOperator.class);
    // Generate a conversion operator that turns the external records into
    // internal RowData, optionally evaluating the rowtime expression.
    return ScanUtil.convertToInternalRow(
            codegenCtx,
            (Transformation<Object>) inputTransform,
            fieldIndexes,
            sourceType,
            (RowType) getOutputType(),
            qualifiedName,
            (detailName, simplifyName) ->
                    createFormattedTransformationName(detailName, simplifyName, config),
            (description) -> createFormattedTransformationDescription(description, config),
            JavaScalaConversionUtil.toScala(rowtimeExpr),
            extractElement,
            resetElement);
}
Also used : TableStreamOperator(org.apache.flink.table.runtime.operators.TableStreamOperator) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) MultipleTransformationTranslator(org.apache.flink.table.planner.plan.nodes.exec.MultipleTransformationTranslator) RowType(org.apache.flink.table.types.logical.RowType) FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder) ExecNode(org.apache.flink.table.planner.plan.nodes.exec.ExecNode) TimestampType(org.apache.flink.table.types.logical.TimestampType) TimestampKind(org.apache.flink.table.types.logical.TimestampKind) ScanUtil(org.apache.flink.table.planner.plan.utils.ScanUtil) RexNode(org.apache.calcite.rex.RexNode) ROWTIME_STREAM_MARKER(org.apache.flink.table.typeutils.TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER) CodeGeneratorContext(org.apache.flink.table.planner.codegen.CodeGeneratorContext) TypeCheckUtils(org.apache.flink.table.runtime.typeutils.TypeCheckUtils) ExecNodeContext(org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext) RowData(org.apache.flink.table.data.RowData) PlannerBase(org.apache.flink.table.planner.delegation.PlannerBase) ExecNodeConfig(org.apache.flink.table.planner.plan.nodes.exec.ExecNodeConfig) Collectors(java.util.stream.Collectors) DataStream(org.apache.flink.streaming.api.datastream.DataStream) OperatorCodeGenerator(org.apache.flink.table.planner.codegen.OperatorCodeGenerator) List(java.util.List) LogicalTypeDataTypeConverter.fromDataTypeToLogicalType(org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType) LogicalType(org.apache.flink.table.types.logical.LogicalType) FlinkSqlOperatorTable(org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable) JavaScalaConversionUtil(org.apache.flink.table.planner.utils.JavaScalaConversionUtil) Optional(java.util.Optional) ExecNodeBase(org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase) Transformation(org.apache.flink.api.dag.Transformation) 
Collections(java.util.Collections) RowData(org.apache.flink.table.data.RowData) Transformation(org.apache.flink.api.dag.Transformation) CodeGeneratorContext(org.apache.flink.table.planner.codegen.CodeGeneratorContext) RowType(org.apache.flink.table.types.logical.RowType) RexNode(org.apache.calcite.rex.RexNode)

Example 7 with ExecNodeConfig

Usage of org.apache.flink.table.planner.plan.nodes.exec.ExecNodeConfig in the Apache Flink project.

The following is the translateToPlanInternal method of the class StreamExecSink.

@SuppressWarnings("unchecked")
@Override
protected Transformation<Object> translateToPlanInternal(PlannerBase planner, ExecNodeConfig config) {
    final ExecEdge inputEdge = getInputEdges().get(0);
    final Transformation<RowData> inputTransform =
            (Transformation<RowData>) inputEdge.translateToPlan(planner);
    final RowType inputRowType = (RowType) inputEdge.getOutputType();
    final DynamicTableSink tableSink = tableSinkSpec.getTableSink(planner.getFlinkContext());
    final boolean isCollectSink = tableSink instanceof CollectDynamicSink;

    // Gather the indices of every rowtime attribute column in the input row.
    final List<Integer> rowtimeFieldIndices = new ArrayList<>();
    for (int fieldIdx = 0; fieldIdx < inputRowType.getFieldCount(); fieldIdx++) {
        if (TypeCheckUtils.isRowTime(inputRowType.getTypeAt(fieldIdx))) {
            rowtimeFieldIndices.add(fieldIdx);
        }
    }

    // Multiple rowtime columns are ambiguous for a regular sink; only the
    // special collect sink tolerates them (and then receives no rowtime index).
    if (rowtimeFieldIndices.size() > 1 && !isCollectSink) {
        throw new TableException(
                String.format(
                        "The query contains more than one rowtime attribute column [%s] for writing into table '%s'.\n"
                                + "Please select the column that should be used as the event-time timestamp "
                                + "for the table sink by casting all other columns to regular TIMESTAMP or TIMESTAMP_LTZ.",
                        rowtimeFieldIndices.stream()
                                .map(i -> inputRowType.getFieldNames().get(i))
                                .collect(Collectors.joining(", ")),
                        tableSinkSpec.getContextResolvedTable().getIdentifier().asSummaryString()));
    }

    // Exactly one rowtime column -> its index; otherwise -1 (no rowtime).
    final int rowtimeFieldIndex =
            rowtimeFieldIndices.size() == 1 ? rowtimeFieldIndices.get(0) : -1;

    return createSinkTransformation(
            planner.getExecEnv(), config, inputTransform, tableSink, rowtimeFieldIndex, upsertMaterialize);
}
Also used : InputProperty(org.apache.flink.table.planner.plan.nodes.exec.InputProperty) JsonCreator(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator) ChangelogMode(org.apache.flink.table.connector.ChangelogMode) CollectDynamicSink(org.apache.flink.table.planner.connectors.CollectDynamicSink) RowType(org.apache.flink.table.types.logical.RowType) JsonInclude(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonInclude) ExecNode(org.apache.flink.table.planner.plan.nodes.exec.ExecNode) ArrayList(java.util.ArrayList) DynamicTableSinkSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSinkSpec) FlinkVersion(org.apache.flink.FlinkVersion) TypeCheckUtils(org.apache.flink.table.runtime.typeutils.TypeCheckUtils) ExecNodeContext(org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext) RowData(org.apache.flink.table.data.RowData) DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink) PlannerBase(org.apache.flink.table.planner.delegation.PlannerBase) ExecNodeMetadata(org.apache.flink.table.planner.plan.nodes.exec.ExecNodeMetadata) ExecNodeConfig(org.apache.flink.table.planner.plan.nodes.exec.ExecNodeConfig) TableException(org.apache.flink.table.api.TableException) CommonExecSink(org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecSink) Collectors(java.util.stream.Collectors) JsonProperty(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty) List(java.util.List) ExecEdge(org.apache.flink.table.planner.plan.nodes.exec.ExecEdge) LogicalType(org.apache.flink.table.types.logical.LogicalType) Transformation(org.apache.flink.api.dag.Transformation) Collections(java.util.Collections) Transformation(org.apache.flink.api.dag.Transformation) TableException(org.apache.flink.table.api.TableException) ExecEdge(org.apache.flink.table.planner.plan.nodes.exec.ExecEdge) ArrayList(java.util.ArrayList) RowType(org.apache.flink.table.types.logical.RowType) 
DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink) CollectDynamicSink(org.apache.flink.table.planner.connectors.CollectDynamicSink) RowData(org.apache.flink.table.data.RowData)

Aggregations

Transformation (org.apache.flink.api.dag.Transformation)7 RowData (org.apache.flink.table.data.RowData)7 ExecNode (org.apache.flink.table.planner.plan.nodes.exec.ExecNode)7 ExecNodeConfig (org.apache.flink.table.planner.plan.nodes.exec.ExecNodeConfig)7 ExecNodeContext (org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext)7 List (java.util.List)6 PlannerBase (org.apache.flink.table.planner.delegation.PlannerBase)6 RowType (org.apache.flink.table.types.logical.RowType)6 CodeGeneratorContext (org.apache.flink.table.planner.codegen.CodeGeneratorContext)5 ExecEdge (org.apache.flink.table.planner.plan.nodes.exec.ExecEdge)5 InputProperty (org.apache.flink.table.planner.plan.nodes.exec.InputProperty)5 Collections (java.util.Collections)4 TableException (org.apache.flink.table.api.TableException)4 ExecNodeUtil (org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodeUtil)4 InternalTypeInfo (org.apache.flink.table.runtime.typeutils.InternalTypeInfo)4 ArrayList (java.util.ArrayList)3 Arrays (java.util.Arrays)3 Collectors (java.util.stream.Collectors)3 AggregateCall (org.apache.calcite.rel.core.AggregateCall)3 OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation)3