Example 6 with FlinkTypeFactory

Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.

From the class TypeInferenceOperandChecker, method insertImplicitCasts:

private void insertImplicitCasts(SqlCallBinding callBinding, List<DataType> expectedDataTypes) {
    final FlinkTypeFactory flinkTypeFactory = unwrapTypeFactory(callBinding);
    final List<SqlNode> operands = callBinding.operands();
    for (int i = 0; i < operands.size(); i++) {
        final LogicalType expectedType = expectedDataTypes.get(i).getLogicalType();
        final LogicalType argumentType = toLogicalType(callBinding.getOperandType(i));
        if (!supportsAvoidingCast(argumentType, expectedType)) {
            final RelDataType expectedRelDataType = flinkTypeFactory.createFieldTypeFromLogicalType(expectedType);
            final SqlNode castedOperand = castTo(operands.get(i), expectedRelDataType);
            callBinding.getCall().setOperand(i, castedOperand);
            updateInferredType(callBinding.getValidator(), castedOperand, expectedRelDataType);
        }
    }
}
Also used: org.apache.calcite.rel.type.RelDataType, org.apache.calcite.sql.SqlNode, org.apache.flink.table.planner.calcite.FlinkTypeFactory, org.apache.flink.table.planner.calcite.FlinkTypeFactory.toLogicalType, org.apache.flink.table.types.logical.LogicalType
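
The cast insertion above hinges on the round trip between Flink's LogicalType and Calcite's RelDataType. A minimal sketch of that round trip, assuming FlinkTypeFactory.INSTANCE() is available (it is used the same way in Example 7); the wrapper class and main method are ours:

import org.apache.calcite.rel.type.RelDataType;
import org.apache.flink.table.planner.calcite.FlinkTypeFactory;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;

public class TypeRoundTripSketch {
    public static void main(String[] args) {
        FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();

        // Flink logical type -> Calcite RelDataType, as in insertImplicitCasts
        LogicalType intType = new IntType();
        RelDataType relType = factory.createFieldTypeFromLogicalType(intType);

        // ... and back, as done for the operand types via toLogicalType
        LogicalType roundTripped = FlinkTypeFactory.toLogicalType(relType);
        System.out.println(roundTripped); // INT
    }
}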

Example 7 with FlinkTypeFactory

Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.

From the class DynamicTableSourceSpecSerdeTest, method testDynamicTableSinkSpecSerde:

public static Stream<DynamicTableSourceSpec> testDynamicTableSinkSpecSerde() {
    Map<String, String> options1 = new HashMap<>();
    options1.put("connector", FileSystemTableFactory.IDENTIFIER);
    options1.put("format", TestCsvFormatFactory.IDENTIFIER);
    options1.put("path", "/tmp");
    final ResolvedSchema resolvedSchema1 =
            new ResolvedSchema(
                    Collections.singletonList(Column.physical("a", DataTypes.BIGINT())),
                    Collections.emptyList(),
                    null);
    final CatalogTable catalogTable1 =
            CatalogTable.of(
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema1).build(),
                    null,
                    Collections.emptyList(),
                    options1);
    DynamicTableSourceSpec spec1 =
            new DynamicTableSourceSpec(
                    ContextResolvedTable.temporary(
                            ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                            new ResolvedCatalogTable(catalogTable1, resolvedSchema1)),
                    null);
    Map<String, String> options2 = new HashMap<>();
    options2.put("connector", TestValuesTableFactory.IDENTIFIER);
    options2.put("disable-lookup", "true");
    options2.put("enable-watermark-push-down", "true");
    options2.put("filterable-fields", "b");
    options2.put("bounded", "false");
    options2.put("readable-metadata", "m1:INT, m2:STRING");
    final ResolvedSchema resolvedSchema2 =
            new ResolvedSchema(
                    Arrays.asList(
                            Column.physical("a", DataTypes.BIGINT()),
                            Column.physical("b", DataTypes.INT()),
                            Column.physical("c", DataTypes.STRING()),
                            Column.physical("p", DataTypes.STRING()),
                            Column.metadata("m1", DataTypes.INT(), null, false),
                            Column.metadata("m2", DataTypes.STRING(), null, false),
                            Column.physical("ts", DataTypes.TIMESTAMP(3))),
                    Collections.emptyList(),
                    null);
    final CatalogTable catalogTable2 =
            CatalogTable.of(
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema2).build(),
                    null,
                    Collections.emptyList(),
                    options2);
    FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();
    RexBuilder rexBuilder = new RexBuilder(factory);
    DynamicTableSourceSpec spec2 =
            new DynamicTableSourceSpec(
                    ContextResolvedTable.temporary(
                            ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                            new ResolvedCatalogTable(catalogTable2, resolvedSchema2)),
                    Arrays.asList(
                            new ProjectPushDownSpec(
                                    new int[][] {{0}, {1}, {4}, {6}},
                                    RowType.of(
                                            new LogicalType[] {new BigIntType(), new IntType(), new IntType(), new TimestampType(3)},
                                            new String[] {"a", "b", "m1", "ts"})),
                            new ReadingMetadataSpec(
                                    Arrays.asList("m1", "m2"),
                                    RowType.of(
                                            new LogicalType[] {new BigIntType(), new IntType(), new IntType(), new TimestampType(3)},
                                            new String[] {"a", "b", "m1", "ts"})),
                            new FilterPushDownSpec(
                                    Collections.singletonList(
                                            // b >= 10
                                            rexBuilder.makeCall(
                                                    SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
                                                    rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.INTEGER), 1),
                                                    rexBuilder.makeExactLiteral(new BigDecimal(10))))),
                            new WatermarkPushDownSpec(
                                    // ts minus a 1-second interval (interval literal value in millis)
                                    rexBuilder.makeCall(
                                            SqlStdOperatorTable.MINUS,
                                            rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.TIMESTAMP, 3), 3),
                                            rexBuilder.makeIntervalLiteral(
                                                    BigDecimal.valueOf(1000),
                                                    new SqlIntervalQualifier(TimeUnit.SECOND, 2, TimeUnit.SECOND, 6, SqlParserPos.ZERO))),
                                    5000,
                                    RowType.of(new BigIntType(), new IntType(), new IntType(), new TimestampType(false, TimestampKind.ROWTIME, 3))),
                            new SourceWatermarkSpec(
                                    true,
                                    RowType.of(new BigIntType(), new IntType(), new IntType(), new TimestampType(false, TimestampKind.ROWTIME, 3))),
                            new LimitPushDownSpec(100),
                            new PartitionPushDownSpec(
                                    Arrays.asList(
                                            new HashMap<String, String>() {
                                                {
                                                    put("p", "A");
                                                }
                                            },
                                            new HashMap<String, String>() {
                                                {
                                                    put("p", "B");
                                                }
                                            }))));
    return Stream.of(spec1, spec2);
}
Also used: java.math.BigDecimal, java.util.HashMap, org.apache.calcite.rex.RexBuilder, org.apache.calcite.sql.SqlIntervalQualifier, org.apache.flink.table.catalog.CatalogTable, org.apache.flink.table.catalog.ResolvedCatalogTable, org.apache.flink.table.catalog.ResolvedSchema, org.apache.flink.table.planner.calcite.FlinkTypeFactory, org.apache.flink.table.planner.plan.abilities.source.FilterPushDownSpec, org.apache.flink.table.planner.plan.abilities.source.LimitPushDownSpec, org.apache.flink.table.planner.plan.abilities.source.PartitionPushDownSpec, org.apache.flink.table.planner.plan.abilities.source.ProjectPushDownSpec, org.apache.flink.table.planner.plan.abilities.source.ReadingMetadataSpec, org.apache.flink.table.planner.plan.abilities.source.SourceWatermarkSpec, org.apache.flink.table.planner.plan.abilities.source.WatermarkPushDownSpec, org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSourceSpec, org.apache.flink.table.types.logical.BigIntType, org.apache.flink.table.types.logical.IntType, org.apache.flink.table.types.logical.LogicalType, org.apache.flink.table.types.logical.TimestampType
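
The RexBuilder calls buried inside spec2 are easier to read in isolation. A minimal sketch of the b >= 10 filter predicate, using only calls that already appear in the test above; the wrapper class and main method are ours:

import java.math.BigDecimal;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.flink.table.planner.calcite.FlinkTypeFactory;

public class FilterPredicateSketch {
    public static void main(String[] args) {
        FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();
        RexBuilder rexBuilder = new RexBuilder(factory);

        // $1 >= 10, where input ref 1 is the INT column "b" of the source schema
        RexNode predicate = rexBuilder.makeCall(
                SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
                rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.INTEGER), 1),
                rexBuilder.makeExactLiteral(new BigDecimal(10)));
        System.out.println(predicate); // >=($1, 10)
    }
}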

Example 8 with FlinkTypeFactory

Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.

From the class ExpressionConverter, method visit:

@Override
public RexNode visit(ValueLiteralExpression valueLiteral) {
    LogicalType type = fromDataTypeToLogicalType(valueLiteral.getOutputDataType());
    RexBuilder rexBuilder = relBuilder.getRexBuilder();
    FlinkTypeFactory typeFactory = (FlinkTypeFactory) relBuilder.getTypeFactory();
    RelDataType relDataType = typeFactory.createFieldTypeFromLogicalType(type);
    if (valueLiteral.isNull()) {
        return rexBuilder.makeNullLiteral(relDataType);
    }
    Object value = null;
    switch (type.getTypeRoot()) {
        case DECIMAL:
        case TINYINT:
        case SMALLINT:
        case INTEGER:
        case BIGINT:
        case FLOAT:
        case DOUBLE:
            value = extractValue(valueLiteral, BigDecimal.class);
            break;
        case VARCHAR:
        case CHAR:
            value = extractValue(valueLiteral, String.class);
            break;
        case BINARY:
        case VARBINARY:
            value = new ByteString(extractValue(valueLiteral, byte[].class));
            break;
        case INTERVAL_YEAR_MONTH:
            // convert to total months
            value = BigDecimal.valueOf(extractValue(valueLiteral, Period.class).toTotalMonths());
            break;
        case INTERVAL_DAY_TIME:
            // TODO planner supports only milliseconds precision
            // convert to total millis
            value = BigDecimal.valueOf(extractValue(valueLiteral, Duration.class).toMillis());
            break;
        case DATE:
            value = DateString.fromDaysSinceEpoch((int) extractValue(valueLiteral, LocalDate.class).toEpochDay());
            break;
        case TIME_WITHOUT_TIME_ZONE:
            // TODO the type factory strips the precision; for literals we could
            // already be more lenient. Moreover, conversion from long supports a
            // precision of up to TIME(3); the planner does not support higher
            // precisions.
            TimeType timeType = (TimeType) type;
            int precision = timeType.getPrecision();
            relDataType = typeFactory.createSqlType(SqlTypeName.TIME, Math.min(precision, 3));
            value = TimeString.fromMillisOfDay(extractValue(valueLiteral, LocalTime.class).get(ChronoField.MILLI_OF_DAY));
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            LocalDateTime datetime = extractValue(valueLiteral, LocalDateTime.class);
            value = fromLocalDateTime(datetime);
            break;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            // normalize to UTC
            Instant instant = extractValue(valueLiteral, Instant.class);
            value = fromLocalDateTime(instant.atOffset(ZoneOffset.UTC).toLocalDateTime());
            break;
        default:
            value = extractValue(valueLiteral, Object.class);
            if (value instanceof TimePointUnit) {
                value = commonToCalcite((TimePointUnit) value);
            } else if (value instanceof TimeIntervalUnit) {
                value = commonToCalcite((TimeIntervalUnit) value);
            }
            break;
    }
    // the final boolean allows a cast when the value does not exactly match the
    // type, see RexBuilder#makeCast
    return rexBuilder.makeLiteral(value, relDataType, true);
}
Also used: java.math.BigDecimal, java.time.Duration, java.time.Instant, java.time.LocalDateTime, java.time.LocalTime, java.time.Period, org.apache.calcite.avatica.util.ByteString, org.apache.calcite.rel.type.RelDataType, org.apache.calcite.rex.RexBuilder, org.apache.calcite.util.DateString, org.apache.calcite.util.TimeString, org.apache.flink.table.expressions.TimeIntervalUnit, org.apache.flink.table.expressions.TimePointUnit, org.apache.flink.table.planner.calcite.FlinkTypeFactory, org.apache.flink.table.planner.utils.TimestampStringUtils.fromLocalDateTime, org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType, org.apache.flink.table.types.logical.LogicalType, org.apache.flink.table.types.logical.TimeType
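
The temporal branches above convert Java time values into Calcite's string-based literal wrappers. A minimal sketch of the DATE and TIME conversions, reusing the same DateString/TimeString calls that appear in those branches; the wrapper class and main method are ours:

import java.time.LocalDate;
import java.time.LocalTime;
import java.time.temporal.ChronoField;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimeString;

public class TemporalLiteralSketch {
    public static void main(String[] args) {
        // DATE branch: days since epoch
        DateString date = DateString.fromDaysSinceEpoch(
                (int) LocalDate.of(2020, 1, 1).toEpochDay());

        // TIME_WITHOUT_TIME_ZONE branch: millis of day
        TimeString time = TimeString.fromMillisOfDay(
                LocalTime.of(12, 30).get(ChronoField.MILLI_OF_DAY));

        System.out.println(date + " " + time); // 2020-01-01 12:30:00
    }
}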

Example 9 with FlinkTypeFactory

Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.

From the class BridgingSqlFunction, method of:

/**
 * Creates an instance of a scalar or table function during translation.
 */
public static BridgingSqlFunction of(RelOptCluster cluster, ContextResolvedFunction resolvedFunction) {
    final FlinkContext context = ShortcutUtils.unwrapContext(cluster);
    final FlinkTypeFactory typeFactory = ShortcutUtils.unwrapTypeFactory(cluster);
    return of(context, typeFactory, resolvedFunction);
}
Also used: org.apache.flink.table.planner.calcite.FlinkContext, org.apache.flink.table.planner.calcite.FlinkTypeFactory

Example 10 with FlinkTypeFactory

Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.

From the class TryCastConverter, method convert:

@Override
public RexNode convert(CallExpression call, CallExpressionConvertRule.ConvertContext context) {
    checkArgumentNumber(call, 2);
    final FlinkTypeFactory typeFactory = context.getTypeFactory();
    final RexNode child = context.toRexNode(call.getChildren().get(0));
    final TypeLiteralExpression targetType = (TypeLiteralExpression) call.getChildren().get(1);
    RelDataType targetRelDataType =
            typeFactory.createTypeWithNullability(
                    typeFactory.createFieldTypeFromLogicalType(
                            targetType.getOutputDataType().getLogicalType()),
                    true);
    return context.getRelBuilder()
            .getRexBuilder()
            .makeCall(targetRelDataType, FlinkSqlOperatorTable.TRY_CAST, Collections.singletonList(child));
}
Also used: org.apache.calcite.rel.type.RelDataType, org.apache.calcite.rex.RexNode, org.apache.flink.table.expressions.TypeLiteralExpression, org.apache.flink.table.planner.calcite.FlinkTypeFactory
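
The detail worth noting above is that the target type is always made nullable: TRY_CAST returns NULL when the cast fails at runtime, so the result type must admit NULL even when the declared target type does not. A minimal sketch of that type adjustment, assuming FlinkTypeFactory.INSTANCE() as in Example 7; the wrapper class and main method are ours:

import org.apache.calcite.rel.type.RelDataType;
import org.apache.flink.table.planner.calcite.FlinkTypeFactory;
import org.apache.flink.table.types.logical.IntType;

public class TryCastTypeSketch {
    public static void main(String[] args) {
        FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();

        // start from INT NOT NULL ...
        RelDataType notNullInt =
                factory.createFieldTypeFromLogicalType(new IntType(false));

        // ... and force nullability, as TryCastConverter does for the target type
        RelDataType nullableInt = factory.createTypeWithNullability(notNullInt, true);
        System.out.println(nullableInt.isNullable()); // true
    }
}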

Aggregations

FlinkTypeFactory (org.apache.flink.table.planner.calcite.FlinkTypeFactory): 18
RelDataType (org.apache.calcite.rel.type.RelDataType): 13
RexNode (org.apache.calcite.rex.RexNode): 9
LogicalType (org.apache.flink.table.types.logical.LogicalType): 8
ArrayList (java.util.ArrayList): 7
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 6
List (java.util.List): 5
RexBuilder (org.apache.calcite.rex.RexBuilder): 5
RowType (org.apache.flink.table.types.logical.RowType): 4
BigDecimal (java.math.BigDecimal): 3
Arrays (java.util.Arrays): 3
Collections (java.util.Collections): 3
Collectors (java.util.stream.Collectors): 3
LogicalTableScan (org.apache.calcite.rel.logical.LogicalTableScan): 3
RexInputRef (org.apache.calcite.rex.RexInputRef): 3
TableException (org.apache.flink.table.api.TableException): 3
Map (java.util.Map): 2
Optional (java.util.Optional): 2
Set (java.util.Set): 2
Function (java.util.function.Function): 2