Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.
Class TypeInferenceOperandChecker, method insertImplicitCasts.
private void insertImplicitCasts(SqlCallBinding callBinding, List<DataType> expectedDataTypes) {
    final FlinkTypeFactory flinkTypeFactory = unwrapTypeFactory(callBinding);
    final List<SqlNode> operands = callBinding.operands();
    for (int i = 0; i < operands.size(); i++) {
        final LogicalType expectedType = expectedDataTypes.get(i).getLogicalType();
        final LogicalType argumentType = toLogicalType(callBinding.getOperandType(i));
        if (!supportsAvoidingCast(argumentType, expectedType)) {
            final RelDataType expectedRelDataType =
                    flinkTypeFactory.createFieldTypeFromLogicalType(expectedType);
            final SqlNode castedOperand = castTo(operands.get(i), expectedRelDataType);
            callBinding.getCall().setOperand(i, castedOperand);
            updateInferredType(callBinding.getValidator(), castedOperand, expectedRelDataType);
        }
    }
}
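The private castTo helper referenced above is not shown. As a rough sketch of what cast insertion at the SqlNode level can look like, assuming Calcite's SqlStdOperatorTable.CAST and SqlTypeUtil.convertTypeToSpec (the wrapping class and helper body are illustrative, not necessarily Flink's exact code):

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeUtil;

class CastSketch {
    // Wraps an operand in CAST(operand AS type) at the SqlNode level,
    // carrying over the nullability of the target type.
    static SqlNode castTo(SqlNode node, RelDataType type) {
        return SqlStdOperatorTable.CAST.createCall(
                SqlParserPos.ZERO,
                node,
                SqlTypeUtil.convertTypeToSpec(type).withNullable(type.isNullable()));
    }
}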
Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.
Class DynamicTableSourceSpecSerdeTest, method testDynamicTableSinkSpecSerde.
public static Stream<DynamicTableSourceSpec> testDynamicTableSinkSpecSerde() {
    Map<String, String> options1 = new HashMap<>();
    options1.put("connector", FileSystemTableFactory.IDENTIFIER);
    options1.put("format", TestCsvFormatFactory.IDENTIFIER);
    options1.put("path", "/tmp");

    final ResolvedSchema resolvedSchema1 =
            new ResolvedSchema(
                    Collections.singletonList(Column.physical("a", DataTypes.BIGINT())),
                    Collections.emptyList(),
                    null);
    final CatalogTable catalogTable1 =
            CatalogTable.of(
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema1).build(),
                    null,
                    Collections.emptyList(),
                    options1);
    DynamicTableSourceSpec spec1 =
            new DynamicTableSourceSpec(
                    ContextResolvedTable.temporary(
                            ObjectIdentifier.of(
                                    DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                            new ResolvedCatalogTable(catalogTable1, resolvedSchema1)),
                    null);

    Map<String, String> options2 = new HashMap<>();
    options2.put("connector", TestValuesTableFactory.IDENTIFIER);
    options2.put("disable-lookup", "true");
    options2.put("enable-watermark-push-down", "true");
    options2.put("filterable-fields", "b");
    options2.put("bounded", "false");
    options2.put("readable-metadata", "m1:INT, m2:STRING");

    final ResolvedSchema resolvedSchema2 =
            new ResolvedSchema(
                    Arrays.asList(
                            Column.physical("a", DataTypes.BIGINT()),
                            Column.physical("b", DataTypes.INT()),
                            Column.physical("c", DataTypes.STRING()),
                            Column.physical("p", DataTypes.STRING()),
                            Column.metadata("m1", DataTypes.INT(), null, false),
                            Column.metadata("m2", DataTypes.STRING(), null, false),
                            Column.physical("ts", DataTypes.TIMESTAMP(3))),
                    Collections.emptyList(),
                    null);
    final CatalogTable catalogTable2 =
            CatalogTable.of(
                    Schema.newBuilder().fromResolvedSchema(resolvedSchema2).build(),
                    null,
                    Collections.emptyList(),
                    options2);

    FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();
    RexBuilder rexBuilder = new RexBuilder(factory);
    DynamicTableSourceSpec spec2 =
            new DynamicTableSourceSpec(
                    ContextResolvedTable.temporary(
                            ObjectIdentifier.of(
                                    DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                            new ResolvedCatalogTable(catalogTable2, resolvedSchema2)),
                    Arrays.asList(
                            new ProjectPushDownSpec(
                                    new int[][] {{0}, {1}, {4}, {6}},
                                    RowType.of(
                                            new LogicalType[] {
                                                new BigIntType(), new IntType(), new IntType(), new TimestampType(3)
                                            },
                                            new String[] {"a", "b", "m1", "ts"})),
                            new ReadingMetadataSpec(
                                    Arrays.asList("m1", "m2"),
                                    RowType.of(
                                            new LogicalType[] {
                                                new BigIntType(), new IntType(), new IntType(), new TimestampType(3)
                                            },
                                            new String[] {"a", "b", "m1", "ts"})),
                            new FilterPushDownSpec(
                                    Collections.singletonList(
                                            // b >= 10
                                            rexBuilder.makeCall(
                                                    SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
                                                    rexBuilder.makeInputRef(
                                                            factory.createSqlType(SqlTypeName.INTEGER), 1),
                                                    rexBuilder.makeExactLiteral(new BigDecimal(10))))),
                            new WatermarkPushDownSpec(
                                    rexBuilder.makeCall(
                                            SqlStdOperatorTable.MINUS,
                                            rexBuilder.makeInputRef(
                                                    factory.createSqlType(SqlTypeName.TIMESTAMP, 3), 3),
                                            rexBuilder.makeIntervalLiteral(
                                                    BigDecimal.valueOf(1000),
                                                    new SqlIntervalQualifier(
                                                            TimeUnit.SECOND,
                                                            2,
                                                            TimeUnit.SECOND,
                                                            6,
                                                            SqlParserPos.ZERO))),
                                    5000,
                                    RowType.of(
                                            new BigIntType(),
                                            new IntType(),
                                            new IntType(),
                                            new TimestampType(false, TimestampKind.ROWTIME, 3))),
                            new SourceWatermarkSpec(
                                    true,
                                    RowType.of(
                                            new BigIntType(),
                                            new IntType(),
                                            new IntType(),
                                            new TimestampType(false, TimestampKind.ROWTIME, 3))),
                            new LimitPushDownSpec(100),
                            new PartitionPushDownSpec(
                                    Arrays.asList(
                                            new HashMap<String, String>() {
                                                {
                                                    put("p", "A");
                                                }
                                            },
                                            new HashMap<String, String>() {
                                                {
                                                    put("p", "B");
                                                }
                                            }))));
    return Stream.of(spec1, spec2);
}
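For readers who only care about the FlinkTypeFactory part of this test, the sketch below isolates how the pushed-down filter b >= 10 is built with a RexBuilder backed by the factory; it uses only the calls that appear in the test itself, while the wrapping class and method name are invented for illustration:

import java.math.BigDecimal;

import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.flink.table.planner.calcite.FlinkTypeFactory;

class FilterRexSketch {
    // FlinkTypeFactory backs the RexBuilder so planner-specific types are available.
    static RexNode bGreaterEqualTen() {
        FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();
        RexBuilder rexBuilder = new RexBuilder(factory);
        // b >= 10, where input ref 1 points at column "b" (INT) of the schema above
        return rexBuilder.makeCall(
                SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
                rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.INTEGER), 1),
                rexBuilder.makeExactLiteral(new BigDecimal(10)));
    }
}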
Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.
Class ExpressionConverter, method visit.
@Override
public RexNode visit(ValueLiteralExpression valueLiteral) {
    LogicalType type = fromDataTypeToLogicalType(valueLiteral.getOutputDataType());
    RexBuilder rexBuilder = relBuilder.getRexBuilder();
    FlinkTypeFactory typeFactory = (FlinkTypeFactory) relBuilder.getTypeFactory();
    RelDataType relDataType = typeFactory.createFieldTypeFromLogicalType(type);
    if (valueLiteral.isNull()) {
        return rexBuilder.makeNullLiteral(relDataType);
    }
    Object value = null;
    switch (type.getTypeRoot()) {
        case DECIMAL:
        case TINYINT:
        case SMALLINT:
        case INTEGER:
        case BIGINT:
        case FLOAT:
        case DOUBLE:
            value = extractValue(valueLiteral, BigDecimal.class);
            break;
        case VARCHAR:
        case CHAR:
            value = extractValue(valueLiteral, String.class);
            break;
        case BINARY:
        case VARBINARY:
            value = new ByteString(extractValue(valueLiteral, byte[].class));
            break;
        case INTERVAL_YEAR_MONTH:
            // convert to total months
            value = BigDecimal.valueOf(extractValue(valueLiteral, Period.class).toTotalMonths());
            break;
        case INTERVAL_DAY_TIME:
            // TODO the planner supports only milliseconds precision
            // convert to total millis
            value = BigDecimal.valueOf(extractValue(valueLiteral, Duration.class).toMillis());
            break;
        case DATE:
            value = DateString.fromDaysSinceEpoch(
                    (int) extractValue(valueLiteral, LocalDate.class).toEpochDay());
            break;
        case TIME_WITHOUT_TIME_ZONE:
            // TODO the type factory strips the precision; for literals we can already be
            // more lenient. Moreover, conversion from long supports precision up to
            // TIME(3); the planner does not support higher precisions.
            TimeType timeType = (TimeType) type;
            int precision = timeType.getPrecision();
            relDataType = typeFactory.createSqlType(SqlTypeName.TIME, Math.min(precision, 3));
            value = TimeString.fromMillisOfDay(
                    extractValue(valueLiteral, LocalTime.class).get(ChronoField.MILLI_OF_DAY));
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            LocalDateTime datetime = extractValue(valueLiteral, LocalDateTime.class);
            value = fromLocalDateTime(datetime);
            break;
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            // normalize to UTC
            Instant instant = extractValue(valueLiteral, Instant.class);
            value = fromLocalDateTime(instant.atOffset(ZoneOffset.UTC).toLocalDateTime());
            break;
        default:
            value = extractValue(valueLiteral, Object.class);
            if (value instanceof TimePointUnit) {
                value = commonToCalcite((TimePointUnit) value);
            } else if (value instanceof TimeIntervalUnit) {
                value = commonToCalcite((TimeIntervalUnit) value);
            }
            break;
    }
    return rexBuilder.makeLiteral(
            value,
            relDataType,
            // allowCast = true: the builder may adjust the value to match relDataType,
            // e.g. by wrapping it via RexBuilder#makeCast
            true);
}
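The DATE and TIME branches translate Java time values into Calcite's literal wrappers. A small self-contained sketch of those two conversions, with arbitrarily chosen example values:

import java.time.LocalDate;
import java.time.LocalTime;
import java.time.temporal.ChronoField;

import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimeString;

class TemporalLiteralSketch {
    public static void main(String[] args) {
        // DATE literals are carried as days since epoch...
        DateString date = DateString.fromDaysSinceEpoch(
                (int) LocalDate.of(2024, 1, 1).toEpochDay());
        // ...and TIME literals as millis of day (precision capped at TIME(3)).
        TimeString time = TimeString.fromMillisOfDay(
                LocalTime.NOON.get(ChronoField.MILLI_OF_DAY));
        System.out.println(date + " " + time); // 2024-01-01 12:00:00
    }
}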
Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.
Class BridgingSqlFunction, method of.
/**
* Creates an instance of a scalar or table function during translation.
*/
public static BridgingSqlFunction of(
        RelOptCluster cluster, ContextResolvedFunction resolvedFunction) {
    final FlinkContext context = ShortcutUtils.unwrapContext(cluster);
    final FlinkTypeFactory typeFactory = ShortcutUtils.unwrapTypeFactory(cluster);
    return of(context, typeFactory, resolvedFunction);
}
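Because the context and type factory are unwrapped inside of, a caller only needs the cluster and the resolved function. A hedged sketch of a possible call site; the helper method and wrapping class are hypothetical, not Flink code:

import java.util.List;

import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.rex.RexNode;
import org.apache.flink.table.catalog.ContextResolvedFunction;
import org.apache.flink.table.planner.functions.bridging.BridgingSqlFunction;

class BridgingCallSketch {
    // Builds a RexNode call to a catalog function via the bridging SqlFunction.
    static RexNode makeFunctionCall(
            RelOptCluster cluster,
            ContextResolvedFunction resolvedFunction,
            List<RexNode> operands) {
        BridgingSqlFunction sqlFunction = BridgingSqlFunction.of(cluster, resolvedFunction);
        return cluster.getRexBuilder().makeCall(sqlFunction, operands);
    }
}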
Use of org.apache.flink.table.planner.calcite.FlinkTypeFactory in project flink by apache.
Class TryCastConverter, method convert.
@Override
public RexNode convert(CallExpression call, CallExpressionConvertRule.ConvertContext context) {
    checkArgumentNumber(call, 2);
    final FlinkTypeFactory typeFactory = context.getTypeFactory();
    final RexNode child = context.toRexNode(call.getChildren().get(0));
    final TypeLiteralExpression targetType = (TypeLiteralExpression) call.getChildren().get(1);
    RelDataType targetRelDataType =
            typeFactory.createTypeWithNullability(
                    typeFactory.createFieldTypeFromLogicalType(
                            targetType.getOutputDataType().getLogicalType()),
                    true);
    return context.getRelBuilder()
            .getRexBuilder()
            .makeCall(
                    targetRelDataType,
                    FlinkSqlOperatorTable.TRY_CAST,
                    Collections.singletonList(child));
}
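Note that the converter forces the target type to be nullable, which matches TRY_CAST's semantics of returning NULL instead of raising an error when the cast fails. A minimal usage sketch, assuming a Flink version that ships TRY_CAST; the demo class is invented for illustration:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

class TryCastDemo {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // The cast fails, so TRY_CAST yields NULL rather than a runtime error.
        tEnv.executeSql("SELECT TRY_CAST('not-a-number' AS INT)").print();
    }
}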