
Example 81 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: class TypeInferenceOperandInference, method inferOperandTypesOrError.

// --------------------------------------------------------------------------------------------
private void inferOperandTypesOrError(FlinkTypeFactory typeFactory, CallContext callContext, RelDataType[] operandTypes) {
    final List<DataType> expectedDataTypes;
    // typed arguments have highest priority
    if (typeInference.getTypedArguments().isPresent()) {
        expectedDataTypes = typeInference.getTypedArguments().get();
    } else {
        expectedDataTypes = typeInference.getInputTypeStrategy().inferInputTypes(callContext, false).orElse(null);
    }
    // early out for invalid input
    if (expectedDataTypes == null || expectedDataTypes.size() != operandTypes.length) {
        return;
    }
    for (int i = 0; i < operandTypes.length; i++) {
        final LogicalType inferredType = expectedDataTypes.get(i).getLogicalType();
        operandTypes[i] = typeFactory.createFieldTypeFromLogicalType(inferredType);
    }
}
Also used : DataType(org.apache.flink.table.types.DataType) RelDataType(org.apache.calcite.rel.type.RelDataType) LogicalType(org.apache.flink.table.types.logical.LogicalType)
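
The snippet above resolves the expected operand types either from declared typed arguments or from the input type strategy, then converts them to Calcite RelDataTypes. Below is a minimal, hypothetical sketch (the class name MyScalarFunction and its signature are made up, not taken from the snippet) of how a function author can declare typed arguments so that the "typed arguments have highest priority" branch is taken:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.table.types.inference.TypeInference;
import org.apache.flink.table.types.inference.TypeStrategies;

public class MyScalarFunction extends ScalarFunction {

    public String eval(Long id, String name) {
        return id + ":" + name;
    }

    @Override
    public TypeInference getTypeInference(DataTypeFactory typeFactory) {
        // Because typedArguments(...) is set, TypeInference.getTypedArguments() is present,
        // so inferOperandTypesOrError uses these DataTypes directly instead of consulting
        // the input type strategy.
        return TypeInference.newBuilder()
                .typedArguments(DataTypes.BIGINT(), DataTypes.STRING())
                .outputTypeStrategy(TypeStrategies.explicit(DataTypes.STRING()))
                .build();
    }
}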

Example 82 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: class MapAndMultisetToStringCastRule, method generateCodeBlockInternal.

/* Example generated code for MAP<STRING, INTERVAL MONTH> -> CHAR(12):

    isNull$0 = _myInputIsNull;
    if (!isNull$0) {
        org.apache.flink.table.data.ArrayData keys$2 = _myInput.keyArray();
        org.apache.flink.table.data.ArrayData values$3 = _myInput.valueArray();
        builder$1.setLength(0);
        builder$1.append("{");
        for (int i$5 = 0; i$5 < _myInput.size(); i$5++) {
            if (builder$1.length() > 12) {
                break;
            }
            if (i$5 != 0) {
                builder$1.append(", ");
            }
            org.apache.flink.table.data.binary.BinaryStringData key$6 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
            boolean keyIsNull$7 = keys$2.isNullAt(i$5);
            int value$8 = -1;
            boolean valueIsNull$9 = values$3.isNullAt(i$5);
            if (!keyIsNull$7) {
                key$6 = ((org.apache.flink.table.data.binary.BinaryStringData) keys$2.getString(i$5));
                builder$1.append(key$6);
            } else {
                builder$1.append("NULL");
            }
            builder$1.append("=");
            if (!valueIsNull$9) {
                value$8 = values$3.getInt(i$5);
                isNull$2 = valueIsNull$9;
                if (!isNull$2) {
                    result$3 = org.apache.flink.table.data.binary.BinaryStringData.fromString("" + value$8);
                    isNull$2 = result$3 == null;
                } else {
                    result$3 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
                }
                builder$1.append(result$3);
            } else {
                builder$1.append("NULL");
            }
        }
        builder$1.append("}");
        java.lang.String resultString$4;
        resultString$4 = builder$1.toString();
        if (builder$1.length() > 12) {
            resultString$4 = builder$1.substring(0, java.lang.Math.min(builder$1.length(), 12));
        } else {
            if (resultString$4.length() < 12) {
                int padLength$10;
                padLength$10 = 12 - resultString$4.length();
                java.lang.StringBuilder sbPadding$11;
                sbPadding$11 = new java.lang.StringBuilder();
                for (int i$12 = 0; i$12 < padLength$10; i$12++) {
                    sbPadding$11.append(" ");
                }
                resultString$4 = resultString$4 + sbPadding$11.toString();
            }
        }
        result$1 = org.apache.flink.table.data.binary.BinaryStringData.fromString(resultString$4);
        isNull$0 = result$1 == null;
    } else {
        result$1 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
    }

    */
@Override
protected String generateCodeBlockInternal(CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) {
    final LogicalType keyType = inputLogicalType.is(LogicalTypeRoot.MULTISET) ? ((MultisetType) inputLogicalType).getElementType() : ((MapType) inputLogicalType).getKeyType();
    final LogicalType valueType = inputLogicalType.is(LogicalTypeRoot.MULTISET) ? new IntType(false) : ((MapType) inputLogicalType).getValueType();
    final String builderTerm = newName("builder");
    context.declareClassField(className(StringBuilder.class), builderTerm, constructorCall(StringBuilder.class));
    final String keyArrayTerm = newName("keys");
    final String valueArrayTerm = newName("values");
    final String resultStringTerm = newName("resultString");
    final int length = LogicalTypeChecks.getLength(targetLogicalType);
    CastRuleUtils.CodeWriter writer = new CastRuleUtils.CodeWriter()
            .declStmt(ArrayData.class, keyArrayTerm, methodCall(inputTerm, "keyArray"))
            .declStmt(ArrayData.class, valueArrayTerm, methodCall(inputTerm, "valueArray"))
            .stmt(methodCall(builderTerm, "setLength", 0))
            .stmt(methodCall(builderTerm, "append", strLiteral("{")))
            .forStmt(methodCall(inputTerm, "size"), (indexTerm, loopBodyWriter) -> {
        String keyTerm = newName("key");
        String keyIsNullTerm = newName("keyIsNull");
        String valueTerm = newName("value");
        String valueIsNullTerm = newName("valueIsNull");
        // Null check is done at the key array access level
        CastCodeBlock keyCast =
                CastRuleProvider.generateAlwaysNonNullCodeBlock(context, keyTerm, keyType, STRING_TYPE);
        // Null check is done at the value array access level
        CastCodeBlock valueCast =
                CastRuleProvider.generateAlwaysNonNullCodeBlock(context, valueTerm, valueType, STRING_TYPE);
        Consumer<CastRuleUtils.CodeWriter> appendNonNullValue = bodyWriter -> bodyWriter
                .assignStmt(valueTerm, rowFieldReadAccess(indexTerm, valueArrayTerm, valueType))
                .append(valueCast)
                .stmt(methodCall(builderTerm, "append", valueCast.getReturnTerm()));
        if (!context.legacyBehaviour() && couldTrim(length)) {
            loopBodyWriter.ifStmt(stringExceedsLength(builderTerm, length), CastRuleUtils.CodeWriter::breakStmt);
        }
        loopBodyWriter
                .ifStmt(
                        indexTerm + " != 0",
                        thenBodyWriter -> thenBodyWriter.stmt(methodCall(builderTerm, "append", strLiteral(", "))))
                .declPrimitiveStmt(keyType, keyTerm)
                .declStmt(boolean.class, keyIsNullTerm, methodCall(keyArrayTerm, "isNullAt", indexTerm))
                .declPrimitiveStmt(valueType, valueTerm)
                .declStmt(boolean.class, valueIsNullTerm, methodCall(valueArrayTerm, "isNullAt", indexTerm))
                .ifStmt(
                        "!" + keyIsNullTerm,
                        thenBodyWriter -> thenBodyWriter
                                .assignStmt(keyTerm, rowFieldReadAccess(indexTerm, keyArrayTerm, keyType))
                                .append(keyCast)
                                .stmt(methodCall(builderTerm, "append", keyCast.getReturnTerm())),
                        elseBodyWriter -> elseBodyWriter.stmt(methodCall(builderTerm, "append", nullLiteral(context.legacyBehaviour()))))
                .stmt(methodCall(builderTerm, "append", strLiteral("=")));
        if (inputLogicalType.is(LogicalTypeRoot.MULTISET)) {
            appendNonNullValue.accept(loopBodyWriter);
        } else {
            loopBodyWriter.ifStmt("!" + valueIsNullTerm, appendNonNullValue, elseBodyWriter -> elseBodyWriter.stmt(methodCall(builderTerm, "append", nullLiteral(context.legacyBehaviour()))));
        }
    }).stmt(methodCall(builderTerm, "append", strLiteral("}")));
    return CharVarCharTrimPadCastRule
            .padAndTrimStringIfNeeded(writer, targetLogicalType, context.legacyBehaviour(), length, resultStringTerm, builderTerm)
            .assignStmt(returnVariable, CastRuleUtils.staticCall(BINARY_STRING_DATA_FROM_STRING(), resultStringTerm))
            .toString();
}
Also used : STRING_TYPE(org.apache.flink.table.types.logical.VarCharType.STRING_TYPE) CastRuleUtils.strLiteral(org.apache.flink.table.planner.functions.casting.CastRuleUtils.strLiteral) BINARY_STRING_DATA_FROM_STRING(org.apache.flink.table.planner.codegen.calls.BuiltInMethods.BINARY_STRING_DATA_FROM_STRING) IntType(org.apache.flink.table.types.logical.IntType) CodeGenUtils.className(org.apache.flink.table.planner.codegen.CodeGenUtils.className) CharVarCharTrimPadCastRule.couldTrim(org.apache.flink.table.planner.functions.casting.CharVarCharTrimPadCastRule.couldTrim) MapType(org.apache.flink.table.types.logical.MapType) CodeGenUtils.rowFieldReadAccess(org.apache.flink.table.planner.codegen.CodeGenUtils.rowFieldReadAccess) CastRuleUtils.nullLiteral(org.apache.flink.table.planner.functions.casting.CastRuleUtils.nullLiteral) CastRuleUtils.methodCall(org.apache.flink.table.planner.functions.casting.CastRuleUtils.methodCall) Consumer(java.util.function.Consumer) ArrayData(org.apache.flink.table.data.ArrayData) CharVarCharTrimPadCastRule.stringExceedsLength(org.apache.flink.table.planner.functions.casting.CharVarCharTrimPadCastRule.stringExceedsLength) LogicalType(org.apache.flink.table.types.logical.LogicalType) CastRuleUtils.constructorCall(org.apache.flink.table.planner.functions.casting.CastRuleUtils.constructorCall) LogicalTypeFamily(org.apache.flink.table.types.logical.LogicalTypeFamily) CodeGenUtils.newName(org.apache.flink.table.planner.codegen.CodeGenUtils.newName) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) MultisetType(org.apache.flink.table.types.logical.MultisetType) LogicalTypeChecks(org.apache.flink.table.types.logical.utils.LogicalTypeChecks)
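
The rule above code-generates the string rendering of a map or multiset and then delegates trimming and padding to CharVarCharTrimPadCastRule. A minimal, hypothetical usage sketch of a cast this rule handles (the literal and the environment setup are illustrative, not part of the rule's source):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class MapToCharCastExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // Casting a MAP value to CHAR(12): the generated code appends "{key=value, ...}" and then
        // pads or trims the builder content to the CHAR length, as in the comment above.
        env.sqlQuery("SELECT CAST(MAP['a', 1, 'b', 2] AS CHAR(12)) AS printed").execute().print();
    }
}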

Example 83 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: class TypeInferenceOperandChecker, method insertImplicitCasts.

private void insertImplicitCasts(SqlCallBinding callBinding, List<DataType> expectedDataTypes) {
    final FlinkTypeFactory flinkTypeFactory = unwrapTypeFactory(callBinding);
    final List<SqlNode> operands = callBinding.operands();
    for (int i = 0; i < operands.size(); i++) {
        final LogicalType expectedType = expectedDataTypes.get(i).getLogicalType();
        final LogicalType argumentType = toLogicalType(callBinding.getOperandType(i));
        if (!supportsAvoidingCast(argumentType, expectedType)) {
            final RelDataType expectedRelDataType = flinkTypeFactory.createFieldTypeFromLogicalType(expectedType);
            final SqlNode castedOperand = castTo(operands.get(i), expectedRelDataType);
            callBinding.getCall().setOperand(i, castedOperand);
            updateInferredType(callBinding.getValidator(), castedOperand, expectedRelDataType);
        }
    }
}
Also used : FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) FlinkTypeFactory.toLogicalType(org.apache.flink.table.planner.calcite.FlinkTypeFactory.toLogicalType) LogicalType(org.apache.flink.table.types.logical.LogicalType) RelDataType(org.apache.calcite.rel.type.RelDataType) SqlNode(org.apache.calcite.sql.SqlNode)
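
insertImplicitCasts only wraps an operand in a CAST when the argument type cannot be used as-is for the expected type. A minimal sketch of that check, with types chosen purely for illustration (the commented outcomes reflect my reading of supportsAvoidingCast, not output from the snippet):

import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;

import static org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsAvoidingCast;

public class ImplicitCastCheckExample {
    public static void main(String[] args) {
        LogicalType argumentType = new IntType();
        LogicalType expectedType = new BigIntType();
        // INT cannot stand in for BIGINT without changing the representation, so the operand
        // would be wrapped in CAST(operand AS BIGINT) and the validator's inferred type updated.
        System.out.println(supportsAvoidingCast(argumentType, expectedType));
        // Identical types need no cast, so the operand is left untouched.
        System.out.println(supportsAvoidingCast(new BigIntType(), new BigIntType()));
    }
}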

Example 84 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: class RowToRowCastRule, method generateCodeBlockInternal.

/* Example generated code for ROW<`f0` BIGINT, `f1` BIGINT, `f2` STRING, `f3` ARRAY<STRING>>:

    isNull$29 = false;
    if (!isNull$29) {
        writer$32.reset();
        boolean f0IsNull$34 = row$26.isNullAt(0);
        if (!f0IsNull$34) {
            int f0Value$33 = row$26.getInt(0);
            result$35 = ((long) (f0Value$33));
            if (!false) {
                writer$32.writeLong(0, result$35);
            } else {
                writer$32.setNullAt(0);
            }
        } else {
            writer$32.setNullAt(0);
        }
        boolean f1IsNull$37 = row$26.isNullAt(1);
        if (!f1IsNull$37) {
            int f1Value$36 = row$26.getInt(1);
            result$38 = ((long) (f1Value$36));
            if (!false) {
                writer$32.writeLong(1, result$38);
            } else {
                writer$32.setNullAt(1);
            }
        } else {
            writer$32.setNullAt(1);
        }
        boolean f2IsNull$40 = row$26.isNullAt(2);
        if (!f2IsNull$40) {
            int f2Value$39 = row$26.getInt(2);
            isNull$41 = f2IsNull$40;
            if (!isNull$41) {
                result$42 =
                        org.apache.flink.table.data.binary.BinaryStringData.fromString(
                                org.apache.flink.table.utils.DateTimeUtils
                                        .formatTimestampMillis(f2Value$39, 0));
                isNull$41 = result$42 == null;
            } else {
                result$42 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
            }
            if (!isNull$41) {
                writer$32.writeString(2, result$42);
            } else {
                writer$32.setNullAt(2);
            }
        } else {
            writer$32.setNullAt(2);
        }
        boolean f3IsNull$44 = row$26.isNullAt(3);
        if (!f3IsNull$44) {
            org.apache.flink.table.data.ArrayData f3Value$43 = row$26.getArray(3);
            isNull$45 = f3IsNull$44;
            if (!isNull$45) {
                Object[] objArray$47 = new Object[f3Value$43.size()];
                for (int i$48 = 0; i$48 < f3Value$43.size(); i$48++) {
                    if (!f3Value$43.isNullAt(i$48)) {
                        objArray$47[i$48] =
                                ((org.apache.flink.table.data.binary.BinaryStringData)
                                        f3Value$43.getString(i$48));
                    }
                }
                result$46 = new org.apache.flink.table.data.GenericArrayData(objArray$47);
                isNull$45 = result$46 == null;
            } else {
                result$46 = null;
            }
            if (!isNull$45) {
                writer$32.writeArray(3, result$46, typeSerializer$49);
            } else {
                writer$32.setNullAt(3);
            }
        } else {
            writer$32.setNullAt(3);
        }
        writer$32.complete();
        result$30 = row$31;
        isNull$29 = result$30 == null;
    } else {
        result$30 = null;
    }

     */
@Override
protected String generateCodeBlockInternal(CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) {
    final List<LogicalType> inputFields = LogicalTypeChecks.getFieldTypes(inputLogicalType);
    final List<LogicalType> targetFields = LogicalTypeChecks.getFieldTypes(targetLogicalType);
    // Declare the row and row data
    final String rowTerm = newName("row");
    final String writerTerm = newName("writer");
    context.declareClassField(className(BinaryRowData.class), rowTerm, constructorCall(BinaryRowData.class, inputFields.size()));
    context.declareClassField(className(BinaryRowWriter.class), writerTerm, constructorCall(BinaryRowWriter.class, rowTerm));
    final CastRuleUtils.CodeWriter writer = new CastRuleUtils.CodeWriter().stmt(methodCall(writerTerm, "reset"));
    for (int i = 0; i < targetFields.size(); i++) {
        final LogicalType inputFieldType = inputFields.get(i);
        final LogicalType targetFieldType = targetFields.get(i);
        final String indexTerm = String.valueOf(i);
        final String fieldTerm = newName("f" + indexTerm + "Value");
        final String fieldIsNullTerm = newName("f" + indexTerm + "IsNull");
        // Null check is done at the row access level
        final CastCodeBlock codeBlock =
                CastRuleProvider.generateAlwaysNonNullCodeBlock(context, fieldTerm, inputFieldType, targetFieldType);
        final String readField = rowFieldReadAccess(indexTerm, inputTerm, inputFieldType);
        final String writeField = binaryWriterWriteField(context, writerTerm, targetFieldType, indexTerm, codeBlock.getReturnTerm());
        final String writeNull = binaryWriterWriteNull(writerTerm, targetFieldType, indexTerm);
        writer
                .declStmt(boolean.class, fieldIsNullTerm, methodCall(inputTerm, "isNullAt", indexTerm))
                .ifStmt(
                        "!" + fieldIsNullTerm,
                        thenBodyWriter -> thenBodyWriter
                                .declPrimitiveStmt(inputFieldType, fieldTerm, readField)
                                .append(codeBlock)
                                .ifStmt(
                                        "!" + codeBlock.getIsNullTerm(),
                                        thenCastResultWriter -> thenCastResultWriter.stmt(writeField),
                                        elseCastResultWriter -> elseCastResultWriter.stmt(writeNull)),
                        elseBodyWriter -> elseBodyWriter.stmt(writeNull));
    }
    writer.stmt(methodCall(writerTerm, "complete")).assignStmt(returnVariable, rowTerm);
    return writer.toString();
}
Also used : IntStream(java.util.stream.IntStream) RowData(org.apache.flink.table.data.RowData) CodeGenUtils.className(org.apache.flink.table.planner.codegen.CodeGenUtils.className) CastRuleUtils.binaryWriterWriteField(org.apache.flink.table.planner.functions.casting.CastRuleUtils.binaryWriterWriteField) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) CodeGenUtils.rowFieldReadAccess(org.apache.flink.table.planner.codegen.CodeGenUtils.rowFieldReadAccess) CastRuleUtils.methodCall(org.apache.flink.table.planner.functions.casting.CastRuleUtils.methodCall) CastRuleUtils.binaryWriterWriteNull(org.apache.flink.table.planner.functions.casting.CastRuleUtils.binaryWriterWriteNull) BinaryRowWriter(org.apache.flink.table.data.writer.BinaryRowWriter) List(java.util.List) LogicalType(org.apache.flink.table.types.logical.LogicalType) CastRuleUtils.constructorCall(org.apache.flink.table.planner.functions.casting.CastRuleUtils.constructorCall) CodeGenUtils.newName(org.apache.flink.table.planner.codegen.CodeGenUtils.newName) LogicalTypeRoot(org.apache.flink.table.types.logical.LogicalTypeRoot) LogicalTypeChecks(org.apache.flink.table.types.logical.utils.LogicalTypeChecks)
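
The rule iterates the target row's fields and emits one null-checked, field-level cast per position. A minimal sketch of the kind of input and target row types involved (the field types are illustrative, loosely following the generated code above):

import java.util.List;

import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.types.logical.utils.LogicalTypeChecks;

public class RowToRowFieldTypesExample {
    public static void main(String[] args) {
        RowType inputType = RowType.of(new IntType(), new IntType(), new IntType(), new ArrayType(new IntType()));
        RowType targetType = RowType.of(
                new BigIntType(), new BigIntType(), VarCharType.STRING_TYPE, new ArrayType(VarCharType.STRING_TYPE));
        // generateCodeBlockInternal pairs the field types positionally and generates a cast plus a
        // BinaryRowWriter write (or setNullAt) for each pair.
        List<LogicalType> inputFields = LogicalTypeChecks.getFieldTypes(inputType);
        List<LogicalType> targetFields = LogicalTypeChecks.getFieldTypes(targetType);
        for (int i = 0; i < targetFields.size(); i++) {
            System.out.println(inputFields.get(i) + " -> " + targetFields.get(i));
        }
    }
}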

Example 85 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache: class DynamicTableSourceSpecSerdeTest, method testDynamicTableSinkSpecSerde.

public static Stream<DynamicTableSourceSpec> testDynamicTableSinkSpecSerde() {
    Map<String, String> options1 = new HashMap<>();
    options1.put("connector", FileSystemTableFactory.IDENTIFIER);
    options1.put("format", TestCsvFormatFactory.IDENTIFIER);
    options1.put("path", "/tmp");
    final ResolvedSchema resolvedSchema1 = new ResolvedSchema(Collections.singletonList(Column.physical("a", DataTypes.BIGINT())), Collections.emptyList(), null);
    final CatalogTable catalogTable1 = CatalogTable.of(Schema.newBuilder().fromResolvedSchema(resolvedSchema1).build(), null, Collections.emptyList(), options1);
    DynamicTableSourceSpec spec1 = new DynamicTableSourceSpec(ContextResolvedTable.temporary(ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"), new ResolvedCatalogTable(catalogTable1, resolvedSchema1)), null);
    Map<String, String> options2 = new HashMap<>();
    options2.put("connector", TestValuesTableFactory.IDENTIFIER);
    options2.put("disable-lookup", "true");
    options2.put("enable-watermark-push-down", "true");
    options2.put("filterable-fields", "b");
    options2.put("bounded", "false");
    options2.put("readable-metadata", "m1:INT, m2:STRING");
    final ResolvedSchema resolvedSchema2 = new ResolvedSchema(
            Arrays.asList(
                    Column.physical("a", DataTypes.BIGINT()),
                    Column.physical("b", DataTypes.INT()),
                    Column.physical("c", DataTypes.STRING()),
                    Column.physical("p", DataTypes.STRING()),
                    Column.metadata("m1", DataTypes.INT(), null, false),
                    Column.metadata("m2", DataTypes.STRING(), null, false),
                    Column.physical("ts", DataTypes.TIMESTAMP(3))),
            Collections.emptyList(),
            null);
    final CatalogTable catalogTable2 = CatalogTable.of(Schema.newBuilder().fromResolvedSchema(resolvedSchema2).build(), null, Collections.emptyList(), options2);
    FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();
    RexBuilder rexBuilder = new RexBuilder(factory);
    DynamicTableSourceSpec spec2 = new DynamicTableSourceSpec(
            ContextResolvedTable.temporary(
                    ObjectIdentifier.of(DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                    new ResolvedCatalogTable(catalogTable2, resolvedSchema2)),
            Arrays.asList(
                    new ProjectPushDownSpec(new int[][] { { 0 }, { 1 }, { 4 }, { 6 } }, RowType.of(new LogicalType[] { new BigIntType(), new IntType(), new IntType(), new TimestampType(3) }, new String[] { "a", "b", "m1", "ts" })),
                    new ReadingMetadataSpec(Arrays.asList("m1", "m2"), RowType.of(new LogicalType[] { new BigIntType(), new IntType(), new IntType(), new TimestampType(3) }, new String[] { "a", "b", "m1", "ts" })),
                    // b >= 10
                    new FilterPushDownSpec(Collections.singletonList(rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.INTEGER), 1), rexBuilder.makeExactLiteral(new BigDecimal(10))))),
                    new WatermarkPushDownSpec(
                            rexBuilder.makeCall(SqlStdOperatorTable.MINUS, rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.TIMESTAMP, 3), 3), rexBuilder.makeIntervalLiteral(BigDecimal.valueOf(1000), new SqlIntervalQualifier(TimeUnit.SECOND, 2, TimeUnit.SECOND, 6, SqlParserPos.ZERO))),
                            5000,
                            RowType.of(new BigIntType(), new IntType(), new IntType(), new TimestampType(false, TimestampKind.ROWTIME, 3))),
                    new SourceWatermarkSpec(true, RowType.of(new BigIntType(), new IntType(), new IntType(), new TimestampType(false, TimestampKind.ROWTIME, 3))),
                    new LimitPushDownSpec(100),
                    new PartitionPushDownSpec(Arrays.asList(
                            new HashMap<String, String>() {
                                {
                                    put("p", "A");
                                }
                            },
                            new HashMap<String, String>() {
                                {
                                    put("p", "B");
                                }
                            }))));
    return Stream.of(spec1, spec2);
}
Also used : WatermarkPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.WatermarkPushDownSpec) HashMap(java.util.HashMap) ProjectPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.ProjectPushDownSpec) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) DynamicTableSourceSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSourceSpec) LogicalType(org.apache.flink.table.types.logical.LogicalType) BigIntType(org.apache.flink.table.types.logical.BigIntType) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) ReadingMetadataSpec(org.apache.flink.table.planner.plan.abilities.source.ReadingMetadataSpec) BigDecimal(java.math.BigDecimal) IntType(org.apache.flink.table.types.logical.IntType) SourceWatermarkSpec(org.apache.flink.table.planner.plan.abilities.source.SourceWatermarkSpec) LimitPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.LimitPushDownSpec) PartitionPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.PartitionPushDownSpec) FilterPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.FilterPushDownSpec) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RexBuilder(org.apache.calcite.rex.RexBuilder) TimestampType(org.apache.flink.table.types.logical.TimestampType) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema)
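
The factory method above returns a Stream of specs, the shape JUnit 5 expects from a @MethodSource provider. A hedged sketch of how such a provider is typically consumed (the test class, package placement, and assertion are illustrative, not the actual serde test body):

import java.util.stream.Stream;

import org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSourceSpec;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import static org.assertj.core.api.Assertions.assertThat;

// Assumed to live next to DynamicTableSourceSpecSerdeTest (same package) so the factory is visible.
class DynamicTableSourceSpecSerdeSketchTest {

    static Stream<DynamicTableSourceSpec> specs() {
        // Reuses the fixture factory shown above.
        return DynamicTableSourceSpecSerdeTest.testDynamicTableSinkSpecSerde();
    }

    @ParameterizedTest
    @MethodSource("specs")
    void roundTrip(DynamicTableSourceSpec spec) {
        // A real serde test would write the spec into the compiled JSON plan and read it back,
        // then compare the result with the original; here we only check the fixture.
        assertThat(spec).isNotNull();
    }
}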

Aggregations

LogicalType (org.apache.flink.table.types.logical.LogicalType) 192
DataType (org.apache.flink.table.types.DataType) 53
RowType (org.apache.flink.table.types.logical.RowType) 53
RowData (org.apache.flink.table.data.RowData) 45
List (java.util.List) 29
ArrayList (java.util.ArrayList) 28
TableException (org.apache.flink.table.api.TableException) 25
TimestampType (org.apache.flink.table.types.logical.TimestampType) 25
Internal (org.apache.flink.annotation.Internal) 21
IntType (org.apache.flink.table.types.logical.IntType) 21
Map (java.util.Map) 20
ValidationException (org.apache.flink.table.api.ValidationException) 20
ArrayType (org.apache.flink.table.types.logical.ArrayType) 19
DecimalType (org.apache.flink.table.types.logical.DecimalType) 19
LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType) 17
Test (org.junit.Test) 17
BigIntType (org.apache.flink.table.types.logical.BigIntType) 16
LegacyTypeInformationType (org.apache.flink.table.types.logical.LegacyTypeInformationType) 16
GenericRowData (org.apache.flink.table.data.GenericRowData) 15
Arrays (java.util.Arrays) 14