
Example 31 with DataTypeFactory

use of org.apache.flink.table.catalog.DataTypeFactory in project flink-mirror by flink-ci.

the class BridgingSqlFunction method of.

/**
 * Creates an instance of a scalar or table function during translation.
 */
public static BridgingSqlFunction of(FlinkContext context, FlinkTypeFactory typeFactory, ContextResolvedFunction resolvedFunction) {
    final DataTypeFactory dataTypeFactory = context.getCatalogManager().getDataTypeFactory();
    final TypeInference typeInference = resolvedFunction.getDefinition().getTypeInference(dataTypeFactory);
    return of(dataTypeFactory, typeFactory, SqlKind.OTHER_FUNCTION, resolvedFunction, typeInference);
}
Also used : TypeInference(org.apache.flink.table.types.inference.TypeInference) BridgingUtils.createSqlReturnTypeInference(org.apache.flink.table.planner.functions.bridging.BridgingUtils.createSqlReturnTypeInference) BridgingUtils.createSqlOperandTypeInference(org.apache.flink.table.planner.functions.bridging.BridgingUtils.createSqlOperandTypeInference) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory)
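The factory handed to getTypeInference above is the catalog-aware DataTypeFactory; the same factory is what resolves unresolved or string-based types into fully resolved DataTypes. A minimal illustrative sketch of that use (the helper name and the type string below are arbitrary examples, not taken from the snippet above):

import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;

// Illustrative sketch only: "dataTypeFactory" is assumed to be obtained via
// context.getCatalogManager().getDataTypeFactory() as in the method above.
static LogicalType resolveExampleType(DataTypeFactory dataTypeFactory) {
    // The type string is an arbitrary example.
    DataType resolved = dataTypeFactory.createDataType("ROW<id INT, name STRING>");
    return resolved.getLogicalType();
}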

Example 32 with DataTypeFactory

use of org.apache.flink.table.catalog.DataTypeFactory in project flink-mirror by flink-ci.

the class BridgingSqlAggFunction method of.

/**
 * Creates an instance of an aggregate function during translation.
 */
public static BridgingSqlAggFunction of(FlinkContext context, FlinkTypeFactory typeFactory, ContextResolvedFunction resolvedFunction) {
    final DataTypeFactory dataTypeFactory = context.getCatalogManager().getDataTypeFactory();
    final TypeInference typeInference = resolvedFunction.getDefinition().getTypeInference(dataTypeFactory);
    return of(dataTypeFactory, typeFactory, SqlKind.OTHER_FUNCTION, resolvedFunction, typeInference);
}
Also used : TypeInference(org.apache.flink.table.types.inference.TypeInference) BridgingUtils.createSqlReturnTypeInference(org.apache.flink.table.planner.functions.bridging.BridgingUtils.createSqlReturnTypeInference) BridgingUtils.createSqlOperandTypeInference(org.apache.flink.table.planner.functions.bridging.BridgingUtils.createSqlOperandTypeInference) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory)

Example 33 with DataTypeFactory

use of org.apache.flink.table.catalog.DataTypeFactory in project flink-mirror by flink-ci.

the class RelDataTypeJsonSerializer method serialize.

@Override
public void serialize(RelDataType relDataType, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException {
    final SerdeContext serdeContext = SerdeContext.get(serializerProvider);
    final DataTypeFactory dataTypeFactory = serdeContext.getFlinkContext().getCatalogManager().getDataTypeFactory();
    // Conversion to LogicalType also ensures that Calcite's type system is materialized
    // so data types like DECIMAL will receive a concrete precision and scale (not unspecified
    // anymore).
    final LogicalType logicalType = LogicalRelDataTypeConverter.toLogicalType(relDataType, dataTypeFactory);
    serializerProvider.defaultSerializeValue(logicalType, jsonGenerator);
}
Also used : LogicalType(org.apache.flink.table.types.logical.LogicalType) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory)
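Because the conversion materializes Calcite's type system first, the resulting LogicalType carries concrete attributes and can be rendered in a fully specified form. A small illustrative sketch of that property (the literal DECIMAL type and the helper name are examples only, not part of the serializer above):

import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;

// Illustrative sketch only: a materialized DECIMAL has concrete precision and scale.
static String exampleMaterializedDecimal() {
    LogicalType materialized = new DecimalType(10, 2);
    // A fully specified type renders as, e.g., "DECIMAL(10, 2)".
    return materialized.asSerializableString();
}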

Example 34 with DataTypeFactory

use of org.apache.flink.table.catalog.DataTypeFactory in project flink-mirror by flink-ci.

the class BuiltInFunctionTestBase method testFunction.

@Test
public void testFunction() {
    final TableEnvironment env = TableEnvironment.create(EnvironmentSettings.newInstance().build());
    env.getConfig().addConfiguration(configuration());
    testSpec.functions.forEach(f -> env.createTemporarySystemFunction(f.getSimpleName(), f));
    final DataTypeFactory dataTypeFactory = ((TableEnvironmentInternal) env).getCatalogManager().getDataTypeFactory();
    final Table inputTable;
    if (testSpec.fieldDataTypes == null) {
        inputTable = env.fromValues(Row.of(testSpec.fieldData));
    } else {
        final DataTypes.UnresolvedField[] fields = IntStream.range(0, testSpec.fieldDataTypes.length).mapToObj(i -> DataTypes.FIELD("f" + i, testSpec.fieldDataTypes[i])).toArray(DataTypes.UnresolvedField[]::new);
        inputTable = env.fromValues(DataTypes.ROW(fields), Row.of(testSpec.fieldData));
    }
    for (TestItem testItem : testSpec.testItems) {
        try {
            if (testItem instanceof ResultTestItem<?>) {
                testResult(dataTypeFactory, env, inputTable, (ResultTestItem<?>) testItem);
            } else if (testItem instanceof ErrorTestItem<?>) {
                testError(env, inputTable, (ErrorTestItem<?>) testItem);
            }
        } catch (Throwable t) {
            throw new AssertionError("Failing test item: " + testItem, t);
        }
    }
}
Also used : DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory) IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) BuiltInFunctionDefinition(org.apache.flink.table.functions.BuiltInFunctionDefinition) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) RunWith(org.junit.runner.RunWith) Expression(org.apache.flink.table.expressions.Expression) AtomicReference(java.util.concurrent.atomic.AtomicReference) MiniClusterResourceConfiguration(org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration) ArrayList(java.util.ArrayList) Collections.singletonList(java.util.Collections.singletonList) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) Assertions.catchThrowable(org.assertj.core.api.Assertions.catchThrowable) ClassRule(org.junit.ClassRule) FlinkAssertions.anyCauseMatches(org.apache.flink.core.testutils.FlinkAssertions.anyCauseMatches) Parameterized(org.junit.runners.Parameterized) Nullable(javax.annotation.Nullable) AbstractDataType(org.apache.flink.table.types.AbstractDataType) MiniClusterWithClientResource(org.apache.flink.test.util.MiniClusterWithClientResource) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Iterator(java.util.Iterator) Parameter(org.junit.runners.Parameterized.Parameter) Configuration(org.apache.flink.configuration.Configuration) DataTypes(org.apache.flink.table.api.DataTypes) UserDefinedFunction(org.apache.flink.table.functions.UserDefinedFunction) Test(org.junit.Test) Table(org.apache.flink.table.api.Table) Preconditions(org.apache.flink.util.Preconditions) Collectors(java.util.stream.Collectors) Consumer(java.util.function.Consumer) List(java.util.List) ValidationException(org.apache.flink.table.api.ValidationException) EnvironmentSettings(org.apache.flink.table.api.EnvironmentSettings) TableResult(org.apache.flink.table.api.TableResult) Row(org.apache.flink.types.Row) TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal)
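The testResult and testError helpers are not part of this excerpt. As a rough, illustrative sketch, evaluating a single result item against the prepared inputTable could look like the following; "MyFunc" is a placeholder for one of the registered test functions, and "f0" matches the generated field names from the loop above:

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;

import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;

// Illustrative sketch only: evaluate one expression over the prepared input table
// and return the first result row.
static Row evaluateFirstRow(Table inputTable) throws Exception {
    TableResult result = inputTable.select(call("MyFunc", $("f0"))).execute();
    try (CloseableIterator<Row> it = result.collect()) {
        return it.next();
    }
}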

Example 35 with DataTypeFactory

use of org.apache.flink.table.catalog.DataTypeFactory in project flink-mirror by flink-ci.

the class CommonExecLookupJoin method createAsyncLookupJoin.

@SuppressWarnings("unchecked")
private StreamOperatorFactory<RowData> createAsyncLookupJoin(RelOptTable temporalTable, ExecNodeConfig config, Map<Integer, LookupJoinUtil.LookupKey> allLookupKeys, AsyncTableFunction<Object> asyncLookupFunction, RelBuilder relBuilder, RowType inputRowType, RowType tableSourceRowType, RowType resultRowType, boolean isLeftOuterJoin) {
    int asyncBufferCapacity = config.get(ExecutionConfigOptions.TABLE_EXEC_ASYNC_LOOKUP_BUFFER_CAPACITY);
    long asyncTimeout = config.get(ExecutionConfigOptions.TABLE_EXEC_ASYNC_LOOKUP_TIMEOUT).toMillis();
    DataTypeFactory dataTypeFactory = ShortcutUtils.unwrapContext(relBuilder).getCatalogManager().getDataTypeFactory();
    LookupJoinCodeGenerator.GeneratedTableFunctionWithDataType<AsyncFunction<RowData, Object>> generatedFuncWithType = LookupJoinCodeGenerator.generateAsyncLookupFunction(config.getTableConfig(), dataTypeFactory, inputRowType, tableSourceRowType, resultRowType, allLookupKeys, LookupJoinUtil.getOrderedLookupKeys(allLookupKeys.keySet()), asyncLookupFunction, StringUtils.join(temporalTable.getQualifiedName(), "."));
    // if a projection or filter follows the table source scan, the right row type is the
    // calc output type; otherwise it is the table source row type
    RowType rightRowType = Optional.ofNullable(temporalTableOutputType).map(FlinkTypeFactory::toLogicalRowType).orElse(tableSourceRowType);
    GeneratedResultFuture<TableFunctionResultFuture<RowData>> generatedResultFuture = LookupJoinCodeGenerator.generateTableAsyncCollector(config.getTableConfig(), "TableFunctionResultFuture", inputRowType, rightRowType, JavaScalaConversionUtil.toScala(Optional.ofNullable(joinCondition)));
    DataStructureConverter<?, ?> fetcherConverter = DataStructureConverters.getConverter(generatedFuncWithType.dataType());
    AsyncFunction<RowData, RowData> asyncFunc;
    if (existCalcOnTemporalTable) {
        // a projection or filter after table source scan
        GeneratedFunction<FlatMapFunction<RowData, RowData>> generatedCalc = LookupJoinCodeGenerator.generateCalcMapFunction(config.getTableConfig(), JavaScalaConversionUtil.toScala(projectionOnTemporalTable), filterOnTemporalTable, temporalTableOutputType, tableSourceRowType);
        asyncFunc = new AsyncLookupJoinWithCalcRunner(generatedFuncWithType.tableFunc(), (DataStructureConverter<RowData, Object>) fetcherConverter, generatedCalc, generatedResultFuture, InternalSerializers.create(rightRowType), isLeftOuterJoin, asyncBufferCapacity);
    } else {
        // right type is the same as table source row type, because no calc after temporal table
        asyncFunc = new AsyncLookupJoinRunner(generatedFuncWithType.tableFunc(), (DataStructureConverter<RowData, Object>) fetcherConverter, generatedResultFuture, InternalSerializers.create(rightRowType), isLeftOuterJoin, asyncBufferCapacity);
    }
    // ORDERED output mode is used even when the downstream does not require ordering
    return new AsyncWaitOperatorFactory<>(asyncFunc, asyncTimeout, asyncBufferCapacity, AsyncDataStream.OutputMode.ORDERED);
}
Also used : AsyncLookupJoinRunner(org.apache.flink.table.runtime.operators.join.lookup.AsyncLookupJoinRunner) AsyncWaitOperatorFactory(org.apache.flink.streaming.api.operators.async.AsyncWaitOperatorFactory) DataStructureConverter(org.apache.flink.table.data.conversion.DataStructureConverter) RowType(org.apache.flink.table.types.logical.RowType) DataTypeFactory(org.apache.flink.table.catalog.DataTypeFactory) AsyncLookupJoinWithCalcRunner(org.apache.flink.table.runtime.operators.join.lookup.AsyncLookupJoinWithCalcRunner) AsyncFunction(org.apache.flink.streaming.api.functions.async.AsyncFunction) LookupJoinCodeGenerator(org.apache.flink.table.planner.codegen.LookupJoinCodeGenerator) RowData(org.apache.flink.table.data.RowData) TableFunctionResultFuture(org.apache.flink.table.runtime.collector.TableFunctionResultFuture) FlatMapFunction(org.apache.flink.api.common.functions.FlatMapFunction)
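The two options read at the top of this method come from the job's table configuration. A minimal sketch of setting them on a TableEnvironment (the helper name and the values chosen here are arbitrary examples):

import java.time.Duration;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.config.ExecutionConfigOptions;

// Illustrative sketch only: configure the async lookup buffer capacity and timeout.
static TableEnvironment createEnvWithAsyncLookupSettings() {
    Configuration conf = new Configuration();
    conf.set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_LOOKUP_BUFFER_CAPACITY, 100);
    conf.set(ExecutionConfigOptions.TABLE_EXEC_ASYNC_LOOKUP_TIMEOUT, Duration.ofSeconds(30));
    TableEnvironment env = TableEnvironment.create(EnvironmentSettings.newInstance().build());
    env.getConfig().addConfiguration(conf);
    return env;
}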

Aggregations

DataTypeFactory (org.apache.flink.table.catalog.DataTypeFactory): 36
DataType (org.apache.flink.table.types.DataType): 15
TypeInference (org.apache.flink.table.types.inference.TypeInference): 12
List (java.util.List): 9
IntStream (java.util.stream.IntStream): 9
DataTypes (org.apache.flink.table.api.DataTypes): 9
RowData (org.apache.flink.table.data.RowData): 9
RowType (org.apache.flink.table.types.logical.RowType): 9
Row (org.apache.flink.types.Row): 9
Field (java.lang.reflect.Field): 6
LocalDate (java.time.LocalDate): 6
Collectors (java.util.stream.Collectors): 5
Nullable (javax.annotation.Nullable): 5
Internal (org.apache.flink.annotation.Internal): 5
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 5
BridgingUtils.createSqlOperandTypeInference (org.apache.flink.table.planner.functions.bridging.BridgingUtils.createSqlOperandTypeInference): 5
BridgingUtils.createSqlReturnTypeInference (org.apache.flink.table.planner.functions.bridging.BridgingUtils.createSqlReturnTypeInference): 5
ExtractionUtils.getStructuredField (org.apache.flink.table.types.extraction.ExtractionUtils.getStructuredField): 5
LogicalType (org.apache.flink.table.types.logical.LogicalType): 5
StructuredType (org.apache.flink.table.types.logical.StructuredType): 5