Use of org.apache.flink.table.types.inference.TypeInference in project flink by splunk.
The class FunctionCatalogOperatorTable, method convertToBridgingSqlFunction.
private Optional<SqlFunction> convertToBridgingSqlFunction(
        @Nullable SqlFunctionCategory category, ContextResolvedFunction resolvedFunction) {
    final FunctionDefinition definition = resolvedFunction.getDefinition();
    if (!verifyFunctionKind(category, resolvedFunction)) {
        return Optional.empty();
    }
    final TypeInference typeInference;
    try {
        typeInference = definition.getTypeInference(dataTypeFactory);
    } catch (Throwable t) {
        throw new ValidationException(
                String.format(
                        "An error occurred in the type inference logic of function '%s'.",
                        resolvedFunction),
                t);
    }
    if (typeInference.getOutputTypeStrategy() == TypeStrategies.MISSING) {
        return Optional.empty();
    }
    final SqlFunction function;
    if (definition.getKind() == FunctionKind.AGGREGATE
            || definition.getKind() == FunctionKind.TABLE_AGGREGATE) {
        function = BridgingSqlAggFunction.of(
                dataTypeFactory, typeFactory, SqlKind.OTHER_FUNCTION, resolvedFunction, typeInference);
    } else {
        function = BridgingSqlFunction.of(
                dataTypeFactory, typeFactory, SqlKind.OTHER_FUNCTION, resolvedFunction, typeInference);
    }
    return Optional.of(function);
}
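For context, the TypeInference consumed here typically comes from a user-defined function. Below is a minimal, hypothetical sketch (the function name, logic, and chosen strategies are illustrative assumptions, not code from this project) of a ScalarFunction whose overridden getTypeInference would be what definition.getTypeInference(dataTypeFactory) returns above:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.table.types.inference.TypeInference;
import org.apache.flink.table.types.inference.TypeStrategies;

// Hypothetical function: truncates a string to a given length.
public class TruncateFunction extends ScalarFunction {

    public String eval(String value, Integer length) {
        if (value == null || length == null) {
            return null;
        }
        return value.substring(0, Math.min(value.length(), length));
    }

    @Override
    public TypeInference getTypeInference(DataTypeFactory typeFactory) {
        // Declare the expected argument types and a fixed output type explicitly.
        return TypeInference.newBuilder()
                .typedArguments(DataTypes.STRING(), DataTypes.INT())
                .outputTypeStrategy(TypeStrategies.explicit(DataTypes.STRING()))
                .build();
    }
}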
Use of org.apache.flink.table.types.inference.TypeInference in project flink by splunk.
The class SqlAggFunctionVisitor, method createSqlAggFunction.
private SqlAggFunction createSqlAggFunction(CallExpression call) {
    final FunctionDefinition definition = call.getFunctionDefinition();
    // legacy
    if (definition instanceof AggregateFunctionDefinition) {
        return createLegacySqlAggregateFunction(
                call.getFunctionIdentifier().orElse(null), (AggregateFunctionDefinition) definition);
    } else if (definition instanceof TableAggregateFunctionDefinition) {
        return createLegacySqlTableAggregateFunction(
                call.getFunctionIdentifier().orElse(null), (TableAggregateFunctionDefinition) definition);
    }
    // new stack
    final DataTypeFactory dataTypeFactory =
            ShortcutUtils.unwrapContext(relBuilder).getCatalogManager().getDataTypeFactory();
    final TypeInference typeInference = definition.getTypeInference(dataTypeFactory);
    return BridgingSqlAggFunction.of(
            dataTypeFactory,
            ShortcutUtils.unwrapTypeFactory(relBuilder),
            SqlKind.OTHER_FUNCTION,
            ContextResolvedFunction.fromCallExpression(call),
            typeInference);
}
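The two legacy branches wrap functions registered through the old type system; any other definition goes through the new stack, where definition.getTypeInference(dataTypeFactory) is typically derived reflectively because the function does not override the method. A minimal, hypothetical AggregateFunction that would take that path might look like the following sketch (name and logic are illustrative assumptions):

import org.apache.flink.table.functions.AggregateFunction;

// Hypothetical aggregate: counts non-null STRING values.
public class CountNonNull extends AggregateFunction<Long, CountNonNull.Acc> {

    public static class Acc {
        public long count = 0L;
    }

    @Override
    public Acc createAccumulator() {
        return new Acc();
    }

    public void accumulate(Acc acc, String value) {
        if (value != null) {
            acc.count++;
        }
    }

    @Override
    public Long getValue(Acc acc) {
        return acc.count;
    }
}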
Use of org.apache.flink.table.types.inference.TypeInference in project flink by splunk.
The class BridgingSqlFunction, method of.
/**
* Creates an instance of a scalar or table function during translation.
*/
public static BridgingSqlFunction of(
        FlinkContext context, FlinkTypeFactory typeFactory, ContextResolvedFunction resolvedFunction) {
    final DataTypeFactory dataTypeFactory = context.getCatalogManager().getDataTypeFactory();
    final TypeInference typeInference =
            resolvedFunction.getDefinition().getTypeInference(dataTypeFactory);
    return of(dataTypeFactory, typeFactory, SqlKind.OTHER_FUNCTION, resolvedFunction, typeInference);
}
Use of org.apache.flink.table.types.inference.TypeInference in project flink by splunk.
The class CommonPythonUtil, method extractPythonAggregateFunctionInfos.
public static Tuple2<PythonAggregateFunctionInfo[], DataViewSpec[][]> extractPythonAggregateFunctionInfos(
        AggregateInfoList pythonAggregateInfoList, AggregateCall[] aggCalls) {
    List<PythonAggregateFunctionInfo> pythonAggregateFunctionInfoList = new ArrayList<>();
    List<DataViewSpec[]> dataViewSpecList = new ArrayList<>();
    AggregateInfo[] aggInfos = pythonAggregateInfoList.aggInfos();
    for (int i = 0; i < aggInfos.length; i++) {
        AggregateInfo aggInfo = aggInfos[i];
        UserDefinedFunction function = aggInfo.function();
        if (function instanceof PythonFunction) {
            pythonAggregateFunctionInfoList.add(
                    new PythonAggregateFunctionInfo(
                            (PythonFunction) function,
                            Arrays.stream(aggInfo.argIndexes()).boxed().toArray(),
                            aggCalls[i].filterArg,
                            aggCalls[i].isDistinct()));
            TypeInference typeInference = function.getTypeInference(null);
            dataViewSpecList.add(
                    extractDataViewSpecs(
                            i, typeInference.getAccumulatorTypeStrategy().get().inferType(null).get()));
        } else {
            int filterArg = -1;
            boolean distinct = false;
            if (i < aggCalls.length) {
                filterArg = aggCalls[i].filterArg;
                distinct = aggCalls[i].isDistinct();
            }
            pythonAggregateFunctionInfoList.add(
                    new PythonAggregateFunctionInfo(
                            getBuiltInPythonAggregateFunction(function),
                            Arrays.stream(aggInfo.argIndexes()).boxed().toArray(),
                            filterArg,
                            distinct));
            // The data views of the built-in Python aggregate function differ from the Java side,
            // so the spec is created on the Python side.
            dataViewSpecList.add(new DataViewSpec[0]);
        }
    }
    return Tuple2.of(
            pythonAggregateFunctionInfoList.toArray(new PythonAggregateFunctionInfo[0]),
            dataViewSpecList.toArray(new DataViewSpec[0][0]));
}
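The call typeInference.getAccumulatorTypeStrategy().get().inferType(null) only succeeds when the accumulator strategy never inspects the CallContext, which is presumably the case for the Python functions handled here because their accumulator type is declared explicitly. A small self-contained sketch of that behavior (types chosen purely for illustration, not code from this utility):

import java.util.Optional;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.inference.TypeStrategies;
import org.apache.flink.table.types.inference.TypeStrategy;

public class ExplicitStrategyDemo {
    public static void main(String[] args) {
        // An explicit strategy returns its declared type and ignores the call context,
        // so even a null CallContext (as above) yields a result.
        TypeStrategy accStrategy =
                TypeStrategies.explicit(DataTypes.ROW(DataTypes.FIELD("count", DataTypes.BIGINT())));
        Optional<DataType> accType = accStrategy.inferType(null);
        System.out.println(accType.get()); // prints the declared row type
    }
}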
Use of org.apache.flink.table.types.inference.TypeInference in project flink by apache.
The class LastDatedValueFunction, method getTypeInference.
// --------------------------------------------------------------------------------------------
// Planning
// --------------------------------------------------------------------------------------------
/**
* Declares the {@link TypeInference} of this function. It specifies:
*
* <ul>
* <li>which argument types are supported when calling this function,
* <li>which {@link DataType#getConversionClass()} should be used when calling the JVM method
* {@link #accumulate(Accumulator, Object, LocalDate)} during runtime,
* <li>a similar strategy how to derive an accumulator type,
* <li>and a similar strategy how to derive the output type.
* </ul>
*/
@Override
public TypeInference getTypeInference(DataTypeFactory typeFactory) {
    return TypeInference.newBuilder()
            .inputTypeStrategy(InputTypeStrategies.sequence(
                    InputTypeStrategies.ANY, InputTypeStrategies.explicit(DataTypes.DATE())))
            .accumulatorTypeStrategy(callContext -> {
                final DataType argDataType = callContext.getArgumentDataTypes().get(0);
                final DataType accDataType = DataTypes.STRUCTURED(
                        Accumulator.class,
                        DataTypes.FIELD("value", argDataType),
                        DataTypes.FIELD("date", DataTypes.DATE()));
                return Optional.of(accDataType);
            })
            .outputTypeStrategy(callContext -> {
                final DataType argDataType = callContext.getArgumentDataTypes().get(0);
                final DataType outputDataType = DataTypes.ROW(
                        DataTypes.FIELD("value", argDataType),
                        DataTypes.FIELD("date", DataTypes.DATE()));
                return Optional.of(outputDataType);
            })
            .build();
}
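For completeness, such a function is typically registered and invoked from the Table API. A hedged usage sketch, assuming LastDatedValueFunction is on the classpath and using an illustrative function name and inline test data:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class LastDatedValueExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build());

        // Register the function under a hypothetical name.
        env.createTemporarySystemFunction("LastDatedValue", LastDatedValueFunction.class);

        // The input type strategy above accepts (ANY, DATE); the output is ROW<value, date>.
        env.executeSql(
                        "SELECT LastDatedValue(v, d) AS last_dated "
                                + "FROM (VALUES ('a', DATE '2024-01-01'), ('b', DATE '2024-02-01')) AS t(v, d)")
                .print();
    }
}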