Usage example of io.confluent.ksql.schema.ksql.SqlArgument in the ksql project by confluentinc: class FunctionArgumentsUtil, method getFunctionTypeInfo.
/**
 * Computes type information for a function call node: the function's return type,
 * the types of all arguments, and the types of all lambda parameters for arguments
 * that are lambda expressions.
 *
 * <p>Given a function call node, we have to do a two pass processing of the
 * function arguments in order to properly handle any potential lambda functions.</p>
 *
 * <p>In the first pass, if there are lambda functions, we create a SqlLambda that only contains
 * the number of input arguments for the lambda. We pass this first argument list
 * to UdfFactory in order to get the correct function. We can make this assumption
 * due to Java's handling of type erasure (Function(T,R) is considered the same as
 * Function(U,R)).</p>
 *
 * <p>In the second pass, we use the LambdaType inputTypes field to construct SqlLambdaResolved
 * that has the proper input type list and return type. We also need to construct a list of
 * lambda type mapping that should be used when processing each function argument subtree.</p>
 *
 * @param expressionTypeManager an expression type manager
 * @param functionCall the function expression
 * @param udfFactory udf factory for the function in the expression
 * @param lambdaMapping a type context mapping lambda variable names to their types
 *
 * @return a wrapper that contains a list of function arguments
 *         (any lambdas are SqlLambdaResolved), the ksql function,
 *         type contexts for use in further processing the function
 *         argument child nodes, and the return type of the ksql function
 */
// CHECKSTYLE_RULES.OFF: CyclomaticComplexity
public static FunctionTypeInfo getFunctionTypeInfo(
    final ExpressionTypeManager expressionTypeManager,
    final FunctionCall functionCall,
    final UdfFactory udfFactory,
    final Map<String, SqlType> lambdaMapping
) {
  // CHECKSTYLE_RULES.ON: CyclomaticComplexity
  final List<Expression> arguments = functionCall.getArguments();

  // First pass: resolve argument types, with any lambdas represented only by arity,
  // so the correct overload can be selected despite type erasure.
  final List<SqlArgument> functionArgumentTypes =
      firstPassOverFunctionArguments(arguments, expressionTypeManager, lambdaMapping);
  final KsqlScalarFunction function = udfFactory.getFunction(functionArgumentTypes);

  final SqlType returnSchema;
  final List<ArgumentInfo> argumentInfoForFunction = new ArrayList<>();

  if (!functionCall.hasLambdaFunctionCallArguments()) {
    // No lambdas: the first-pass types are already fully resolved.
    returnSchema = function.getReturnType(functionArgumentTypes);
    return FunctionTypeInfo.of(
        functionArgumentTypes.stream()
            .map(argument -> ArgumentInfo.of(argument, new HashMap<>(lambdaMapping)))
            .collect(Collectors.toList()),
        returnSchema,
        function
    );
  } else {
    final List<ParamType> paramTypes = function.parameters();
    final Map<GenericType, SqlType> reservedGenerics = new HashMap<>();
    final List<SqlArgument> functionArgumentTypesWithResolvedLambdaType = new ArrayList<>();

    // Second pass over the function arguments to properly do lambda type checking.
    for (int i = 0; i < arguments.size(); i++) {
      final Expression expression = arguments.get(i);
      final ParamType parameter = paramTypes.get(i);

      if (expression instanceof LambdaFunctionCall) {
        // Lambda node at this index in the function node argument list.
        if (!(parameter instanceof LambdaType)) {
          // Note: added the missing sentence separators so the message reads correctly.
          throw new RuntimeException(String.format(
              "Error while processing lambda function. "
                  + "Expected lambda parameter but was %s. "
                  + "This is most likely an internal error and a "
                  + "Github issue should be filed for debugging. "
                  + "Include the function name, the parameters passed in, the expected "
                  + "signature, and any other relevant information.",
              parameter.toString()));
        }

        final ArrayList<SqlType> lambdaSqlTypes = new ArrayList<>();
        final Map<String, SqlType> variableTypeMapping = mapLambdaParametersToTypes(
            (LambdaFunctionCall) expression,
            (LambdaType) parameter,
            reservedGenerics,
            lambdaSqlTypes
        );

        // Merge this lambda's variable bindings with the inherited context before
        // typing the lambda body.
        final Map<String, SqlType> updateLambdaMapping =
            LambdaMappingUtil.resolveOldAndNewLambdaMapping(variableTypeMapping, lambdaMapping);

        final SqlType resolvedLambdaReturnType =
            expressionTypeManager.getExpressionSqlType(expression, updateLambdaMapping);
        final SqlArgument lambdaArgument =
            SqlArgument.of(SqlLambdaResolved.of(lambdaSqlTypes, resolvedLambdaReturnType));

        functionArgumentTypesWithResolvedLambdaType.add(lambdaArgument);
        argumentInfoForFunction.add(
            ArgumentInfo.of(lambdaArgument, new HashMap<>(updateLambdaMapping)));
      } else {
        // Non-lambda argument: keep the first-pass type and the inherited context.
        functionArgumentTypesWithResolvedLambdaType.add(functionArgumentTypes.get(i));
        argumentInfoForFunction.add(
            ArgumentInfo.of(functionArgumentTypes.get(i), new HashMap<>(lambdaMapping)));
      }

      if (GenericsUtil.hasGenerics(parameter)) {
        // Reserve any generic type variables this parameter binds so later
        // parameters must agree with them.
        final Pair<Boolean, Optional<KsqlException>> success = GenericsUtil.reserveGenerics(
            parameter,
            functionArgumentTypesWithResolvedLambdaType.get(i),
            reservedGenerics
        );
        if (!success.getLeft() && success.getRight().isPresent()) {
          throw success.getRight().get();
        }
      }
    }

    returnSchema = function.getReturnType(functionArgumentTypesWithResolvedLambdaType);
    // Use the same static factory as the no-lambda branch for consistency.
    return FunctionTypeInfo.of(argumentInfoForFunction, returnSchema, function);
  }
}
Usage example of io.confluent.ksql.schema.ksql.SqlArgument in the ksql project by confluentinc: class UdfIndex, method getCandidates.
/**
 * Recursively walks the parameter trie, collecting every terminal node whose
 * parameter chain accepts the full argument list.
 */
private void getCandidates(
    final List<SqlArgument> arguments,
    final int argIndex,
    final Node current,
    final List<Node> candidates,
    final Map<GenericType, SqlType> reservedGenerics,
    final boolean allowCasts
) {
  // Base case: all arguments consumed — record this node if it holds a function.
  if (argIndex == arguments.size()) {
    if (current.value != null) {
      candidates.add(current);
    }
    return;
  }

  final SqlArgument nextArg = arguments.get(argIndex);
  current.children.forEach((param, child) -> {
    // Each branch gets its own copy of the generics map so sibling branches
    // cannot clobber each other's reservations.
    final Map<GenericType, SqlType> genericsForBranch = new HashMap<>(reservedGenerics);
    if (param.accepts(nextArg, genericsForBranch, allowCasts)) {
      getCandidates(arguments, argIndex + 1, child, candidates, genericsForBranch, allowCasts);
    }
  });
}
Usage example of io.confluent.ksql.schema.ksql.SqlArgument in the ksql project by confluentinc: class UdfLoaderTest, method shouldLoadFunctionWithSchemaProvider.
@Test
public void shouldLoadFunctionWithSchemaProvider() {
  // Given: a single decimal argument for the 'returndecimal' UDF
  final SqlDecimal expectedType = SqlTypes.decimal(2, 1);
  final List<SqlArgument> args = Collections.singletonList(SqlArgument.of(expectedType));
  final UdfFactory factory = FUNC_REG.getUdfFactory(FunctionName.of("returndecimal"));

  // When:
  final KsqlScalarFunction resolved = factory.getFunction(args);

  // Then: the schema provider echoes the argument's own decimal type
  assertThat(resolved.getReturnType(args), equalTo(expectedType));
}
Usage example of io.confluent.ksql.schema.ksql.SqlArgument in the ksql project by confluentinc: class UdfLoaderTest, method shouldThrowOnReturnTypeMismatch.
@Test
public void shouldThrowOnReturnTypeMismatch() {
  // Given: a decimal argument for a UDF whose declared return type is STRING
  final SqlDecimal decimalType = decimal(2, 1);
  final List<SqlArgument> args = singletonList(SqlArgument.of(decimalType));
  final KsqlScalarFunction fn =
      FUNC_REG.getUdfFactory(of("returnincompatible")).getFunction(args);

  // When:
  final Exception e = assertThrows(KsqlException.class, () -> fn.getReturnType(args));

  // Then:
  assertThat(e.getMessage(), containsString(
      "Return type DECIMAL(2, 1) of UDF RETURNINCOMPATIBLE does not "
          + "match the declared return type STRING."));
}
Usage example of io.confluent.ksql.schema.ksql.SqlArgument in the ksql project by confluentinc: class UdtfUtil, method resolveTableFunction.
/**
 * Looks up the table function matching the given call's name and argument types,
 * resolving each argument expression's SQL type against the supplied schema.
 */
public static KsqlTableFunction resolveTableFunction(
    final FunctionRegistry functionRegistry,
    final FunctionCall functionCall,
    final LogicalSchema schema
) {
  final ExpressionTypeManager typeManager =
      new ExpressionTypeManager(schema, functionRegistry);
  final List<Expression> callArgs = functionCall.getArguments();

  final List<SqlArgument> argTypes;
  if (callArgs.isEmpty()) {
    // Zero-argument calls resolve against the registry's default argument schema.
    argTypes = ImmutableList.of(SqlArgument.of(FunctionRegistry.DEFAULT_FUNCTION_ARG_SCHEMA));
  } else {
    argTypes = callArgs.stream()
        .map(typeManager::getExpressionSqlType)
        .map(SqlArgument::of)
        .collect(Collectors.toList());
  }

  return functionRegistry.getTableFunction(functionCall.getName(), argTypes);
}
Aggregations