Use of io.confluent.ksql.function.types.LambdaType in project ksql by confluentinc.
Class UdfUtilTest, method shouldGetFunction:
@Test
public void shouldGetFunction() throws NoSuchMethodException {
  final Type type = getClass()
      .getDeclaredMethod("functionType", Function.class)
      .getGenericParameterTypes()[0];
  final ParamType schema = UdfUtil.getSchemaFromType(type);
  assertThat(schema, instanceOf(LambdaType.class));
  assertThat(((LambdaType) schema).inputTypes(), equalTo(ImmutableList.of(ParamTypes.LONG)));
  assertThat(((LambdaType) schema).returnType(), equalTo(ParamTypes.INTEGER));
}
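The reflection call above targets a private helper declared in the same test class but not shown in this snippet. A minimal sketch consistent with the asserted LONG input and INTEGER return types (the exact declaration in UdfUtilTest may differ) would be:

private void functionType(final Function<Long, Integer> function) {
  // no body needed; the test only inspects the declared parameter type via reflection
}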
Use of io.confluent.ksql.function.types.LambdaType in project ksql by confluentinc.
Class UdfUtilTest, method shouldGetTriFunction:
@Test
public void shouldGetTriFunction() throws NoSuchMethodException {
  final Type type = getClass()
      .getDeclaredMethod("triFunctionType", TriFunction.class)
      .getGenericParameterTypes()[0];
  final ParamType schema = UdfUtil.getSchemaFromType(type);
  assertThat(schema, instanceOf(LambdaType.class));
  assertThat(
      ((LambdaType) schema).inputTypes(),
      equalTo(ImmutableList.of(ParamTypes.LONG, ParamTypes.INTEGER, ParamTypes.BOOLEAN)));
  assertThat(((LambdaType) schema).returnType(), equalTo(ParamTypes.BOOLEAN));
}
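As above, the triFunctionType helper is not shown here. A sketch matching the asserted LONG, INTEGER, BOOLEAN inputs and BOOLEAN return, assuming TriFunction is ksql's three-argument functional interface with the return type as its last type parameter, would be:

private void triFunctionType(final TriFunction<Long, Integer, Boolean, Boolean> triFunction) {
  // no body needed; the test only inspects the declared parameter type via reflection
}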
Use of io.confluent.ksql.function.types.LambdaType in project ksql by confluentinc.
Class FunctionArgumentsUtil, method getFunctionTypeInfo:
/**
* Compute type information given a function call node. Specifically, computes
* the function return type, the types of all arguments, and the types of all
* lambda parameters for arguments that are lambda expressions.
*
* <p>Given a function call node, we have to do two passes over the
* function arguments in order to properly handle any potential lambda functions.</p>
*
* <p>In the first pass, if there are lambda functions, we create a SqlLambda that only contains
* the number of input arguments for the lambda. We pass this first argument list
* to UdfFactory in order to get the correct function. We can make this assumption
* due to Java's handling of type erasure (Function(T,R) is considered the same as
* Function(U,R)).</p>
*
* <p>In the second pass, we use the LambdaType inputTypes field to construct SqlLambdaResolved
* that has the proper input type list and return type. We also need to construct a list of
* lambda type mapping that should be used when processing each function argument subtree.</p>
*
* @param expressionTypeManager an expression type manager
* @param functionCall the function expression
* @param udfFactory udf factory for the function in the expression
* @param lambdaMapping a type context: a mapping from lambda variable name to its SQL type
*
* @return a wrapper that contains a list of function arguments
* (any lambdas are SqlLambdaResolved), the ksql function,
* type contexts for use in further processing the function
* argument child nodes, and the return type of the ksql function
*/
// CHECKSTYLE_RULES.OFF: CyclomaticComplexity
public static FunctionTypeInfo getFunctionTypeInfo(
    final ExpressionTypeManager expressionTypeManager,
    final FunctionCall functionCall,
    final UdfFactory udfFactory,
    final Map<String, SqlType> lambdaMapping
) {
  // CHECKSTYLE_RULES.ON: CyclomaticComplexity
  final List<Expression> arguments = functionCall.getArguments();
  final List<SqlArgument> functionArgumentTypes =
      firstPassOverFunctionArguments(arguments, expressionTypeManager, lambdaMapping);
  final KsqlScalarFunction function = udfFactory.getFunction(functionArgumentTypes);
  final SqlType returnSchema;
  final List<ArgumentInfo> argumentInfoForFunction = new ArrayList<>();

  if (!functionCall.hasLambdaFunctionCallArguments()) {
    returnSchema = function.getReturnType(functionArgumentTypes);
    return FunctionTypeInfo.of(
        functionArgumentTypes.stream()
            .map(argument -> ArgumentInfo.of(argument, new HashMap<>(lambdaMapping)))
            .collect(Collectors.toList()),
        returnSchema,
        function
    );
  } else {
    final List<ParamType> paramTypes = function.parameters();
    final Map<GenericType, SqlType> reservedGenerics = new HashMap<>();
    final List<SqlArgument> functionArgumentTypesWithResolvedLambdaType = new ArrayList<>();

    // second pass over the function arguments to properly do lambda type checking
    for (int i = 0; i < arguments.size(); i++) {
      final Expression expression = arguments.get(i);
      final ParamType parameter = paramTypes.get(i);

      if (expression instanceof LambdaFunctionCall) {
        // lambda node at this index in the function node argument list
        if (!(parameter instanceof LambdaType)) {
          throw new RuntimeException(String.format(
              "Error while processing lambda function. "
                  + "Expected lambda parameter but was %s. "
                  + "This is most likely an internal error and a "
                  + "GitHub issue should be filed for debugging. "
                  + "Include the function name, the parameters passed in, the expected "
                  + "signature, and any other relevant information.",
              parameter.toString()));
        }

        final ArrayList<SqlType> lambdaSqlTypes = new ArrayList<>();
        final Map<String, SqlType> variableTypeMapping = mapLambdaParametersToTypes(
            (LambdaFunctionCall) expression,
            (LambdaType) parameter,
            reservedGenerics,
            lambdaSqlTypes
        );
        final Map<String, SqlType> updateLambdaMapping =
            LambdaMappingUtil.resolveOldAndNewLambdaMapping(variableTypeMapping, lambdaMapping);
        final SqlType resolvedLambdaReturnType =
            expressionTypeManager.getExpressionSqlType(expression, updateLambdaMapping);
        final SqlArgument lambdaArgument =
            SqlArgument.of(SqlLambdaResolved.of(lambdaSqlTypes, resolvedLambdaReturnType));
        functionArgumentTypesWithResolvedLambdaType.add(lambdaArgument);
        argumentInfoForFunction.add(
            ArgumentInfo.of(lambdaArgument, new HashMap<>(updateLambdaMapping)));
      } else {
        functionArgumentTypesWithResolvedLambdaType.add(functionArgumentTypes.get(i));
        argumentInfoForFunction.add(
            ArgumentInfo.of(functionArgumentTypes.get(i), new HashMap<>(lambdaMapping)));
      }

      if (GenericsUtil.hasGenerics(parameter)) {
        final Pair<Boolean, Optional<KsqlException>> success = GenericsUtil.reserveGenerics(
            parameter,
            functionArgumentTypesWithResolvedLambdaType.get(i),
            reservedGenerics
        );
        if (!success.getLeft() && success.getRight().isPresent()) {
          throw success.getRight().get();
        }
      }
    }

    returnSchema = function.getReturnType(functionArgumentTypesWithResolvedLambdaType);
    return FunctionTypeInfo.of(argumentInfoForFunction, returnSchema, function);
  }
}
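For illustration only (not part of the ksql sources): the shape of the argument the second pass builds for a lambda such as x -> x + 1 applied over an ARRAY<INTEGER>, assuming the UDF's LambdaType parameter resolved to a single INTEGER input:

final SqlLambdaResolved resolved =
    SqlLambdaResolved.of(ImmutableList.of(SqlTypes.INTEGER), SqlTypes.INTEGER);
final SqlArgument lambdaArgument = SqlArgument.of(resolved);
// function.getReturnType(...) is then called with resolved arguments like this one,
// so a generic return type can be bound from the lambda's resolved return type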
Use of io.confluent.ksql.function.types.LambdaType in project ksql by confluentinc.
Class GenericsUtil, method resolveGenerics:
/**
* Identifies a mapping from generic type to concrete type based on a {@code schema} and
* an {@code instance}, where the {@code instance} schema is expected to have no generic
* types and have the same nested structure as {@code schema}. Any Generic type mapping
* identified is added to the list passed in.
*
* @param mapping a list of GenericType to SqlType mappings
* @param schema the schema that may contain generics
* @param instance a schema with the same structure as {@code schema} but with no generics
*
* @return whether we were able to resolve generics in the instance and schema
*/
// CHECKSTYLE_RULES.OFF: NPathComplexity
// CHECKSTYLE_RULES.OFF: CyclomaticComplexity
private static boolean resolveGenerics(
    final List<Entry<GenericType, SqlType>> mapping,
    final ParamType schema,
    final SqlArgument instance
) {
  if (!isGeneric(schema) && !matches(schema, instance)) {
    // cannot identify from type mismatch
    return false;
  } else if (!hasGenerics(schema)) {
    // nothing left to identify
    return true;
  }

  KsqlPreconditions.checkArgument(
      isGeneric(schema) || matches(schema, instance),
      "Cannot resolve generics if the schema and instance have differing types: "
          + schema + " vs. " + instance);

  if (schema instanceof LambdaType) {
    final LambdaType lambdaType = (LambdaType) schema;
    final SqlLambda sqlLambda = instance.getSqlLambdaOrThrow();
    if (lambdaType.inputTypes().size() == sqlLambda.getNumInputs()) {
      if (sqlLambda instanceof SqlLambdaResolved) {
        final SqlLambdaResolved sqlLambdaResolved = (SqlLambdaResolved) sqlLambda;
        int i = 0;
        for (final ParamType paramType : lambdaType.inputTypes()) {
          if (!resolveGenerics(
              mapping, paramType, SqlArgument.of(sqlLambdaResolved.getInputType().get(i)))) {
            return false;
          }
          i++;
        }
        return resolveGenerics(
            mapping, lambdaType.returnType(), SqlArgument.of(sqlLambdaResolved.getReturnType()));
      } else {
        return true;
      }
    } else {
      return false;
    }
  }

  final SqlType sqlType = instance.getSqlTypeOrThrow();
  if (isGeneric(schema)) {
    mapping.add(new HashMap.SimpleEntry<>((GenericType) schema, sqlType));
  }

  if (schema instanceof ArrayType) {
    final SqlArray sqlArray = (SqlArray) sqlType;
    return resolveGenerics(
        mapping, ((ArrayType) schema).element(), SqlArgument.of(sqlArray.getItemType()));
  }

  if (schema instanceof MapType) {
    final SqlMap sqlMap = (SqlMap) sqlType;
    final MapType mapType = (MapType) schema;
    return resolveGenerics(mapping, mapType.key(), SqlArgument.of(sqlMap.getKeyType()))
        && resolveGenerics(mapping, mapType.value(), SqlArgument.of(sqlMap.getValueType()));
  }

  if (schema instanceof StructType) {
    throw new KsqlException("Generic STRUCT is not yet supported");
  }

  return true;
}
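As a small, hypothetical illustration of the recursion above (resolveGenerics itself is private to GenericsUtil, so this shows only the input types, assuming factory methods like ArrayType.of and SqlTypes.array): matching the schema ARRAY<T> against a concrete ARRAY<STRING> instance would add the single entry T -> STRING to the mapping list via the ArrayType branch.

final ParamType schema = ArrayType.of(GenericType.of("T"));
final SqlArgument instance = SqlArgument.of(SqlTypes.array(SqlTypes.STRING));
// walking schema against instance reaches the ArrayType branch above and
// records the entry GenericType("T") -> SqlTypes.STRING in the mapping list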
Use of io.confluent.ksql.function.types.LambdaType in project ksql by confluentinc.
Class GenericsUtilTest, method shouldFailToIdentifySqlLambdaResolvedWithDifferentSchema:
@Test
public void shouldFailToIdentifySqlLambdaResolvedWithDifferentSchema() {
  // Given:
  final GenericType typeA = GenericType.of("A");
  final GenericType typeB = GenericType.of("B");
  final GenericType typeC = GenericType.of("C");
  final LambdaType a = LambdaType.of(ImmutableList.of(typeA, typeC), typeB);
  final SqlArgument instance =
      SqlArgument.of(SqlLambdaResolved.of(ImmutableList.of(SqlTypes.DOUBLE), SqlTypes.BIGINT));

  // When:
  final Exception e = assertThrows(
      KsqlException.class,
      () -> GenericsUtil.reserveGenerics(a, instance));

  // Then:
  assertThat(e.getMessage(), containsString(
      "Cannot infer generics for LAMBDA (A, C) => B from LAMBDA (DOUBLE) => BIGINT "
          + "because they do not have the same schema structure"));
}