use of io.confluent.ksql.function.KsqlScalarFunction in project ksql by confluentinc.
the class SqlToJavaVisitorTest method shouldGenerateCorrectCodeForLambdaExpression.
@Test
public void shouldGenerateCorrectCodeForLambdaExpression() {
// Given:
final UdfFactory udfFactory = mock(UdfFactory.class);
final KsqlScalarFunction udf = mock(KsqlScalarFunction.class);
givenUdf("ABS", udfFactory, udf, SqlTypes.STRING);
givenUdf("TRANSFORM", udfFactory, udf, SqlTypes.STRING);
when(udf.parameters()).thenReturn(ImmutableList.of(
    ArrayType.of(ParamTypes.DOUBLE),
    LambdaType.of(ImmutableList.of(ParamTypes.DOUBLE), ParamTypes.DOUBLE)));
final Expression expression = new FunctionCall(
    FunctionName.of("TRANSFORM"),
    ImmutableList.of(
        ARRAYCOL,
        new LambdaFunctionCall(
            ImmutableList.of("X"),
            new FunctionCall(FunctionName.of("ABS"), ImmutableList.of(new LambdaVariable("X"))))));
// When:
final String javaExpression = sqlToJavaVisitor.process(expression);
// Then:
assertThat(javaExpression, equalTo(
    "((String) TRANSFORM_0.evaluate(COL4, new Function() {\n"
        + " @Override\n"
        + " public Object apply(Object arg1) {\n"
        + " final Double X = (Double) arg1;\n"
        + " return ((String) ABS_1.evaluate(X));\n"
        + " }\n"
        + "}))"));
}
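These tests call a givenUdf helper that is not included in this snippet. A minimal sketch of what such a Mockito stub could look like follows; the functionRegistry field and the exact stubbing are assumptions, not the actual ksql test helper.
// Hypothetical sketch: wires the mocked registry/factory so that looking up
// `name` returns the given KsqlScalarFunction with the given return type.
private void givenUdf(
    final String name,
    final UdfFactory factory,
    final KsqlScalarFunction function,
    final SqlType returnType
) {
  when(functionRegistry.isAggregate(FunctionName.of(name))).thenReturn(false);
  when(functionRegistry.getUdfFactory(FunctionName.of(name))).thenReturn(factory);
  when(factory.getFunction(anyList())).thenReturn(function);
  when(function.getReturnType(anyList())).thenReturn(returnType);
}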
use of io.confluent.ksql.function.KsqlScalarFunction in project ksql by confluentinc.
the class FunctionArgumentsUtil method getFunctionTypeInfo.
/**
* Compute type information given a function call node. Specifically, computes
* the function return type, the types of all arguments, and the types of all
* lambda parameters for arguments that are lambda expressions.
*
* <p>Given a function call node, we do two passes over the
* function arguments in order to properly handle any potential lambda functions.</p>
*
* <p>In the first pass, if there are lambda functions, we create a SqlLambda that only contains
* the number of input arguments for the lambda. We pass this first argument list
* to the UdfFactory in order to look up the correct function. Resolving the lambda's
* parameter types can be deferred because of Java's type erasure (Function(T,R) is
* considered the same as Function(U,R)).</p>
*
* <p>In the second pass, we use the LambdaType inputTypes field to construct a SqlLambdaResolved
* that has the proper input type list and return type. We also construct a list of
* lambda type mappings to be used when processing each function argument subtree.</p>
*
* @param expressionTypeManager an expression type manager
* @param functionCall the function expression
* @param udfFactory udf factory for the function in the expression
* @param lambdaMapping the current mapping of lambda variable names to their types
*
* @return a wrapper that contains a list of function arguments
* (any lambdas are SqlLambdaResolved), the ksql function,
* type contexts for use in further processing the function
* argument child nodes, and the return type of the ksql function
*/
// CHECKSTYLE_RULES.OFF: CyclomaticComplexity
public static FunctionTypeInfo getFunctionTypeInfo(
    final ExpressionTypeManager expressionTypeManager,
    final FunctionCall functionCall,
    final UdfFactory udfFactory,
    final Map<String, SqlType> lambdaMapping) {
// CHECKSTYLE_RULES.ON: CyclomaticComplexity
final List<Expression> arguments = functionCall.getArguments();
final List<SqlArgument> functionArgumentTypes = firstPassOverFunctionArguments(arguments, expressionTypeManager, lambdaMapping);
final KsqlScalarFunction function = udfFactory.getFunction(functionArgumentTypes);
final SqlType returnSchema;
final List<ArgumentInfo> argumentInfoForFunction = new ArrayList<>();
if (!functionCall.hasLambdaFunctionCallArguments()) {
returnSchema = function.getReturnType(functionArgumentTypes);
return FunctionTypeInfo.of(
    functionArgumentTypes.stream()
        .map(argument -> ArgumentInfo.of(argument, new HashMap<>(lambdaMapping)))
        .collect(Collectors.toList()),
    returnSchema,
    function);
} else {
final List<ParamType> paramTypes = function.parameters();
final Map<GenericType, SqlType> reservedGenerics = new HashMap<>();
final List<SqlArgument> functionArgumentTypesWithResolvedLambdaType = new ArrayList<>();
// second pass over the function arguments to properly do lambda type checking
for (int i = 0; i < arguments.size(); i++) {
final Expression expression = arguments.get(i);
final ParamType parameter = paramTypes.get(i);
if (expression instanceof LambdaFunctionCall) {
// lambda node at this index in the function node argument list
if (!(parameter instanceof LambdaType)) {
throw new RuntimeException(String.format(
    "Error while processing lambda function. "
        + "Expected lambda parameter but was %s. "
        + "This is most likely an internal error and a "
        + "GitHub issue should be filed for debugging. "
        + "Include the function name, the parameters passed in, the expected "
        + "signature, and any other relevant information.",
    parameter.toString()));
}
final ArrayList<SqlType> lambdaSqlTypes = new ArrayList<>();
final Map<String, SqlType> variableTypeMapping = mapLambdaParametersToTypes((LambdaFunctionCall) expression, (LambdaType) parameter, reservedGenerics, lambdaSqlTypes);
final Map<String, SqlType> updateLambdaMapping = LambdaMappingUtil.resolveOldAndNewLambdaMapping(variableTypeMapping, lambdaMapping);
final SqlType resolvedLambdaReturnType = expressionTypeManager.getExpressionSqlType(expression, updateLambdaMapping);
final SqlArgument lambdaArgument = SqlArgument.of(SqlLambdaResolved.of(lambdaSqlTypes, resolvedLambdaReturnType));
functionArgumentTypesWithResolvedLambdaType.add(lambdaArgument);
argumentInfoForFunction.add(ArgumentInfo.of(lambdaArgument, new HashMap<>(updateLambdaMapping)));
} else {
functionArgumentTypesWithResolvedLambdaType.add(functionArgumentTypes.get(i));
argumentInfoForFunction.add(ArgumentInfo.of(functionArgumentTypes.get(i), new HashMap<>(lambdaMapping)));
}
if (GenericsUtil.hasGenerics(parameter)) {
final Pair<Boolean, Optional<KsqlException>> success = GenericsUtil.reserveGenerics(parameter, functionArgumentTypesWithResolvedLambdaType.get(i), reservedGenerics);
if (!success.getLeft() && success.getRight().isPresent()) {
throw success.getRight().get();
}
}
}
returnSchema = function.getReturnType(functionArgumentTypesWithResolvedLambdaType);
return new FunctionTypeInfo(argumentInfoForFunction, returnSchema, function);
}
}
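The firstPassOverFunctionArguments helper referenced above is not part of this excerpt. A minimal sketch of the first pass described in the Javadoc follows, assuming SqlLambda.of(int) and LambdaFunctionCall.getArguments() behave as their names suggest; the real ksql implementation may differ.
// Sketch only: lambdas are represented by an unresolved SqlLambda that records
// just the number of lambda parameters; all other arguments are typed normally.
private static List<SqlArgument> firstPassOverFunctionArguments(
    final List<Expression> arguments,
    final ExpressionTypeManager expressionTypeManager,
    final Map<String, SqlType> lambdaMapping
) {
  final List<SqlArgument> functionArgumentTypes = new ArrayList<>();
  for (final Expression expression : arguments) {
    if (expression instanceof LambdaFunctionCall) {
      // assumed factory: SqlLambda.of(numberOfLambdaParameters)
      functionArgumentTypes.add(SqlArgument.of(
          SqlLambda.of(((LambdaFunctionCall) expression).getArguments().size())));
    } else {
      functionArgumentTypes.add(SqlArgument.of(
          expressionTypeManager.getExpressionSqlType(expression, lambdaMapping)));
    }
  }
  return functionArgumentTypes;
}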
use of io.confluent.ksql.function.KsqlScalarFunction in project ksql by confluentinc.
the class SqlToJavaVisitorTest method shouldPostfixFunctionInstancesWithUniqueId.
@Test
public void shouldPostfixFunctionInstancesWithUniqueId() {
// Given:
final UdfFactory ssFactory = mock(UdfFactory.class);
final KsqlScalarFunction ssFunction = mock(KsqlScalarFunction.class);
final UdfFactory catFactory = mock(UdfFactory.class);
final KsqlScalarFunction catFunction = mock(KsqlScalarFunction.class);
givenUdf("SUBSTRING", ssFactory, ssFunction, SqlTypes.STRING);
when(ssFunction.parameters()).thenReturn(ImmutableList.of(ParamTypes.STRING, ParamTypes.INTEGER, ParamTypes.INTEGER));
givenUdf("CONCAT", catFactory, catFunction, SqlTypes.STRING);
when(catFunction.parameters()).thenReturn(ImmutableList.of(ParamTypes.STRING, ParamTypes.STRING));
final FunctionName ssName = FunctionName.of("SUBSTRING");
final FunctionName catName = FunctionName.of("CONCAT");
final FunctionCall substring1 = new FunctionCall(ssName, ImmutableList.of(COL1, new IntegerLiteral(1), new IntegerLiteral(3)));
final FunctionCall substring2 = new FunctionCall(ssName, ImmutableList.of(COL1, new IntegerLiteral(4), new IntegerLiteral(5)));
final FunctionCall concat = new FunctionCall(catName, ImmutableList.of(new StringLiteral("-"), substring2));
final Expression expression = new FunctionCall(catName, ImmutableList.of(substring1, concat));
// When:
final String javaExpression = sqlToJavaVisitor.process(expression);
// Then:
assertThat(javaExpression, is("((String) CONCAT_0.evaluate(" + "((String) SUBSTRING_1.evaluate(COL1, 1, 3)), " + "((String) CONCAT_2.evaluate(\"-\"," + " ((String) SUBSTRING_3.evaluate(COL1, 4, 5))))))"));
}
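The assertion above shows that every call site gets its own numeric suffix (CONCAT_0, SUBSTRING_1, CONCAT_2, SUBSTRING_3), so repeated uses of the same UDF bind to distinct generated fields. One simple way such a suffixing scheme could work is sketched below; the class and method names are illustrative, not the actual SqlToJavaVisitor internals.
// Illustrative only: assigns NAME_<n> identifiers in visitation order so each
// function call site references its own Kudf instance in the generated code.
final class FunctionInstanceNamer {
  private int counter = 0;

  String nameFor(final String functionName) {
    return functionName + "_" + counter++;
  }
}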
use of io.confluent.ksql.function.KsqlScalarFunction in project ksql by confluentinc.
the class TermCompiler method visitFunctionCall.
@Override
public Term visitFunctionCall(final FunctionCall node, final Context context) {
final UdfFactory udfFactory = functionRegistry.getUdfFactory(node.getName());
final FunctionTypeInfo argumentsAndContext = FunctionArgumentsUtil.getFunctionTypeInfo(expressionTypeManager, node, udfFactory, context.getLambdaSqlTypeMapping());
final List<ArgumentInfo> argumentInfos = argumentsAndContext.getArgumentInfos();
final KsqlScalarFunction function = argumentsAndContext.getFunction();
final SqlType functionReturnSchema = argumentsAndContext.getReturnType();
final Class<?> javaClass = SchemaConverters.sqlToJavaConverter().toJavaType(functionReturnSchema);
final List<Expression> arguments = node.getArguments();
final List<Term> args = new ArrayList<>();
for (int i = 0; i < arguments.size(); i++) {
final Expression arg = arguments.get(i);
// lambda arguments and null values are considered to have null type
final SqlType sqlType = argumentInfos.get(i).getSqlArgument().getSqlType().orElse(null);
final ParamType paramType;
if (i >= function.parameters().size() - 1 && function.isVariadic()) {
paramType = ((ArrayType) Iterables.getLast(function.parameters())).element();
} else {
paramType = function.parameters().get(i);
}
// This will attempt to cast to the expected argument type and will throw an error if
// it cannot be done.
final Term argTerm = process(convertArgument(arg, sqlType, paramType), new Context(argumentInfos.get(i).getLambdaSqlTypeMapping()));
args.add(argTerm);
}
final Kudf kudf = function.newInstance(ksqlConfig);
return new FunctionCallTerm(kudf, args, javaClass, functionReturnSchema);
}
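The convertArgument helper called above is not shown in this excerpt. The idea, per the inline comment, is to wrap the argument in a cast when its SQL type does not already match the declared parameter type; the sketch below is an assumption about its shape (including the converter calls), not the exact ksql code.
// Sketch only: leaves the argument alone when no concrete target type is known
// or the types already line up, otherwise casts to the parameter's SQL type.
private Expression convertArgument(
    final Expression argument,
    final SqlType argType,
    final ParamType paramType
) {
  if (argType == null || GenericsUtil.hasGenerics(paramType)) {
    return argument;
  }
  final SqlType targetType = SchemaConverters.functionToSqlConverter().toSqlType(paramType);
  if (targetType.equals(argType)) {
    return argument;
  }
  return new Cast(argument, new Type(targetType));
}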
use of io.confluent.ksql.function.KsqlScalarFunction in project ksql by confluentinc.
the class SqlToJavaVisitorTest method shouldGenerateCorrectCodeForNestedLambdas.
@Test
public void shouldGenerateCorrectCodeForNestedLambdas() {
// Given:
final UdfFactory udfFactory = mock(UdfFactory.class);
final KsqlScalarFunction udf = mock(KsqlScalarFunction.class);
givenUdf("nested", udfFactory, udf, SqlTypes.DOUBLE);
when(udf.parameters()).thenReturn(ImmutableList.of(
    ArrayType.of(ParamTypes.DOUBLE),
    ParamTypes.DOUBLE,
    LambdaType.of(ImmutableList.of(ParamTypes.DOUBLE, ParamTypes.INTEGER), ParamTypes.INTEGER)));
final Expression expression = new ArithmeticBinaryExpression(
    Operator.ADD,
    new FunctionCall(
        FunctionName.of("nested"),
        ImmutableList.of(
            ARRAYCOL,
            new IntegerLiteral(0),
            new LambdaFunctionCall(
                ImmutableList.of("A", "B"),
                new ArithmeticBinaryExpression(
                    Operator.ADD,
                    new FunctionCall(
                        FunctionName.of("nested"),
                        ImmutableList.of(
                            ARRAYCOL,
                            new IntegerLiteral(0),
                            new LambdaFunctionCall(
                                ImmutableList.of("Q", "V"),
                                new ArithmeticBinaryExpression(
                                    Operator.ADD,
                                    new LambdaVariable("Q"),
                                    new LambdaVariable("V"))))),
                    new LambdaVariable("B"))))),
    new IntegerLiteral(5));
// When:
final String javaExpression = sqlToJavaVisitor.process(expression);
// Then:
assertThat(javaExpression, equalTo(
    "(((Double) nested_0.evaluate(COL4, (Double)NullSafe.apply(0,new Function() {\n"
        + " @Override\n"
        + " public Object apply(Object arg1) {\n"
        + " final Integer val = (Integer) arg1;\n"
        + " return val.doubleValue();\n"
        + " }\n"
        + "}), new BiFunction() {\n"
        + " @Override\n"
        + " public Object apply(Object arg1, Object arg2) {\n"
        + " final Double A = (Double) arg1;\n"
        + " final Integer B = (Integer) arg2;\n"
        + " return (((Double) nested_1.evaluate(COL4, (Double)NullSafe.apply(0,new Function() {\n"
        + " @Override\n"
        + " public Object apply(Object arg1) {\n"
        + " final Integer val = (Integer) arg1;\n"
        + " return val.doubleValue();\n"
        + " }\n"
        + "}), new BiFunction() {\n"
        + " @Override\n"
        + " public Object apply(Object arg1, Object arg2) {\n"
        + " final Double Q = (Double) arg1;\n"
        + " final Integer V = (Integer) arg2;\n"
        + " return (Q + V);\n"
        + " }\n"
        + "})) + B);\n"
        + " }\n"
        + "})) + 5)"));
}
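In the expected output above, the one-parameter lambda is generated as a java.util.function.Function and the two-parameter lambdas as BiFunction. A minimal sketch of how a code generator might pick the interface by lambda arity is below; it is illustrative only and omits the argument casts and NullSafe handling shown in the expected string.
// Illustrative only: maps lambda arity to the functional interface name used
// in the generated Java source; unsupported arities are rejected.
static String functionalInterfaceFor(final int lambdaArity) {
  switch (lambdaArity) {
    case 1:
      return "Function";
    case 2:
      return "BiFunction";
    default:
      throw new IllegalArgumentException("Unsupported lambda arity: " + lambdaArity);
  }
}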