Use of io.confluent.ksql.schema.ksql.types.SqlType in the ksql project by Confluent.
The class GenericExpressionResolverTest, method shouldParseTime:
@Test
public void shouldParseTime() {
  // Given: a TIME column type and a string literal in HH:mm:ss form
  final SqlType timeType = SqlTypes.TIME;
  final Expression timeLiteral = new StringLiteral("04:40:02");

  // When: the literal is resolved against the TIME type
  final GenericExpressionResolver resolver = new GenericExpressionResolver(
      timeType, FIELD_NAME, registry, config, "insert value", false);
  final Object resolved = resolver.resolve(timeLiteral);

  // Then: 04:40:02 is 16802 seconds past midnight, i.e. 16802000 ms
  assertTrue(resolved instanceof Time);
  assertThat(((Time) resolved).getTime(), is(16802000L));
}
Use of io.confluent.ksql.schema.ksql.types.SqlType in the ksql project by Confluent.
The class FunctionArgumentsUtil, method getFunctionTypeInfo:
/**
* Compute type information given a function call node. Specifically, computes
* the function return type, the types of all arguments, and the types of all
* lambda parameters for arguments that are lambda expressions.
*
* <p>Given a function call node, we have to do a two pass processing of the
* function arguments in order to properly handle any potential lambda functions.</p>
*
* <p>In the first pass, if there are lambda functions, we create a SqlLambda that only contains
* the number of input arguments for the lambda. We pass this first argument list
* to UdfFactory in order to get the correct function. We can make this assumption
* due to Java's handling of type erasure (Function(T,R) is considered the same as
* Function(U,R)).</p>
*
* <p>In the second pass, we use the LambdaType inputTypes field to construct SqlLambdaResolved
* that has the proper input type list and return type. We also need to construct a list of
* lambda type mapping that should be used when processing each function argument subtree.</p>
*
* @param expressionTypeManager an expression type manager
* @param functionCall the function expression
* @param udfFactory udf factory for the function in the expression
* @param lambdaMapping a type context
*
* @return a wrapper that contains a list of function arguments
* (any lambdas are SqlLambdaResolved), the ksql function,
* type contexts for use in further processing the function
* argument child nodes, and the return type of the ksql function
*/
// CHECKSTYLE_RULES.OFF: CyclomaticComplexity
public static FunctionTypeInfo getFunctionTypeInfo(
    final ExpressionTypeManager expressionTypeManager,
    final FunctionCall functionCall,
    final UdfFactory udfFactory,
    final Map<String, SqlType> lambdaMapping) {
  // CHECKSTYLE_RULES.ON: CyclomaticComplexity
  final List<Expression> arguments = functionCall.getArguments();

  // First pass: lambdas are represented only by their argument count so that
  // UdfFactory can select the correct overload (safe due to type erasure).
  final List<SqlArgument> functionArgumentTypes =
      firstPassOverFunctionArguments(arguments, expressionTypeManager, lambdaMapping);
  final KsqlScalarFunction function = udfFactory.getFunction(functionArgumentTypes);

  if (!functionCall.hasLambdaFunctionCallArguments()) {
    // No lambdas: first-pass argument types are already fully resolved.
    final SqlType returnSchema = function.getReturnType(functionArgumentTypes);
    return FunctionTypeInfo.of(
        functionArgumentTypes.stream()
            .map(argument -> ArgumentInfo.of(argument, new HashMap<>(lambdaMapping)))
            .collect(Collectors.toList()),
        returnSchema,
        function);
  }

  // Second pass: resolve each lambda's input and return types, tracking any
  // generics reserved so far so later arguments see consistent bindings.
  final List<ParamType> paramTypes = function.parameters();
  final Map<GenericType, SqlType> reservedGenerics = new HashMap<>();
  final List<SqlArgument> functionArgumentTypesWithResolvedLambdaType = new ArrayList<>();
  final List<ArgumentInfo> argumentInfoForFunction = new ArrayList<>();

  for (int i = 0; i < arguments.size(); i++) {
    final Expression expression = arguments.get(i);
    final ParamType parameter = paramTypes.get(i);

    if (expression instanceof LambdaFunctionCall) {
      // A lambda argument must line up with a lambda parameter in the signature.
      if (!(parameter instanceof LambdaType)) {
        // Fixed: original message concatenated sentences without separators
        // ("...function.Expected..." and "...%sThis is...").
        throw new RuntimeException(String.format(
            "Error while processing lambda function. "
                + "Expected lambda parameter but was %s. "
                + "This is most likely an internal error and a "
                + "Github issue should be filed for debugging. "
                + "Include the function name, the parameters passed in, the expected "
                + "signature, and any other relevant information.",
            parameter.toString()));
      }

      final ArrayList<SqlType> lambdaSqlTypes = new ArrayList<>();
      final Map<String, SqlType> variableTypeMapping = mapLambdaParametersToTypes(
          (LambdaFunctionCall) expression,
          (LambdaType) parameter,
          reservedGenerics,
          lambdaSqlTypes);
      final Map<String, SqlType> updateLambdaMapping =
          LambdaMappingUtil.resolveOldAndNewLambdaMapping(variableTypeMapping, lambdaMapping);
      final SqlType resolvedLambdaReturnType =
          expressionTypeManager.getExpressionSqlType(expression, updateLambdaMapping);
      final SqlArgument lambdaArgument =
          SqlArgument.of(SqlLambdaResolved.of(lambdaSqlTypes, resolvedLambdaReturnType));
      functionArgumentTypesWithResolvedLambdaType.add(lambdaArgument);
      argumentInfoForFunction.add(
          ArgumentInfo.of(lambdaArgument, new HashMap<>(updateLambdaMapping)));
    } else {
      functionArgumentTypesWithResolvedLambdaType.add(functionArgumentTypes.get(i));
      argumentInfoForFunction.add(
          ArgumentInfo.of(functionArgumentTypes.get(i), new HashMap<>(lambdaMapping)));
    }

    if (GenericsUtil.hasGenerics(parameter)) {
      // Reserve generic bindings implied by this argument so subsequent
      // arguments (and lambdas) resolve against the same concrete types.
      final Pair<Boolean, Optional<KsqlException>> success = GenericsUtil.reserveGenerics(
          parameter, functionArgumentTypesWithResolvedLambdaType.get(i), reservedGenerics);
      if (!success.getLeft() && success.getRight().isPresent()) {
        throw success.getRight().get();
      }
    }
  }

  final SqlType returnSchema =
      function.getReturnType(functionArgumentTypesWithResolvedLambdaType);
  return new FunctionTypeInfo(argumentInfoForFunction, returnSchema, function);
}
Use of io.confluent.ksql.schema.ksql.types.SqlType in the ksql project by Confluent.
The class FunctionArgumentsUtil, method mapLambdaParametersToTypes:
/**
 * Maps each lambda argument name to its resolved SQL type, based on the input
 * types declared by the matching lambda parameter in the function signature.
 *
 * @param lambdaFunctionCall the lambda expression whose arguments are being typed
 * @param lambdaParameter the lambda parameter from the function signature
 * @param reservedGenerics generics already bound to concrete SQL types
 * @param lambdaSqlTypes out-param: populated with the resolved input types, in order
 * @return an immutable map from lambda argument name to its resolved SQL type
 * @throws IllegalArgumentException if the lambda's arity does not match the signature
 * @throws RuntimeException if a generic input type has no reserved binding
 */
private static Map<String, SqlType> mapLambdaParametersToTypes(
    final LambdaFunctionCall lambdaFunctionCall,
    final LambdaType lambdaParameter,
    final Map<GenericType, SqlType> reservedGenerics,
    final ArrayList<SqlType> lambdaSqlTypes) {
  if (lambdaFunctionCall.getArguments().size() != lambdaParameter.inputTypes().size()) {
    throw new IllegalArgumentException(
        "Was expecting " + lambdaParameter.inputTypes().size()
            + " arguments but found " + lambdaFunctionCall.getArguments().size()
            + ", " + lambdaFunctionCall.getArguments()
            + ". Check your lambda statement.");
  }

  final Iterator<String> lambdaArgs = lambdaFunctionCall.getArguments().listIterator();
  final HashMap<String, SqlType> variableTypeMapping = new HashMap<>();
  for (final ParamType inputParam : lambdaParameter.inputTypes()) {
    // Resolve each declared input type once, then record it in both outputs
    // (previously the conversion/lookup was performed twice per parameter).
    final SqlType resolvedType;
    if (inputParam instanceof GenericType) {
      final GenericType genericParam = (GenericType) inputParam;
      if (!reservedGenerics.containsKey(genericParam)) {
        throw new RuntimeException(String.format(
            "Could not resolve type for generic %s. "
                + "The generic mapping so far: %s",
            genericParam.toString(), reservedGenerics.toString()));
      }
      resolvedType = reservedGenerics.get(genericParam);
    } else {
      resolvedType = SchemaConverters.functionToSqlConverter().toSqlType(inputParam);
    }
    variableTypeMapping.put(lambdaArgs.next(), resolvedType);
    lambdaSqlTypes.add(resolvedType);
  }
  return ImmutableMap.copyOf(variableTypeMapping);
}
Use of io.confluent.ksql.schema.ksql.types.SqlType in the ksql project by Confluent.
The class TermCompiler, method visitFunctionCall:
/**
 * Compiles a function call into a {@link FunctionCallTerm}, recursively
 * compiling each argument using the lambda type mapping computed for it.
 */
@Override
public Term visitFunctionCall(final FunctionCall node, final Context context) {
  final UdfFactory udfFactory = functionRegistry.getUdfFactory(node.getName());
  final FunctionTypeInfo argumentsAndContext = FunctionArgumentsUtil.getFunctionTypeInfo(
      expressionTypeManager, node, udfFactory, context.getLambdaSqlTypeMapping());
  final List<ArgumentInfo> argumentInfos = argumentsAndContext.getArgumentInfos();
  final KsqlScalarFunction function = argumentsAndContext.getFunction();

  final SqlType functionReturnSchema = argumentsAndContext.getReturnType();
  final Class<?> javaClass =
      SchemaConverters.sqlToJavaConverter().toJavaType(functionReturnSchema);

  final List<Expression> arguments = node.getArguments();
  final List<Term> args = new ArrayList<>();
  for (int i = 0; i < arguments.size(); i++) {
    final Expression arg = arguments.get(i);
    // Lambda arguments and null values are considered to have null type.
    // (Removed a stray empty statement ';' that followed this line.)
    final SqlType sqlType = argumentInfos.get(i).getSqlArgument().getSqlType().orElse(null);

    // For variadic functions, every trailing argument maps onto the element
    // type of the final (array) parameter.
    final ParamType paramType;
    if (i >= function.parameters().size() - 1 && function.isVariadic()) {
      paramType = ((ArrayType) Iterables.getLast(function.parameters())).element();
    } else {
      paramType = function.parameters().get(i);
    }

    // This will attempt to cast to the expected argument type and will throw
    // an error if it cannot be done.
    final Term argTerm = process(
        convertArgument(arg, sqlType, paramType),
        new Context(argumentInfos.get(i).getLambdaSqlTypeMapping()));
    args.add(argTerm);
  }
  final Kudf kudf = function.newInstance(ksqlConfig);
  return new FunctionCallTerm(kudf, args, javaClass, functionReturnSchema);
}
Use of io.confluent.ksql.schema.ksql.types.SqlType in the ksql project by Confluent.
The class LogicalSchema, method compatibleSchema:
/**
* Returns True if this schema is compatible with {@code other} schema.
*/
public boolean compatibleSchema(final LogicalSchema other) {
  final List<Column> ours = columns();
  final List<Column> theirs = other.columns();

  // Schemas of different widths can never be compatible.
  if (ours.size() != theirs.size()) {
    return false;
  }

  // Column-by-column: same name/position, and our column must be implicitly
  // castable to the other schema's column type.
  for (int idx = 0; idx < ours.size(); idx++) {
    final Column ourColumn = ours.get(idx);
    final Column theirColumn = theirs.get(idx);
    if (!ourColumn.equalsIgnoreType(theirColumn)
        || !ourColumn.canImplicitlyCast(theirColumn.type())) {
      return false;
    }
  }
  return true;
}
Aggregations