Use of org.apache.calcite.sql.SqlFunction in project flink by splunk.
In class SqlValidatorImpl, method deriveConstructorType.
public RelDataType deriveConstructorType(
        SqlValidatorScope scope,
        SqlCall call,
        SqlFunction unresolvedConstructor,
        SqlFunction resolvedConstructor,
        List<RelDataType> argTypes) {
    SqlIdentifier sqlIdentifier = unresolvedConstructor.getSqlIdentifier();
    assert sqlIdentifier != null;
    RelDataType type = catalogReader.getNamedType(sqlIdentifier);
    if (type == null) {
        // TODO jvs 12-Feb-2005: proper type name formatting
        throw newValidationError(
                sqlIdentifier, RESOURCE.unknownDatatypeName(sqlIdentifier.toString()));
    }
    if (resolvedConstructor == null) {
        if (call.operandCount() > 0) {
            // no user-defined constructor could be found
            throw handleUnresolvedFunction(call, unresolvedConstructor, argTypes, null);
        }
    } else {
        SqlCall testCall =
                resolvedConstructor.createCall(call.getParserPosition(), call.getOperandList());
        RelDataType returnType = resolvedConstructor.validateOperands(this, scope, testCall);
        assert type == returnType;
    }
    if (config.identifierExpansion()) {
        if (resolvedConstructor != null) {
            ((SqlBasicCall) call).setOperator(resolvedConstructor);
        } else {
            // fake a fully-qualified call to the default constructor
            ((SqlBasicCall) call).setOperator(
                    new SqlFunction(
                            type.getSqlIdentifier(),
                            ReturnTypes.explicit(type),
                            null,
                            null,
                            null,
                            SqlFunctionCategory.USER_DEFINED_CONSTRUCTOR));
        }
    }
    return type;
}
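The method is not called directly by user code; below is a hedged sketch of the caller shape, assuming overload resolution has already produced the (possibly null) resolved constructor and that validator, scope, call, unresolved, resolved, and argTypes are in scope. It is not the verbatim Calcite call site.
// Sketch only: hand the resolution result to the validator and get back the
// catalog-registered named type for the constructor call.
RelDataType rowType =
        validator.deriveConstructorType(scope, call, unresolved, resolved, argTypes);
// Under config.identifierExpansion(), the call's operator has also been replaced,
// so unparsing the call now yields a fully-qualified constructor invocation.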
Use of org.apache.calcite.sql.SqlFunction in project flink by splunk.
In class SqlValidatorImpl, method validateCall.
public void validateCall(SqlCall call, SqlValidatorScope scope) {
    final SqlOperator operator = call.getOperator();
    if ((call.operandCount() == 0)
            && (operator.getSyntax() == SqlSyntax.FUNCTION_ID)
            && !call.isExpanded()
            && !this.config.sqlConformance().allowNiladicParentheses()) {
        // For example, "LOCALTIME()" is illegal under such a conformance; it should be
        // written as "LOCALTIME", which would have been handled as a SqlIdentifier.
        throw handleUnresolvedFunction(call, (SqlFunction) operator, ImmutableList.of(), null);
    }
    SqlValidatorScope operandScope = scope.getOperandScope(call);
    if (operator instanceof SqlFunction
            && ((SqlFunction) operator).getFunctionType() == SqlFunctionCategory.MATCH_RECOGNIZE
            && !(operandScope instanceof MatchRecognizeScope)) {
        throw newValidationError(
                call, Static.RESOURCE.functionMatchRecognizeOnly(call.toString()));
    }
    // Delegate validation to the operator.
    operator.validateCall(call, this, scope, operandScope);
}
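validateCall is reached indirectly, when a parsed statement is handed to the validator. A minimal usage sketch follows, assuming `validator` is an already configured SqlValidator; which conformance settings allow niladic parentheses depends on the chosen SqlConformance.
// Parse and validate a statement; every SqlCall in the tree passes through validateCall.
SqlNode parsed = SqlParser.create("SELECT LOCALTIME").parseQuery(); // throws SqlParseException
SqlNode validated = validator.validate(parsed);
// With a conformance that disallows niladic parentheses, "SELECT LOCALTIME()" would
// instead be rejected in validateCall via handleUnresolvedFunction.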
Use of org.apache.calcite.sql.SqlFunction in project flink by splunk.
In class LegacyScalarFunctionConvertRule, method convert.
@Override
public Optional<RexNode> convert(CallExpression call, ConvertContext context) {
    FunctionDefinition def = call.getFunctionDefinition();
    if (def instanceof ScalarFunctionDefinition) {
        ScalarFunction scalaFunc = ((ScalarFunctionDefinition) def).getScalarFunction();
        FunctionIdentifier identifier =
                call.getFunctionIdentifier()
                        .orElse(FunctionIdentifier.of(generateInlineFunctionName(scalaFunc)));
        SqlFunction sqlFunction =
                UserDefinedFunctionUtils.createScalarSqlFunction(
                        identifier, scalaFunc.toString(), scalaFunc, context.getTypeFactory());
        return Optional.of(
                context.getRelBuilder()
                        .call(sqlFunction, toRexNodes(context, call.getChildren())));
    }
    return Optional.empty();
}
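A hedged sketch of how rules of this shape are typically consulted: the expression converter tries each CallExpressionConvertRule in turn and takes the first non-empty result. `rules`, `call`, and `context` are assumed to be supplied by the converter; this is not the exact Flink driver code.
for (CallExpressionConvertRule rule : rules) {
    Optional<RexNode> rex = rule.convert(call, context);
    if (rex.isPresent()) {
        return rex.get(); // first rule that knows the call wins
    }
}
throw new TableException("Unsupported call expression: " + call);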
Use of org.apache.calcite.sql.SqlFunction in project flink-mirror by flink-ci.
In class FunctionCatalogOperatorTable, method convertToBridgingSqlFunction.
private Optional<SqlFunction> convertToBridgingSqlFunction(
        @Nullable SqlFunctionCategory category, ContextResolvedFunction resolvedFunction) {
    final FunctionDefinition definition = resolvedFunction.getDefinition();
    if (!verifyFunctionKind(category, resolvedFunction)) {
        return Optional.empty();
    }
    final TypeInference typeInference;
    try {
        typeInference = definition.getTypeInference(dataTypeFactory);
    } catch (Throwable t) {
        throw new ValidationException(
                String.format(
                        "An error occurred in the type inference logic of function '%s'.",
                        resolvedFunction),
                t);
    }
    if (typeInference.getOutputTypeStrategy() == TypeStrategies.MISSING) {
        return Optional.empty();
    }
    final SqlFunction function;
    if (definition.getKind() == FunctionKind.AGGREGATE
            || definition.getKind() == FunctionKind.TABLE_AGGREGATE) {
        function =
                BridgingSqlAggFunction.of(
                        dataTypeFactory,
                        typeFactory,
                        SqlKind.OTHER_FUNCTION,
                        resolvedFunction,
                        typeInference);
    } else {
        function =
                BridgingSqlFunction.of(
                        dataTypeFactory,
                        typeFactory,
                        SqlKind.OTHER_FUNCTION,
                        resolvedFunction,
                        typeInference);
    }
    return Optional.of(function);
}
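A hedged usage sketch: inside a SqlOperatorTable#lookupOperatorOverloads implementation, a catalog-resolved function that bridges successfully is appended to the caller-supplied operator list. `resolvedFunction`, `category`, and `operatorList` (a List<SqlOperator>) are assumed to be in scope; the exact lookup wiring is omitted.
// SqlFunction extends SqlOperator, so the bridged function can be added directly.
convertToBridgingSqlFunction(category, resolvedFunction)
        .ifPresent(operatorList::add);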