Use of org.apache.flink.table.functions.FunctionDefinition in the Apache Flink project.
From the class FilterUtils, method getValue.
/**
 * Evaluates a filter expression to a concrete {@link Comparable} value.
 *
 * <p>Supported expression shapes:
 * <ul>
 *   <li>{@link ValueLiteralExpression} — the literal's value, converted to its conversion class;
 *   <li>{@link FieldReferenceExpression} — resolved through {@code getter} by field name;
 *   <li>single-argument {@link CallExpression} of UPPER/LOWER — applied to the child's value.
 * </ul>
 *
 * @param expr the expression to evaluate
 * @param getter resolves a referenced field name to its current value
 * @return the evaluated value, or {@code null} if the literal/child value is absent
 * @throws UnsupportedOperationException for any other expression or function
 */
private static Comparable<?> getValue(Expression expr, Function<String, Comparable<?>> getter) {
    if (expr instanceof ValueLiteralExpression) {
        // Extract the literal using its declared conversion class (e.g. Long for BIGINT).
        Optional<?> value =
                ((ValueLiteralExpression) expr)
                        .getValueAs(
                                ((ValueLiteralExpression) expr)
                                        .getOutputDataType()
                                        .getConversionClass());
        return (Comparable<?>) value.orElse(null);
    }
    if (expr instanceof FieldReferenceExpression) {
        return getter.apply(((FieldReferenceExpression) expr).getName());
    }
    if (expr instanceof CallExpression && expr.getChildren().size() == 1) {
        Object child = getValue(expr.getChildren().get(0), getter);
        if (child == null) {
            // UPPER(NULL)/LOWER(NULL) is NULL; previously this NPE'd on child.toString().
            return null;
        }
        FunctionDefinition functionDefinition = ((CallExpression) expr).getFunctionDefinition();
        if (functionDefinition.equals(UPPER)) {
            return child.toString().toUpperCase();
        } else if (functionDefinition.equals(LOWER)) {
            return child.toString().toLowerCase();
        } else {
            throw new UnsupportedOperationException(String.format("Unrecognized function definition: %s.", functionDefinition));
        }
    }
    throw new UnsupportedOperationException(expr + " not supported!");
}
Use of org.apache.flink.table.functions.FunctionDefinition in the Apache Flink project.
From the class HiveModuleTest, method testHiveBuiltInFunction.
@Test
public void testHiveBuiltInFunction() {
    // Resolve the built-in Hive "reverse" UDF from a fresh HiveModule.
    FunctionDefinition definition = new HiveModule().getFunctionDefinition("reverse").get();
    HiveSimpleUDF reverseUdf = (HiveSimpleUDF) definition;

    DataType[] argumentTypes = {DataTypes.STRING()};
    CallContext context = new HiveUDFCallContext(new Object[0], argumentTypes);

    // Run type inference so the UDF is initialized for a single STRING argument.
    reverseUdf.getTypeInference(null).getOutputTypeStrategy().inferType(context);
    reverseUdf.open(null);

    assertEquals("cba", reverseUdf.eval("abc"));
}
Use of org.apache.flink.table.functions.FunctionDefinition in the Apache Flink project.
From the class HiveParserDDLSemanticAnalyzer, method convertCreateFunction.
/**
 * Translates a Hive CREATE FUNCTION AST node into a Flink operation.
 *
 * <p>AST shape: ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
 *
 * @param ast the parsed CREATE FUNCTION statement
 * @return a temp-system-function or catalog-function create operation
 * @throws ValidationException if a temporary function uses a qualified name
 */
private Operation convertCreateFunction(HiveParserASTNode ast) {
    String functionName = ast.getChild(0).getText().toLowerCase();
    String className = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    boolean isTemporaryFunction = ast.getFirstChildWithType(HiveASTParser.TOK_TEMPORARY) != null;

    if (isTemporaryFunction && FunctionUtils.isQualifiedFunctionName(functionName)) {
        // A qualified name would tie the temp function to a catalog/db, which is not allowed.
        throw new ValidationException("Temporary function cannot be created with a qualified name.");
    }

    CatalogFunction catalogFunction = new CatalogFunctionImpl(className, FunctionLanguage.JAVA);
    if (isTemporaryFunction) {
        FunctionDefinition definition =
                funcDefFactory.createFunctionDefinition(functionName, catalogFunction);
        return new CreateTempSystemFunctionOperation(functionName, false, definition);
    }
    ObjectIdentifier identifier = parseObjectIdentifier(functionName);
    return new CreateCatalogFunctionOperation(identifier, catalogFunction, false, false);
}
Use of org.apache.flink.table.functions.FunctionDefinition in the Apache Flink project.
From the class ValuesOperationFactory, method convertToExpectedType.
/**
 * Attempts to convert {@code sourceExpression} so that its output type becomes
 * {@code targetDataType}.
 *
 * <p>Literals are re-typed in place where possible instead of being wrapped in a cast;
 * ROW/ARRAY/MAP constructor calls are converted element-wise; everything else falls back
 * to an explicit cast if one is supported.
 *
 * @param sourceExpression the resolved expression to convert
 * @param targetDataType the desired output type
 * @param postResolverFactory factory for building post-resolution expressions such as casts
 * @return the converted expression, or empty if no valid conversion exists
 */
private Optional<ResolvedExpression> convertToExpectedType(ResolvedExpression sourceExpression, DataType targetDataType, ExpressionResolver.PostResolverFactory postResolverFactory) {
    LogicalType sourceType = sourceExpression.getOutputDataType().getLogicalType();
    LogicalType targetType = targetDataType.getLogicalType();

    if (sourceExpression instanceof ValueLiteralExpression) {
        // An untyped NULL literal simply adopts the target type.
        if (sourceType.is(NULL)) {
            return Optional.of(valueLiteral(null, targetDataType));
        }
        // The literal's value class may be a valid input conversion class of the target
        // type even though a cast between the logical types is invalid.
        //
        // Example: 1L was derived as BIGINT but the user meant a TIMESTAMP — long is a
        // valid conversion class for TIMESTAMP, while casting BIGINT to TIMESTAMP is an
        // invalid operation.
        Optional<Object> literalValue =
                ((ValueLiteralExpression) sourceExpression).getValueAs(Object.class);
        if (literalValue.isPresent()
                && targetType.supportsInputConversion(literalValue.get().getClass())) {
            Object raw = literalValue.get();
            ValueLiteralExpression retyped =
                    valueLiteral(raw, targetDataType.notNull().bridgedTo(raw.getClass()));
            if (targetType.isNullable()) {
                // Re-typed literal is NOT NULL; add a cast to restore nullability.
                return Optional.of(postResolverFactory.cast(retyped, targetDataType));
            }
            return Optional.of(retyped);
        }
    }

    if (sourceExpression instanceof CallExpression) {
        FunctionDefinition definition =
                ((CallExpression) sourceExpression).getFunctionDefinition();
        if (definition == BuiltInFunctionDefinitions.ROW && targetType.is(ROW)) {
            return convertRowToExpectedType(sourceExpression, (FieldsDataType) targetDataType, postResolverFactory);
        }
        if (definition == BuiltInFunctionDefinitions.ARRAY && targetType.is(ARRAY)) {
            return convertArrayToExpectedType(sourceExpression, (CollectionDataType) targetDataType, postResolverFactory);
        }
        if (definition == BuiltInFunctionDefinitions.MAP && targetType.is(MAP)) {
            return convertMapToExpectedType(sourceExpression, (KeyValueDataType) targetDataType, postResolverFactory);
        }
    }

    // Fallback: explicit cast. Nullability is stripped for the check only — NOTE(review):
    // presumably because the cast validity does not depend on it; verify against
    // supportsExplicitCast semantics.
    if (supportsExplicitCast(sourceType.copy(true), targetType.copy(true))) {
        return Optional.of(postResolverFactory.cast(sourceExpression, targetDataType));
    }
    return Optional.empty();
}
Use of org.apache.flink.table.functions.FunctionDefinition in the Apache Flink project.
From the class FunctionCatalog, method resolvePreciseFunctionReference.
/**
 * Resolves a fully-qualified function reference.
 *
 * <p>Lookup order: 1) temporary functions, 2) catalog functions.
 *
 * @param oi the fully-qualified function identifier
 * @return the resolved function, or empty if neither a temporary nor a catalog
 *     function matches
 */
private Optional<ContextResolvedFunction> resolvePreciseFunctionReference(ObjectIdentifier oi) {
    ObjectIdentifier normalized = FunctionIdentifier.normalizeObjectIdentifier(oi);
    CatalogFunction tempFunction = tempCatalogFunctions.get(normalized);
    if (tempFunction != null) {
        return Optional.of(
                ContextResolvedFunction.temporary(
                        FunctionIdentifier.of(oi),
                        getFunctionDefinition(oi.getObjectName(), tempFunction)));
    }

    Optional<Catalog> maybeCatalog = catalogManager.getCatalog(oi.getCatalogName());
    if (!maybeCatalog.isPresent()) {
        return Optional.empty();
    }
    Catalog catalog = maybeCatalog.get();
    try {
        CatalogFunction catalogFunction =
                catalog.getFunction(new ObjectPath(oi.getDatabaseName(), oi.getObjectName()));
        // Prefer the catalog's own factory, except for Python functions which are
        // resolved through the generic path.
        boolean useCatalogFactory =
                catalog.getFunctionDefinitionFactory().isPresent()
                        && catalogFunction.getFunctionLanguage() != FunctionLanguage.PYTHON;
        FunctionDefinition definition =
                useCatalogFactory
                        ? catalog.getFunctionDefinitionFactory()
                                .get()
                                .createFunctionDefinition(oi.getObjectName(), catalogFunction)
                        : getFunctionDefinition(oi.asSummaryString(), catalogFunction);
        return Optional.of(
                ContextResolvedFunction.permanent(FunctionIdentifier.of(oi), definition));
    } catch (FunctionNotExistException ignored) {
        // No such function in this catalog — fall through to empty.
    }
    return Optional.empty();
}
Aggregations