Example usage of org.apache.flink.table.catalog.CatalogFunction in the Apache Flink project.
From the class HiveCatalog, method alterFunction:
@Override
public void alterFunction(ObjectPath functionPath, CatalogFunction newFunction, boolean ignoreIfNotExists) throws FunctionNotExistException, CatalogException {
    checkNotNull(functionPath, "functionPath cannot be null");
    checkNotNull(newFunction, "newFunction cannot be null");
    try {
        // Ensure the function exists first; getFunction throws FunctionNotExistException otherwise.
        getFunction(functionPath);
        // Only CatalogFunctionImpl can be translated into a Hive metastore Function.
        if (!(newFunction instanceof CatalogFunctionImpl)) {
            throw new CatalogException(String.format("Unsupported catalog function type %s", newFunction.getClass().getName()));
        }
        Function alteredFunction = instantiateHiveFunction(functionPath, newFunction);
        client.alterFunction(functionPath.getDatabaseName(), functionPath.getObjectName(), alteredFunction);
    } catch (FunctionNotExistException e) {
        // Suppress the error only when the caller explicitly tolerates a missing function.
        if (!ignoreIfNotExists) {
            throw e;
        }
    } catch (TException e) {
        throw new CatalogException(String.format("Failed to alter function %s", functionPath.getFullName()), e);
    }
}
Example usage of org.apache.flink.table.catalog.CatalogFunction in the Apache Flink project.
From the class HiveParserDDLSemanticAnalyzer, method convertCreateFunction:
/**
 * Converts a CREATE [TEMPORARY] FUNCTION AST node into the corresponding operation.
 * AST shape: ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
 */
private Operation convertCreateFunction(HiveParserASTNode ast) {
    String name = ast.getChild(0).getText().toLowerCase();
    boolean temporary = ast.getFirstChildWithType(HiveASTParser.TOK_TEMPORARY) != null;
    String className = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    if (temporary) {
        // A temporary function must not carry a catalog/database qualifier.
        if (FunctionUtils.isQualifiedFunctionName(name)) {
            throw new ValidationException("Temporary function cannot be created with a qualified name.");
        }
        FunctionDefinition definition = funcDefFactory.createFunctionDefinition(name, new CatalogFunctionImpl(className, FunctionLanguage.JAVA));
        return new CreateTempSystemFunctionOperation(name, false, definition);
    }
    ObjectIdentifier identifier = parseObjectIdentifier(name);
    return new CreateCatalogFunctionOperation(identifier, new CatalogFunctionImpl(className, FunctionLanguage.JAVA), false, false);
}
Example usage of org.apache.flink.table.catalog.CatalogFunction in the Apache Flink project.
From the class HiveCatalogGenericMetadataTest, method testFunctionWithNonExistClass:
// ------ functions ------
@Test
public void testFunctionWithNonExistClass() throws Exception {
    // Verifies the Hive catalog never attempts to load/validate the function class
    // on create or alter, for Scala, Java and Python function languages alike.
    catalog.createDatabase(db1, createDb(), false);
    CatalogFunction scalaFunction = new CatalogFunctionImpl("non.exist.scala.class", FunctionLanguage.SCALA);
    catalog.createFunction(path1, scalaFunction, false);
    assertStoredFunctionMatches(scalaFunction);
    // alter the function
    CatalogFunction javaFunction = new CatalogFunctionImpl("non.exist.java.class", FunctionLanguage.JAVA);
    catalog.alterFunction(path1, javaFunction, false);
    assertStoredFunctionMatches(javaFunction);
    CatalogFunction pythonFunction = new CatalogFunctionImpl("non.exist.python.class", FunctionLanguage.PYTHON);
    catalog.alterFunction(path1, pythonFunction, false);
    assertStoredFunctionMatches(pythonFunction);
}

/** Asserts the function stored under path1 matches the expected class name and language. */
private void assertStoredFunctionMatches(CatalogFunction expected) throws Exception {
    assertEquals(expected.getClassName(), catalog.getFunction(path1).getClassName());
    assertEquals(expected.getFunctionLanguage(), catalog.getFunction(path1).getFunctionLanguage());
}
Example usage of org.apache.flink.table.catalog.CatalogFunction in the Apache Flink project.
From the class SqlToOperationConverterTest, method testCreateTableWithWatermark:
@Test
public void testCreateTableWithWatermark() throws FunctionAlreadyExistException, DatabaseNotExistException {
    // Register the catalog function referenced by the watermark expression below.
    CatalogFunction watermarkFunction = new CatalogFunctionImpl(JavaUserDefinedScalarFunctions.JavaFunc5.class.getName());
    catalog.createFunction(ObjectPath.fromString("default.myfunc"), watermarkFunction, true);
    final String sql = "create table source_table(\n" + " a int,\n" + " b bigint,\n" + " c timestamp(3),\n" + " watermark for `c` as myfunc(c, 1) - interval '5' second\n" + ") with (\n" + " 'connector.type' = 'kafka')\n";
    final FlinkPlannerImpl flinkPlanner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
    final CalciteParser calciteParser = getParserBySqlDialect(SqlDialect.DEFAULT);
    SqlNode sqlNode = calciteParser.parse(sql);
    assertThat(sqlNode).isInstanceOf(SqlCreateTable.class);
    Operation converted = SqlToOperationConverter.convert(flinkPlanner, catalogManager, sqlNode).get();
    assertThat(converted).isInstanceOf(CreateTableOperation.class);
    CatalogTable catalogTable = ((CreateTableOperation) converted).getCatalogTable();
    // The watermark expression must be resolved to the fully-qualified function name.
    String[][] expectedEntries = {
        {"schema.0.name", "a"},
        {"schema.0.data-type", "INT"},
        {"schema.1.name", "b"},
        {"schema.1.data-type", "BIGINT"},
        {"schema.2.name", "c"},
        {"schema.2.data-type", "TIMESTAMP(3)"},
        {"schema.watermark.0.rowtime", "c"},
        {"schema.watermark.0.strategy.expr", "`builtin`.`default`.`myfunc`(`c`, 1) - INTERVAL '5' SECOND"},
        {"schema.watermark.0.strategy.data-type", "TIMESTAMP(3)"},
        {"connector.type", "kafka"}
    };
    Map<String, String> expected = new HashMap<>();
    for (String[] entry : expectedEntries) {
        expected.put(entry[0], entry[1]);
    }
    assertThat(catalogTable.toProperties()).isEqualTo(expected);
}
Example usage of org.apache.flink.table.catalog.CatalogFunction in the Apache Flink project.
From the class FunctionITCase, method testAlterFunction:
@Test
public void testAlterFunction() throws Exception {
    // ALTER FUNCTION must replace the class name stored for an existing catalog function.
    final String createDdl = "create function f3 as 'org.apache.flink.function.TestFunction'";
    final String alterDdl = "alter function f3 as 'org.apache.flink.function.TestFunction2'";
    final ObjectPath functionPath = new ObjectPath("default_database", "f3");
    assertTrue(tEnv().getCatalog("default_catalog").isPresent());
    Catalog defaultCatalog = tEnv().getCatalog("default_catalog").get();
    tEnv().executeSql(createDdl);
    assertEquals("org.apache.flink.function.TestFunction", defaultCatalog.getFunction(functionPath).getClassName());
    tEnv().executeSql(alterDdl);
    assertEquals("org.apache.flink.function.TestFunction2", defaultCatalog.getFunction(functionPath).getClassName());
}
Aggregations