Use of org.apache.flink.table.catalog.CatalogFunctionImpl in project flink by apache: class HiveCatalog, method createFunction.
// ------ functions ------

@Override
public void createFunction(ObjectPath functionPath, CatalogFunction function, boolean ignoreIfExists)
        throws FunctionAlreadyExistException, DatabaseNotExistException, CatalogException {
    checkNotNull(functionPath, "functionPath cannot be null");
    checkNotNull(function, "function cannot be null");

    Function hiveFunction;
    if (function instanceof CatalogFunctionImpl) {
        hiveFunction = instantiateHiveFunction(functionPath, function);
    } else {
        throw new CatalogException(
                String.format("Unsupported catalog function type %s", function.getClass().getName()));
    }

    try {
        client.createFunction(hiveFunction);
    } catch (NoSuchObjectException e) {
        throw new DatabaseNotExistException(getName(), functionPath.getDatabaseName(), e);
    } catch (AlreadyExistsException e) {
        if (!ignoreIfExists) {
            throw new FunctionAlreadyExistException(getName(), functionPath, e);
        }
    } catch (TException e) {
        throw new CatalogException(
                String.format("Failed to create function %s", functionPath.getFullName()), e);
    }
}
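For reference, a minimal caller-side sketch of registering a function through this method. The catalog name, conf directory, function name, and the com.example.MyLowerUdf class are placeholders, not taken from the Flink sources above.

import org.apache.flink.table.catalog.CatalogFunction;
import org.apache.flink.table.catalog.CatalogFunctionImpl;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class CreateFunctionSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder catalog name, default database, and Hive conf dir.
        HiveCatalog hiveCatalog = new HiveCatalog("myhive", "default", "/opt/hive-conf");
        hiveCatalog.open();

        // CatalogFunctionImpl is the only CatalogFunction implementation accepted by createFunction above.
        CatalogFunction function = new CatalogFunctionImpl("com.example.MyLowerUdf");

        // ignoreIfExists = true: an existing function with the same name is kept silently.
        hiveCatalog.createFunction(new ObjectPath("default", "my_lower"), function, true);

        hiveCatalog.close();
    }
}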
Use of org.apache.flink.table.catalog.CatalogFunctionImpl in project flink by apache: class SqlToOperationConverter, method convertCreateFunction.
/** Convert CREATE FUNCTION statement. */
private Operation convertCreateFunction(SqlCreateFunction sqlCreateFunction) {
    UnresolvedIdentifier unresolvedIdentifier =
            UnresolvedIdentifier.of(sqlCreateFunction.getFunctionIdentifier());

    if (sqlCreateFunction.isSystemFunction()) {
        return new CreateTempSystemFunctionOperation(
                unresolvedIdentifier.getObjectName(),
                sqlCreateFunction.getFunctionClassName().getValueAs(String.class),
                sqlCreateFunction.isIfNotExists(),
                parseLanguage(sqlCreateFunction.getFunctionLanguage()));
    } else {
        FunctionLanguage language = parseLanguage(sqlCreateFunction.getFunctionLanguage());
        CatalogFunction catalogFunction =
                new CatalogFunctionImpl(
                        sqlCreateFunction.getFunctionClassName().getValueAs(String.class), language);
        ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);

        return new CreateCatalogFunctionOperation(
                identifier, catalogFunction, sqlCreateFunction.isIfNotExists(), sqlCreateFunction.isTemporary());
    }
}
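For orientation, a hedged sketch of the two SQL forms that reach the branches above, assuming a Flink version in which TableEnvironment#executeSql is available. The function names and the com.example.MyLowerUdf class are hypothetical; the class would have to be on the classpath for the statements to succeed.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CreateFunctionStatementsSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

        // isSystemFunction() branch: converted to a CreateTempSystemFunctionOperation.
        tEnv.executeSql("CREATE TEMPORARY SYSTEM FUNCTION my_lower AS 'com.example.MyLowerUdf' LANGUAGE JAVA");

        // else branch: a CatalogFunctionImpl wrapped in a CreateCatalogFunctionOperation.
        tEnv.executeSql("CREATE FUNCTION IF NOT EXISTS my_lower2 AS 'com.example.MyLowerUdf' LANGUAGE JAVA");
    }
}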
Use of org.apache.flink.table.catalog.CatalogFunctionImpl in project flink by apache: class SqlToOperationConverter, method convertAlterFunction.
/** Convert ALTER FUNCTION statement. */
private Operation convertAlterFunction(SqlAlterFunction sqlAlterFunction) {
    if (sqlAlterFunction.isSystemFunction()) {
        throw new ValidationException("Alter temporary system function is not supported");
    }

    FunctionLanguage language = parseLanguage(sqlAlterFunction.getFunctionLanguage());
    CatalogFunction catalogFunction =
            new CatalogFunctionImpl(
                    sqlAlterFunction.getFunctionClassName().getValueAs(String.class), language);
    UnresolvedIdentifier unresolvedIdentifier =
            UnresolvedIdentifier.of(sqlAlterFunction.getFunctionIdentifier());
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);

    return new AlterCatalogFunctionOperation(
            identifier, catalogFunction, sqlAlterFunction.isIfExists(), sqlAlterFunction.isTemporary());
}
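Analogously, a small sketch of the operation this converter produces for a non-system ALTER FUNCTION. The identifier, class name, and flag values are made up for illustration, and the operation class is assumed to live in org.apache.flink.table.operations.ddl.

import org.apache.flink.table.catalog.CatalogFunction;
import org.apache.flink.table.catalog.CatalogFunctionImpl;
import org.apache.flink.table.catalog.FunctionLanguage;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.AlterCatalogFunctionOperation;

public class AlterFunctionOperationSketch {
    public static void main(String[] args) {
        // Roughly what ALTER FUNCTION IF EXISTS mycat.mydb.my_lower AS 'com.example.MyLowerUdfV2' LANGUAGE JAVA becomes.
        CatalogFunction newDefinition = new CatalogFunctionImpl("com.example.MyLowerUdfV2", FunctionLanguage.JAVA);
        ObjectIdentifier identifier = ObjectIdentifier.of("mycat", "mydb", "my_lower");

        AlterCatalogFunctionOperation operation =
                new AlterCatalogFunctionOperation(identifier, newDefinition, true, false);
        System.out.println(operation.asSummaryString());
    }
}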
Use of org.apache.flink.table.catalog.CatalogFunctionImpl in project flink by apache: class HiveCatalog, method alterFunction.
@Override
public void alterFunction(ObjectPath functionPath, CatalogFunction newFunction, boolean ignoreIfNotExists)
        throws FunctionNotExistException, CatalogException {
    checkNotNull(functionPath, "functionPath cannot be null");
    checkNotNull(newFunction, "newFunction cannot be null");

    try {
        // check if function exists
        getFunction(functionPath);

        Function hiveFunction;
        if (newFunction instanceof CatalogFunctionImpl) {
            hiveFunction = instantiateHiveFunction(functionPath, newFunction);
        } else {
            throw new CatalogException(
                    String.format("Unsupported catalog function type %s", newFunction.getClass().getName()));
        }

        client.alterFunction(functionPath.getDatabaseName(), functionPath.getObjectName(), hiveFunction);
    } catch (FunctionNotExistException e) {
        if (!ignoreIfNotExists) {
            throw e;
        }
    } catch (TException e) {
        throw new CatalogException(
                String.format("Failed to alter function %s", functionPath.getFullName()), e);
    }
}
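For completeness, a caller-side sketch of this method; the catalog setup mirrors the createFunction sketch above, and the updated implementation class is again a placeholder.

import org.apache.flink.table.catalog.CatalogFunctionImpl;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class AlterFunctionSketch {
    public static void main(String[] args) throws Exception {
        HiveCatalog hiveCatalog = new HiveCatalog("myhive", "default", "/opt/hive-conf");
        hiveCatalog.open();

        // Re-point the existing function at a new implementation class.
        // ignoreIfNotExists = true turns a missing function into a no-op, matching the catch block above.
        hiveCatalog.alterFunction(
                new ObjectPath("default", "my_lower"),
                new CatalogFunctionImpl("com.example.MyLowerUdfV2"),
                true);

        hiveCatalog.close();
    }
}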
Use of org.apache.flink.table.catalog.CatalogFunctionImpl in project flink by apache: class HiveCatalogUdfITCase, method testFlinkUdf.
@Test
public void testFlinkUdf() throws Exception {
    final TableSchema schema = TableSchema.builder()
            .field("name", DataTypes.STRING())
            .field("age", DataTypes.INT())
            .build();

    final Map<String, String> sourceOptions = new HashMap<>();
    sourceOptions.put("connector.type", "filesystem");
    sourceOptions.put("connector.path", getClass().getResource("/csv/test.csv").getPath());
    sourceOptions.put("format.type", "csv");

    CatalogTable source = new CatalogTableImpl(schema, sourceOptions, "Comment.");
    hiveCatalog.createTable(new ObjectPath(HiveCatalog.DEFAULT_DB, sourceTableName), source, false);

    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudf"), new CatalogFunctionImpl(TestHiveSimpleUDF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "mygenericudf"), new CatalogFunctionImpl(TestHiveGenericUDF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudtf"), new CatalogFunctionImpl(TestHiveUDTF.class.getCanonicalName()), false);
    hiveCatalog.createFunction(new ObjectPath(HiveCatalog.DEFAULT_DB, "myudaf"), new CatalogFunctionImpl(GenericUDAFSum.class.getCanonicalName()), false);

    testUdf(true);
    testUdf(false);
}
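A small follow-up sketch, not part of HiveCatalogUdfITCase: one way to verify that the four UDFs above were registered, reusing the test's hiveCatalog field and JUnit's assertTrue. The helper method name is hypothetical.

import static org.junit.Assert.assertTrue;

import org.apache.flink.table.catalog.ObjectPath;

// Hypothetical helper; Catalog#functionExists checks the backing Hive metastore.
private void assertUdfsRegistered() throws Exception {
    for (String name : new String[] {"myudf", "mygenericudf", "myudtf", "myudaf"}) {
        assertTrue(
                "expected function " + name + " in " + HiveCatalog.DEFAULT_DB,
                hiveCatalog.functionExists(new ObjectPath(HiveCatalog.DEFAULT_DB, name)));
    }
}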