Use of org.apache.hadoop.hive.ql.plan.CreateFunctionDesc in project hive by apache.
The class FunctionSemanticAnalyzer, method analyzeCreateFunction:
private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
  // ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
  String functionName = ast.getChild(0).getText().toLowerCase();
  boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null);
  String className = unescapeSQLString(ast.getChild(1).getText());

  // Temp functions are not allowed to have qualified names.
  if (isTemporaryFunction && FunctionUtils.isQualifiedFunctionName(functionName)) {
    throw new SemanticException("Temporary function cannot be created with a qualified name.");
  }

  // Find any referenced resources (the USING clause).
  List<ResourceUri> resources = getResourceList(ast);
  if (!isTemporaryFunction && resources == null) {
    SESSION_STATE_LOG.warn("permanent functions created without USING clause will not be replicated.");
  }

  CreateFunctionDesc desc =
      new CreateFunctionDesc(functionName, isTemporaryFunction, className, resources,
          null /* no ReplicationSpec: a user-issued CREATE, not a replication replay */);
  rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
  addEntities(functionName, className, isTemporaryFunction, resources);
}
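For orientation, here is a minimal JDBC sketch of the two statement shapes that reach analyzeCreateFunction. The connection URL, UDF class name, and jar path are placeholders, not values taken from the Hive source above:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateFunctionExample {
  public static void main(String[] args) throws Exception {
    // Placeholder HiveServer2 URL; adjust for your deployment.
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = conn.createStatement()) {
      // Temporary function: must use an unqualified name
      // (analyzeCreateFunction rejects qualified names for temp functions).
      stmt.execute("CREATE TEMPORARY FUNCTION my_upper AS 'com.example.udf.MyUpper'");

      // Permanent function: may be database-qualified; omitting the USING clause
      // would trigger the "will not be replicated" warning logged above.
      stmt.execute("CREATE FUNCTION mydb.my_upper AS 'com.example.udf.MyUpper' "
          + "USING JAR 'hdfs:///udfs/my-udfs.jar'");
    }
  }
}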
Use of org.apache.hadoop.hive.ql.plan.CreateFunctionDesc in project hive by apache.
The class FunctionTask, method execute:
@Override
public int execute(DriverContext driverContext) {
  CreateFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
  if (createFunctionDesc != null) {
    if (createFunctionDesc.isTemp()) {
      return createTemporaryFunction(createFunctionDesc);
    } else {
      try {
        if (createFunctionDesc.getReplicationSpec().isInReplicationScope()) {
          String[] qualifiedNameParts =
              FunctionUtils.getQualifiedFunctionNameParts(createFunctionDesc.getFunctionName());
          String dbName = qualifiedNameParts[0];
          String funcName = qualifiedNameParts[1];
          Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
          if (!createFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
            // If the database is newer than the create event, then noop it.
            LOG.debug("FunctionTask: Create Function {} is skipped as database {} is newer than update",
                funcName, dbName);
            return 0;
          }
        }
        return createPermanentFunction(Hive.get(conf), createFunctionDesc);
      } catch (Exception e) {
        setException(e);
        LOG.error("Failed to create function", e);
        return 1;
      }
    }
  }

  DropFunctionDesc dropFunctionDesc = work.getDropFunctionDesc();
  if (dropFunctionDesc != null) {
    if (dropFunctionDesc.isTemp()) {
      return dropTemporaryFunction(dropFunctionDesc);
    } else {
      try {
        if (dropFunctionDesc.getReplicationSpec().isInReplicationScope()) {
          String[] qualifiedNameParts =
              FunctionUtils.getQualifiedFunctionNameParts(dropFunctionDesc.getFunctionName());
          String dbName = qualifiedNameParts[0];
          String funcName = qualifiedNameParts[1];
          Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
          if (!dropFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
            // If the database is newer than the drop event, then noop it.
            LOG.debug("FunctionTask: Drop Function {} is skipped as database {} is newer than update",
                funcName, dbName);
            return 0;
          }
        }
        return dropPermanentFunction(Hive.get(conf), dropFunctionDesc);
      } catch (Exception e) {
        setException(e);
        LOG.error("Failed to drop function", e);
        return 1;
      }
    }
  }

  if (work.getReloadFunctionDesc() != null) {
    try {
      Hive.get().reloadFunctions();
    } catch (Exception e) {
      setException(e);
      LOG.error("Failed to reload functions", e);
      return 1;
    }
  }

  CreateMacroDesc createMacroDesc = work.getCreateMacroDesc();
  if (createMacroDesc != null) {
    return createMacro(createMacroDesc);
  }

  DropMacroDesc dropMacroDesc = work.getDropMacroDesc();
  if (dropMacroDesc != null) {
    return dropMacro(dropMacroDesc);
  }

  return 0;
}
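The create and drop branches share the same replication gate: an event is replayed only if it is newer than the replication state recorded on the target database's parameters. Below is a standalone sketch of that idea; it is not Hive's actual ReplicationSpec, and the "repl.last.id" property name is an assumption made for illustration:

import java.util.HashMap;
import java.util.Map;

/**
 * Minimal sketch of the event-gating pattern in FunctionTask.execute above,
 * NOT Hive's real ReplicationSpec: skip replaying an event whose id is not
 * strictly newer than the state already recorded on the target database.
 */
public class ReplGateSketch {
  // Assumed database-parameter key holding the last replicated event id.
  static final String LAST_REPL_ID = "repl.last.id";

  static boolean allowEventReplacementInto(Map<String, String> dbProps, long eventId) {
    String last = dbProps.get(LAST_REPL_ID);
    if (last == null) {
      return true; // no replication state recorded: apply the event
    }
    return eventId > Long.parseLong(last); // replay only strictly newer events
  }

  public static void main(String[] args) {
    Map<String, String> dbProps = new HashMap<>();
    dbProps.put(LAST_REPL_ID, "100");
    System.out.println(allowEventReplacementInto(dbProps, 90));  // false: database is newer, noop
    System.out.println(allowEventReplacementInto(dbProps, 150)); // true: event is newer, replay
  }
}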