Example usage of org.apache.hadoop.hive.ql.plan.DropFunctionDesc in the Apache Hive project:
the execute method of the class FunctionTask.
@Override
public int execute(DriverContext driverContext) {
  // Handle CREATE FUNCTION (temporary or permanent).
  CreateFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
  if (createFunctionDesc != null) {
    if (createFunctionDesc.isTemp()) {
      return createTemporaryFunction(createFunctionDesc);
    } else {
      try {
        if (shouldNoopReplicatedEvent(createFunctionDesc.getReplicationSpec(),
            createFunctionDesc.getFunctionName(), "Create")) {
          return 0;
        }
        return createPermanentFunction(Hive.get(conf), createFunctionDesc);
      } catch (Exception e) {
        setException(e);
        LOG.error("Failed to create function", e);
        return 1;
      }
    }
  }
  // Handle DROP FUNCTION (temporary or permanent).
  DropFunctionDesc dropFunctionDesc = work.getDropFunctionDesc();
  if (dropFunctionDesc != null) {
    if (dropFunctionDesc.isTemp()) {
      return dropTemporaryFunction(dropFunctionDesc);
    } else {
      try {
        if (shouldNoopReplicatedEvent(dropFunctionDesc.getReplicationSpec(),
            dropFunctionDesc.getFunctionName(), "Drop")) {
          return 0;
        }
        return dropPermanentFunction(Hive.get(conf), dropFunctionDesc);
      } catch (Exception e) {
        setException(e);
        LOG.error("Failed to drop function", e);
        return 1;
      }
    }
  }
  // Handle RELOAD FUNCTIONS.
  if (work.getReloadFunctionDesc() != null) {
    try {
      Hive.get().reloadFunctions();
    } catch (Exception e) {
      setException(e);
      LOG.error("Failed to reload functions", e);
      return 1;
    }
  }
  // Handle CREATE/DROP MACRO.
  CreateMacroDesc createMacroDesc = work.getCreateMacroDesc();
  if (createMacroDesc != null) {
    return createMacro(createMacroDesc);
  }
  DropMacroDesc dropMacroDesc = work.getDropMacroDesc();
  if (dropMacroDesc != null) {
    return dropMacro(dropMacroDesc);
  }
  return 0;
}

/**
 * Decides whether a replicated create/drop-function event should be skipped (no-op).
 *
 * When the event is in replication scope and the target database's replication state
 * is already newer than the event, the event must not be replayed. Shared by the
 * create and drop branches of {@link #execute(DriverContext)}, which previously
 * duplicated this logic verbatim.
 *
 * The parameter is fully qualified because the file's import block is outside this
 * change; add a regular import if preferred.
 *
 * @param replicationSpec replication metadata attached to the function descriptor
 * @param functionName possibly db-qualified function name from the descriptor
 * @param eventType "Create" or "Drop", used only for the debug log message
 * @return true if the event is in replication scope and must be no-oped
 * @throws Exception if the qualified name cannot be parsed or the database lookup fails
 */
private boolean shouldNoopReplicatedEvent(
    org.apache.hadoop.hive.ql.parse.ReplicationSpec replicationSpec,
    String functionName, String eventType) throws Exception {
  if (!replicationSpec.isInReplicationScope()) {
    return false;
  }
  String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
  String dbName = qualifiedNameParts[0];
  String funcName = qualifiedNameParts[1];
  Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
  if (!replicationSpec.allowEventReplacementInto(dbProps)) {
    // The database is newer than this event, so replaying it would regress state.
    LOG.debug("FunctionTask: {} Function {} is skipped as database {} is newer than update",
        eventType, funcName, dbName);
    return true;
  }
  return false;
}
Example usage of org.apache.hadoop.hive.ql.plan.DropFunctionDesc in the Apache Hive project:
the analyzeDropFunction method of the class FunctionSemanticAnalyzer.
/**
 * Analyzes a DROP [TEMPORARY] FUNCTION [IF EXISTS] statement and queues the
 * corresponding {@link FunctionWork} task.
 *
 * AST shape: ^(TOK_DROPFUNCTION identifier ifExists? $temp?)
 *
 * @param ast the parsed DROP FUNCTION node
 * @throws SemanticException if the function does not exist (and neither IF EXISTS nor
 *         the drop-ignores-nonexistent config suppresses the error), or if the
 *         function is a built-in, which cannot be dropped
 */
private void analyzeDropFunction(ASTNode ast) throws SemanticException {
  String functionName = ast.getChild(0).getText();
  boolean ifExists = ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;

  FunctionInfo info = FunctionRegistry.getFunctionInfo(functionName);
  if (info == null) {
    // Missing function is tolerated when IF EXISTS was given or the config
    // says to ignore nonexistent drops; otherwise it is an error.
    boolean ignoreNonExistent =
        ifExists || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
    if (!ignoreNonExistent) {
      throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
    }
    // Fail silently
    return;
  }
  if (info.isBuiltIn()) {
    throw new SemanticException(ErrorMsg.DROP_NATIVE_FUNCTION.getMsg(functionName));
  }

  boolean isTemporaryFunction = ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null;
  DropFunctionDesc desc = new DropFunctionDesc(functionName, isTemporaryFunction, null);
  rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
  addEntities(functionName, info.getClassName(), isTemporaryFunction, null);
}
Aggregations