
Example 1 with DropMacroDesc

Use of org.apache.hadoop.hive.ql.plan.DropMacroDesc in project hive by apache.

From the class MacroSemanticAnalyzer, the method analyzeDropMacro:

@SuppressWarnings("unchecked")
private void analyzeDropMacro(ASTNode ast) throws SemanticException {
    String functionName = ast.getChild(0).getText();
    boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
    // we want to signal an error if the function doesn't exist and we're
    // configured not to ignore this
    boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
    // Temp macros are not allowed to have qualified names.
    if (FunctionUtils.isQualifiedFunctionName(functionName)) {
        throw new SemanticException("Temporary macro name cannot be a qualified name.");
    }
    if (throwException && FunctionRegistry.getFunctionInfo(functionName) == null) {
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
    }
    // Plan the drop: wrap the macro name in a DropMacroDesc and schedule it
    // as a FunctionWork task, which FunctionTask later routes to dropMacro.
    DropMacroDesc desc = new DropMacroDesc(functionName);
    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
    // Register the affected database as read/write entities for this DDL.
    addEntities();
}
Also used: DropMacroDesc (org.apache.hadoop.hive.ql.plan.DropMacroDesc), FunctionWork (org.apache.hadoop.hive.ql.plan.FunctionWork)
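
The check worth noting is the throwException flag: dropping a missing macro is an error only when IF EXISTS is omitted and hive.exec.drop.ignorenonexistent (ConfVars.DROPIGNORESNONEXISTENT) is off. A minimal, self-contained sketch of that rule follows; the class and method names are illustrative, not Hive source.

// Sketch only: mirrors analyzeDropMacro's error rule with made-up names.
public final class DropMacroErrorRuleSketch {

    static boolean shouldThrow(boolean ifExists, boolean ignoreNonExistent, boolean macroExists) {
        // An error needs all three escape hatches closed: no IF EXISTS,
        // ignore-nonexistent disabled, and the macro actually missing.
        return !ifExists && !ignoreNonExistent && !macroExists;
    }

    public static void main(String[] args) {
        System.out.println(shouldThrow(true, false, false));  // false: IF EXISTS was given
        System.out.println(shouldThrow(false, true, false));  // false: config ignores missing objects
        System.out.println(shouldThrow(false, false, true));  // false: macro exists, drop proceeds
        System.out.println(shouldThrow(false, false, false)); // true: missing macro is an error
    }
}

Note the order in the real method: the qualified-name restriction is enforced unconditionally, before the existence check, so even DROP TEMPORARY MACRO IF EXISTS with a qualified name fails.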

Example 2 with DropMacroDesc

Use of org.apache.hadoop.hive.ql.plan.DropMacroDesc in project hive by apache.

From the class FunctionTask, the method execute:

@Override
public int execute(DriverContext driverContext) {
    // The work carries at most one desc; handle the first non-null one in
    // order: create/drop function, reload functions, create/drop macro.
    CreateFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
    if (createFunctionDesc != null) {
        if (createFunctionDesc.isTemp()) {
            return createTemporaryFunction(createFunctionDesc);
        } else {
            try {
                if (createFunctionDesc.getReplicationSpec().isInReplicationScope()) {
                    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(createFunctionDesc.getFunctionName());
                    String dbName = qualifiedNameParts[0];
                    String funcName = qualifiedNameParts[1];
                    Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
                    if (!createFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
                        // If the database is newer than the create event, then noop it.
                        LOG.debug("FunctionTask: Create Function {} is skipped as database {} " + "is newer than update", funcName, dbName);
                        return 0;
                    }
                }
                return createPermanentFunction(Hive.get(conf), createFunctionDesc);
            } catch (Exception e) {
                setException(e);
                LOG.error("Failed to create function", e);
                return 1;
            }
        }
    }
    DropFunctionDesc dropFunctionDesc = work.getDropFunctionDesc();
    if (dropFunctionDesc != null) {
        if (dropFunctionDesc.isTemp()) {
            return dropTemporaryFunction(dropFunctionDesc);
        } else {
            try {
                if (dropFunctionDesc.getReplicationSpec().isInReplicationScope()) {
                    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(dropFunctionDesc.getFunctionName());
                    String dbName = qualifiedNameParts[0];
                    String funcName = qualifiedNameParts[1];
                    Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
                    if (!dropFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
                        // If the database is newer than the drop event, then noop it.
                        LOG.debug("FunctionTask: Drop Function {} is skipped as database {} " + "is newer than update", funcName, dbName);
                        return 0;
                    }
                }
                return dropPermanentFunction(Hive.get(conf), dropFunctionDesc);
            } catch (Exception e) {
                setException(e);
                LOG.error("Failed to drop function", e);
                return 1;
            }
        }
    }
    if (work.getReloadFunctionDesc() != null) {
        try {
            Hive.get().reloadFunctions();
        } catch (Exception e) {
            setException(e);
            LOG.error("Failed to reload functions", e);
            return 1;
        }
    }
    CreateMacroDesc createMacroDesc = work.getCreateMacroDesc();
    if (createMacroDesc != null) {
        return createMacro(createMacroDesc);
    }
    DropMacroDesc dropMacroDesc = work.getDropMacroDesc();
    if (dropMacroDesc != null) {
        return dropMacro(dropMacroDesc);
    }
    return 0;
}
Also used: CreateMacroDesc (org.apache.hadoop.hive.ql.plan.CreateMacroDesc), CreateFunctionDesc (org.apache.hadoop.hive.ql.plan.CreateFunctionDesc), DropFunctionDesc (org.apache.hadoop.hive.ql.plan.DropFunctionDesc), DropMacroDesc (org.apache.hadoop.hive.ql.plan.DropMacroDesc), IOException (java.io.IOException), StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
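
Taken together, the two examples show the whole lifecycle: the analyzer wraps a DropMacroDesc in a FunctionWork (Example 1), and FunctionTask.execute probes the work's desc getters in a fixed order (create function, drop function, reload, create macro, drop macro), handling the first non-null desc and returning 0 when none is set. Below is a compact sketch of that first-non-null dispatch shape; the types are stand-ins, not Hive source.

// Sketch only: stand-in types mimicking execute()'s dispatch; in the real
// task each branch returns an int status from the corresponding handler.
public final class DescDispatchSketch {

    // null means "this desc was not set on the work object".
    private final Object createMacroDesc;
    private final Object dropMacroDesc;

    DescDispatchSketch(Object createMacroDesc, Object dropMacroDesc) {
        this.createMacroDesc = createMacroDesc;
        this.dropMacroDesc = dropMacroDesc;
    }

    String execute() {
        if (createMacroDesc != null) {
            return "createMacro(desc)"; // real code: return createMacro(createMacroDesc);
        }
        if (dropMacroDesc != null) {
            return "dropMacro(desc)";   // real code: return dropMacro(dropMacroDesc);
        }
        return "no-op"; // no desc set: the real execute() returns 0
    }

    public static void main(String[] args) {
        // Only the drop desc is set, so dispatch reaches the drop-macro
        // branch, just as in Example 2.
        System.out.println(new DescDispatchSketch(null, new Object()).execute());
    }
}

Because every branch returns, the descs are mutually exclusive in practice: a FunctionWork built by the analyzer, as in Example 1, carries exactly one desc.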

Aggregations

DropMacroDesc (org.apache.hadoop.hive.ql.plan.DropMacroDesc): 2 uses
IOException (java.io.IOException): 1 use
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 1 use
CreateFunctionDesc (org.apache.hadoop.hive.ql.plan.CreateFunctionDesc): 1 use
CreateMacroDesc (org.apache.hadoop.hive.ql.plan.CreateMacroDesc): 1 use
DropFunctionDesc (org.apache.hadoop.hive.ql.plan.DropFunctionDesc): 1 use
FunctionWork (org.apache.hadoop.hive.ql.plan.FunctionWork): 1 use
StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException): 1 use