Search in sources:

Example 1 with CreateFunctionDesc

Use of org.apache.hadoop.hive.ql.plan.CreateFunctionDesc in the project hive by apache.

From the class FunctionSemanticAnalyzer, method analyzeCreateFunction:

/**
 * Analyzes a CREATE [TEMPORARY] FUNCTION statement and queues a task to create it.
 *
 * <p>Reads the function name and implementing class from the AST, rejects qualified
 * names for temporary functions, gathers any USING-clause resources, and registers
 * a {@link FunctionWork} task plus the corresponding read/write entities.
 *
 * @param ast the parsed TOK_CREATEFUNCTION node
 * @throws SemanticException if a temporary function is given a qualified (db-prefixed) name
 */
private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
    // ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
    String name = ast.getChild(0).getText().toLowerCase();
    String udfClassName = unescapeSQLString(ast.getChild(1).getText());
    boolean isTemp = ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null;

    // Qualified (db.name) identifiers are reserved for permanent functions.
    if (isTemp && FunctionUtils.isQualifiedFunctionName(name)) {
        throw new SemanticException("Temporary function cannot be created with a qualified name.");
    }

    // Resources (JAR/FILE/ARCHIVE) named in the optional USING clause, if any.
    List<ResourceUri> resources = getResourceList(ast);
    if (resources == null && !isTemp) {
        // NOTE(review): field name spelling comes from the declaring class — do not "fix" here.
        SESISON_STATE_LOG.warn("permanent functions created without USING  clause will not be replicated.");
    }

    CreateFunctionDesc desc = new CreateFunctionDesc(name, isTemp, udfClassName, resources, null);
    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
    addEntities(name, udfClassName, isTemp, resources);
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) CreateFunctionDesc(org.apache.hadoop.hive.ql.plan.CreateFunctionDesc) FunctionWork(org.apache.hadoop.hive.ql.plan.FunctionWork)

Example 2 with CreateFunctionDesc

Use of org.apache.hadoop.hive.ql.plan.CreateFunctionDesc in the project hive by apache.

From the class FunctionTask, method execute:

/**
 * Executes the function or macro DDL operation carried by this task's {@code FunctionWork}.
 *
 * <p>The descriptors on {@code work} are checked in order (create function, drop function,
 * reload functions, create macro, drop macro) and the first non-null one is processed.
 * The previously duplicated replication-scope handling for create and drop is factored
 * into {@link #handleCreateFunction} and {@link #handleDropFunction}.
 *
 * @param driverContext the driver context (not used by this task)
 * @return 0 on success (including an intentionally skipped replicated event), 1 on failure
 */
@Override
public int execute(DriverContext driverContext) {
    CreateFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
    if (createFunctionDesc != null) {
        return handleCreateFunction(createFunctionDesc);
    }
    DropFunctionDesc dropFunctionDesc = work.getDropFunctionDesc();
    if (dropFunctionDesc != null) {
        return handleDropFunction(dropFunctionDesc);
    }
    if (work.getReloadFunctionDesc() != null) {
        try {
            Hive.get().reloadFunctions();
        } catch (Exception e) {
            setException(e);
            LOG.error("Failed to reload functions", e);
            return 1;
        }
    }
    CreateMacroDesc createMacroDesc = work.getCreateMacroDesc();
    if (createMacroDesc != null) {
        return createMacro(createMacroDesc);
    }
    DropMacroDesc dropMacroDesc = work.getDropMacroDesc();
    if (dropMacroDesc != null) {
        return dropMacro(dropMacroDesc);
    }
    return 0;
}

/**
 * Creates a temporary or permanent function, honoring replication-scope skip rules.
 *
 * @param createFunctionDesc descriptor of the function to create
 * @return 0 on success or intentional replication noop, 1 on failure
 */
private int handleCreateFunction(CreateFunctionDesc createFunctionDesc) {
    if (createFunctionDesc.isTemp()) {
        return createTemporaryFunction(createFunctionDesc);
    }
    try {
        if (createFunctionDesc.getReplicationSpec().isInReplicationScope()) {
            String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(createFunctionDesc.getFunctionName());
            String dbName = qualifiedNameParts[0];
            String funcName = qualifiedNameParts[1];
            Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
            if (!createFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
                // If the database is newer than the create event, then noop it.
                LOG.debug("FunctionTask: Create Function {} is skipped as database {} " + "is newer than update", funcName, dbName);
                return 0;
            }
        }
        return createPermanentFunction(Hive.get(conf), createFunctionDesc);
    } catch (Exception e) {
        setException(e);
        LOG.error("Failed to create function", e);
        return 1;
    }
}

/**
 * Drops a temporary or permanent function, honoring replication-scope skip rules.
 *
 * @param dropFunctionDesc descriptor of the function to drop
 * @return 0 on success or intentional replication noop, 1 on failure
 */
private int handleDropFunction(DropFunctionDesc dropFunctionDesc) {
    if (dropFunctionDesc.isTemp()) {
        return dropTemporaryFunction(dropFunctionDesc);
    }
    try {
        if (dropFunctionDesc.getReplicationSpec().isInReplicationScope()) {
            String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(dropFunctionDesc.getFunctionName());
            String dbName = qualifiedNameParts[0];
            String funcName = qualifiedNameParts[1];
            Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
            if (!dropFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
                // If the database is newer than the drop event, then noop it.
                LOG.debug("FunctionTask: Drop Function {} is skipped as database {} " + "is newer than update", funcName, dbName);
                return 0;
            }
        }
        return dropPermanentFunction(Hive.get(conf), dropFunctionDesc);
    } catch (Exception e) {
        setException(e);
        LOG.error("Failed to drop function", e);
        return 1;
    }
}
Also used : CreateMacroDesc(org.apache.hadoop.hive.ql.plan.CreateMacroDesc) CreateFunctionDesc(org.apache.hadoop.hive.ql.plan.CreateFunctionDesc) DropFunctionDesc(org.apache.hadoop.hive.ql.plan.DropFunctionDesc) DropMacroDesc(org.apache.hadoop.hive.ql.plan.DropMacroDesc) IOException(java.io.IOException) StringUtils.stringifyException(org.apache.hadoop.util.StringUtils.stringifyException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)

Aggregations

CreateFunctionDesc (org.apache.hadoop.hive.ql.plan.CreateFunctionDesc)2 IOException (java.io.IOException)1 ResourceUri (org.apache.hadoop.hive.metastore.api.ResourceUri)1 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)1 CreateMacroDesc (org.apache.hadoop.hive.ql.plan.CreateMacroDesc)1 DropFunctionDesc (org.apache.hadoop.hive.ql.plan.DropFunctionDesc)1 DropMacroDesc (org.apache.hadoop.hive.ql.plan.DropMacroDesc)1 FunctionWork (org.apache.hadoop.hive.ql.plan.FunctionWork)1 StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException)1