Search in sources:

Example 1 with CreateMacroDesc

Use of org.apache.hadoop.hive.ql.plan.CreateMacroDesc in project hive by apache.

Class FunctionTask, method execute:

@Override
public int execute(DriverContext driverContext) {
    CreateFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
    if (createFunctionDesc != null) {
        if (createFunctionDesc.isTemp()) {
            return createTemporaryFunction(createFunctionDesc);
        } else {
            try {
                if (createFunctionDesc.getReplicationSpec().isInReplicationScope()) {
                    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(createFunctionDesc.getFunctionName());
                    String dbName = qualifiedNameParts[0];
                    String funcName = qualifiedNameParts[1];
                    Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
                    if (!createFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
                        // If the database is newer than the create event, then noop it.
                        LOG.debug("FunctionTask: Create Function {} is skipped as database {} " + "is newer than update", funcName, dbName);
                        return 0;
                    }
                }
                return createPermanentFunction(Hive.get(conf), createFunctionDesc);
            } catch (Exception e) {
                setException(e);
                LOG.error("Failed to create function", e);
                return 1;
            }
        }
    }
    DropFunctionDesc dropFunctionDesc = work.getDropFunctionDesc();
    if (dropFunctionDesc != null) {
        if (dropFunctionDesc.isTemp()) {
            return dropTemporaryFunction(dropFunctionDesc);
        } else {
            try {
                if (dropFunctionDesc.getReplicationSpec().isInReplicationScope()) {
                    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(dropFunctionDesc.getFunctionName());
                    String dbName = qualifiedNameParts[0];
                    String funcName = qualifiedNameParts[1];
                    Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
                    if (!dropFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
                        // If the database is newer than the drop event, then noop it.
                        LOG.debug("FunctionTask: Drop Function {} is skipped as database {} " + "is newer than update", funcName, dbName);
                        return 0;
                    }
                }
                return dropPermanentFunction(Hive.get(conf), dropFunctionDesc);
            } catch (Exception e) {
                setException(e);
                LOG.error("Failed to drop function", e);
                return 1;
            }
        }
    }
    if (work.getReloadFunctionDesc() != null) {
        try {
            Hive.get().reloadFunctions();
        } catch (Exception e) {
            setException(e);
            LOG.error("Failed to reload functions", e);
            return 1;
        }
    }
    CreateMacroDesc createMacroDesc = work.getCreateMacroDesc();
    if (createMacroDesc != null) {
        return createMacro(createMacroDesc);
    }
    DropMacroDesc dropMacroDesc = work.getDropMacroDesc();
    if (dropMacroDesc != null) {
        return dropMacro(dropMacroDesc);
    }
    return 0;
}
Also used: CreateMacroDesc (org.apache.hadoop.hive.ql.plan.CreateMacroDesc), CreateFunctionDesc (org.apache.hadoop.hive.ql.plan.CreateFunctionDesc), DropFunctionDesc (org.apache.hadoop.hive.ql.plan.DropFunctionDesc), DropMacroDesc (org.apache.hadoop.hive.ql.plan.DropMacroDesc), IOException (java.io.IOException), StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
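
For orientation, here is a minimal sketch, not taken from the Hive sources, of how a FunctionWork carrying a CreateMacroDesc might be assembled. The macro name, parameter, and constant placeholder body below are made up; in Hive the body actually comes out of the semantic analyzer (see Example 2). With only the createMacroDesc field set, execute() above falls through the function and reload branches and dispatches to createMacro.

import java.util.Arrays;

import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.FunctionWork;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Hypothetical helper (not in Hive): builds the work object that
// FunctionTask.execute dispatches on. Exactly one desc field of the
// FunctionWork is non-null, which is why execute() probes each getter in turn.
public class CreateMacroWorkSketch {
    public static FunctionWork sketchWork() {
        CreateMacroDesc desc = new CreateMacroDesc(
            "twice",                                       // hypothetical macro name
            Arrays.asList("x"),                            // parameter names
            Arrays.asList(TypeInfoFactory.doubleTypeInfo), // parameter types
            new ExprNodeConstantDesc(0.0));                // placeholder body; the real one
                                                           // comes from genExprNodeDesc
        return new FunctionWork(desc);                     // getCreateMacroDesc() != null
    }
}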

Example 2 with CreateMacroDesc

Use of org.apache.hadoop.hive.ql.plan.CreateMacroDesc in project hive by apache.

Class MacroSemanticAnalyzer, method analyzeCreateMacro:

@SuppressWarnings("unchecked")
private void analyzeCreateMacro(ASTNode ast) throws SemanticException {
    String functionName = ast.getChild(0).getText();
    // Temp macros are not allowed to have qualified names.
    if (FunctionUtils.isQualifiedFunctionName(functionName)) {
        throw new SemanticException("Temporary macro cannot be created with a qualified name.");
    }
    List<FieldSchema> arguments = BaseSemanticAnalyzer.getColumns((ASTNode) ast.getChild(1), true);
    boolean isNoArgumentMacro = arguments.size() == 0;
    RowResolver rowResolver = new RowResolver();
    ArrayList<String> macroColNames = new ArrayList<String>(arguments.size());
    ArrayList<TypeInfo> macroColTypes = new ArrayList<TypeInfo>(arguments.size());
    final Set<String> actualColumnNames = new HashSet<String>();
    if (!isNoArgumentMacro) {
        /*
         * Walk down the expression tree to see which arguments are actually used.
         */
        Node expression = (Node) ast.getChild(2);
        PreOrderWalker walker = new PreOrderWalker(new Dispatcher() {

            @Override
            public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
                if (nd instanceof ASTNode) {
                    ASTNode node = (ASTNode) nd;
                    if (node.getType() == HiveParser.TOK_TABLE_OR_COL) {
                        actualColumnNames.add(node.getChild(0).getText());
                    }
                }
                return null;
            }
        });
        walker.startWalking(Collections.singletonList(expression), null);
    }
    for (FieldSchema argument : arguments) {
        TypeInfo colType = TypeInfoUtils.getTypeInfoFromTypeString(argument.getType());
        rowResolver.put("", argument.getName(), new ColumnInfo(argument.getName(), colType, "", false));
        macroColNames.add(argument.getName());
        macroColTypes.add(colType);
    }
    Set<String> expectedColumnNames = new LinkedHashSet<String>(macroColNames);
    if (!expectedColumnNames.equals(actualColumnNames)) {
        throw new SemanticException("Expected columns " + expectedColumnNames + " but found " + actualColumnNames);
    }
    if (expectedColumnNames.size() != macroColNames.size()) {
        throw new SemanticException("At least one parameter name was used more than once " + macroColNames);
    }
    SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)
            ? new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);
    ExprNodeDesc body;
    // With no declared parameters the macro body is child 1 of the AST;
    // otherwise the parameter list is child 1 and the body is child 2.
    if (isNoArgumentMacro) {
        body = sa.genExprNodeDesc((ASTNode) ast.getChild(1), rowResolver);
    } else {
        body = sa.genExprNodeDesc((ASTNode) ast.getChild(2), rowResolver);
    }
    CreateMacroDesc desc = new CreateMacroDesc(functionName, macroColNames, macroColTypes, body);
    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
    addEntities();
}
Also used: CreateMacroDesc (org.apache.hadoop.hive.ql.plan.CreateMacroDesc), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), Node (org.apache.hadoop.hive.ql.lib.Node), ArrayList (java.util.ArrayList), ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo), Dispatcher (org.apache.hadoop.hive.ql.lib.Dispatcher), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), HashSet (java.util.HashSet), LinkedHashSet (java.util.LinkedHashSet), FunctionWork (org.apache.hadoop.hive.ql.plan.FunctionWork), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), PreOrderWalker (org.apache.hadoop.hive.ql.lib.PreOrderWalker)
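
The two checks above enforce that every declared parameter is referenced in the body and that no parameter is declared twice, for DDL such as CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x)). Below is a minimal standalone sketch of that validation logic, with hypothetical inputs standing in for the AST-derived names; it is an illustration of the technique, not Hive code.

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class MacroParamCheckSketch {
    static void validate(List<String> declared, Set<String> referenced) {
        // LinkedHashSet dedupes while preserving declaration order for messages.
        Set<String> expected = new LinkedHashSet<String>(declared);
        if (!expected.equals(referenced)) {
            throw new IllegalArgumentException(
                "Expected columns " + expected + " but found " + referenced);
        }
        if (expected.size() != declared.size()) {
            // A duplicate parameter survives the equals() check (the set
            // collapses it) but is caught here by the size comparison.
            throw new IllegalArgumentException(
                "At least one parameter name was used more than once " + declared);
        }
    }

    public static void main(String[] args) {
        validate(Arrays.asList("x"), new HashSet<String>(Arrays.asList("x")));      // passes
        validate(Arrays.asList("x", "x"), new HashSet<String>(Arrays.asList("x"))); // throws
    }
}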

Aggregations

CreateMacroDesc (org.apache.hadoop.hive.ql.plan.CreateMacroDesc): 2 uses
IOException (java.io.IOException): 1 use
ArrayList (java.util.ArrayList): 1 use
HashSet (java.util.HashSet): 1 use
LinkedHashSet (java.util.LinkedHashSet): 1 use
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 1 use
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 1 use
Dispatcher (org.apache.hadoop.hive.ql.lib.Dispatcher): 1 use
Node (org.apache.hadoop.hive.ql.lib.Node): 1 use
PreOrderWalker (org.apache.hadoop.hive.ql.lib.PreOrderWalker): 1 use
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 1 use
CreateFunctionDesc (org.apache.hadoop.hive.ql.plan.CreateFunctionDesc): 1 use
DropFunctionDesc (org.apache.hadoop.hive.ql.plan.DropFunctionDesc): 1 use
DropMacroDesc (org.apache.hadoop.hive.ql.plan.DropMacroDesc): 1 use
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 1 use
FunctionWork (org.apache.hadoop.hive.ql.plan.FunctionWork): 1 use
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 1 use
StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException): 1 use