Search in sources :

Example 1 with FunctionInfo

use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project hive by apache.

Example from the class GetFunctionsOperation, method runInternal.

/**
 * Fetches the functions matching this operation's function-name pattern and adds
 * one row per function to {@code rowSet}, following the JDBC
 * {@link DatabaseMetaData#getFunctions} column layout
 * (FUNCTION_CAT, FUNCTION_SCHEM, FUNCTION_NAME, REMARKS, FUNCTION_TYPE, SPECIFIC_NAME).
 *
 * <p>When V2 authorization is enabled, the call is first authorized against the
 * databases matching the schema pattern.
 *
 * @throws HiveSQLException if the metastore lookup, authorization, or function
 *                          resolution fails; the operation state is set to ERROR first
 */
@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    if (isAuthV2Enabled()) {
        // Resolve the databases matching the schema pattern so this call can be
        // authorized against them as schema objects.
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        List<String> matchingDbs;
        try {
            matchingDbs = metastoreClient.getDatabases(schemaPattern);
        } catch (TException e) {
            setState(OperationState.ERROR);
            throw new HiveSQLException(e);
        }
        // authorize this call on the schema objects
        List<HivePrivilegeObject> privObjs = HivePrivilegeObjectUtils.getHivePrivDbObjects(matchingDbs);
        String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
        authorizeMetaGets(HiveOperationType.GET_FUNCTIONS, privObjs, cmdStr);
    }
    try {
        if ((null == catalogName || "".equals(catalogName)) && (null == schemaName || "".equals(schemaName))) {
            Set<String> functionNames = FunctionRegistry.getFunctionNames(CLIServiceUtils.patternToRegex(functionName));
            // Renamed loop variable (was "functionName") so it no longer shadows
            // the field holding the search pattern used in the lookup above.
            for (String matchedFunction : functionNames) {
                FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(matchedFunction);
                Object[] rowData = new Object[] {
                    null,                                       // FUNCTION_CAT
                    null,                                       // FUNCTION_SCHEM
                    functionInfo.getDisplayName(),              // FUNCTION_NAME
                    "",                                         // REMARKS
                    (functionInfo.isGenericUDTF()               // FUNCTION_TYPE
                        ? DatabaseMetaData.functionReturnsTable
                        : DatabaseMetaData.functionNoTable),
                    functionInfo.getClass().getCanonicalName()  // SPECIFIC_NAME
                };
                rowSet.addRow(rowData);
            }
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used : TException(org.apache.thrift.TException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) FunctionInfo(org.apache.hadoop.hive.ql.exec.FunctionInfo) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) TException(org.apache.thrift.TException)

Example 2 with FunctionInfo

use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project hive by apache.

Example from the class VectorizationContext, method isCustomUDF.

/**
 * Determines whether the named UDF is a custom (i.e. non-native) function.
 *
 * <p>A lookup failure in the registry is logged and treated the same as an
 * unknown function: the name is not considered a custom UDF.
 *
 * @param udfName the function name to check; may be {@code null}
 * @return {@code true} if the function resolves and is not a native function
 */
private static boolean isCustomUDF(String udfName) {
    if (udfName == null) {
        return false;
    }
    FunctionInfo info = null;
    try {
        info = FunctionRegistry.getFunctionInfo(udfName);
    } catch (SemanticException e) {
        // Unresolvable names fall through as "not custom".
        LOG.warn("Failed to load " + udfName, e);
    }
    return info != null && !info.isNative();
}
Also used : FunctionInfo(org.apache.hadoop.hive.ql.exec.FunctionInfo) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 3 with FunctionInfo

use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project hive by apache.

Example from the class SqlFunctionConverter, method getHiveUDF.

/**
 * Resolves the Hive {@link GenericUDF} corresponding to a Calcite operator.
 *
 * <p>Tries the reverse operator map first, falling back to the operator's own
 * name; unary {@code +}/{@code -} are mapped to their dedicated function names.
 * If the registry lookup fails, an explicit-cast handler is consulted.
 *
 * @param op         the Calcite operator to convert
 * @param dt         the operator's return type (used for explicit casts)
 * @param argsLength the number of arguments, used to detect unary +/-
 * @return the resolved Hive UDF, or {@code null} if none could be found
 */
public static GenericUDF getHiveUDF(SqlOperator op, RelDataType dt, int argsLength) {
    String name = reverseOperatorMap.get(op);
    if (name == null) {
        name = op.getName();
    }
    // Make sure we handle unary + and - correctly.
    // Use equals() rather than ==: op.getName() is not guaranteed to return an
    // interned literal, so reference comparison could silently fail.
    if (argsLength == 1) {
        if ("+".equals(name)) {
            name = FunctionRegistry.UNARY_PLUS_FUNC_NAME;
        } else if ("-".equals(name)) {
            name = FunctionRegistry.UNARY_MINUS_FUNC_NAME;
        }
    }
    FunctionInfo hFn;
    try {
        hFn = name != null ? FunctionRegistry.getFunctionInfo(name) : null;
    } catch (SemanticException e) {
        LOG.warn("Failed to load udf " + name, e);
        hFn = null;
    }
    if (hFn == null) {
        // Registry miss: the operator may still be an explicit cast.
        try {
            hFn = handleExplicitCast(op, dt);
        } catch (SemanticException e) {
            LOG.warn("Failed to load udf " + name, e);
            hFn = null;
        }
    }
    return hFn == null ? null : hFn.getGenericUDF();
}
Also used : FunctionInfo(org.apache.hadoop.hive.ql.exec.FunctionInfo) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException)

Example 4 with FunctionInfo

use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project hive by apache.

Example from the class SemanticAnalyzer, method doPhase1GetAllAggregations.

/**
   * DFS-scan the expressionTree to find all aggregation subtrees and put them
   * in aggregations.
   *
   * @param expressionTree
   * @param aggregations
   *          the key to the HashTable is the toStringTree() representation of
   *          the aggregation subtree.
   * @throws SemanticException
   */
/**
   * DFS-scan the expressionTree to find all aggregation subtrees and put them
   * in aggregations.
   *
   * <p>Subqueries are skipped entirely; windowing specs are collected into
   * {@code wdwFns} and only their children are scanned further.
   *
   * @param expressionTree the AST subtree to scan
   * @param aggregations
   *          the key to the HashTable is the toStringTree() representation of
   *          the aggregation subtree.
   * @param wdwFns collects function nodes that carry a windowing spec
   * @throws SemanticException if a function name is invalid or a windowing
   *           function is used without an OVER clause
   */
private void doPhase1GetAllAggregations(ASTNode expressionTree, HashMap<String, ASTNode> aggregations, List<ASTNode> wdwFns) throws SemanticException {
    int exprTokenType = expressionTree.getToken().getType();
    if (exprTokenType == HiveParser.TOK_SUBQUERY_EXPR) {
        // we don't want to include aggregate from within subquery
        return;
    }
    if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
        assert (expressionTree.getChildCount() != 0);
        if (expressionTree.getChild(expressionTree.getChildCount() - 1).getType() == HiveParser.TOK_WINDOWSPEC) {
            // If it is a windowing spec, we include it in the list
            // Further, we will examine its children AST nodes to check whether
            // there are aggregation functions within
            wdwFns.add(expressionTree);
            doPhase1GetAllAggregations((ASTNode) expressionTree.getChild(expressionTree.getChildCount() - 1), aggregations, wdwFns);
            return;
        }
        if (expressionTree.getChild(0).getType() == HiveParser.Identifier) {
            String functionName = unescapeIdentifier(expressionTree.getChild(0).getText());
            // Validate the function name. The single lookup below replaces the
            // previous duplicate getFunctionInfo() calls (validation + isNative check).
            FunctionInfo fi = FunctionRegistry.getFunctionInfo(functionName);
            if (fi == null) {
                throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
            }
            if (FunctionRegistry.impliesOrder(functionName)) {
                // Order-implying functions (e.g. rank) require an OVER clause.
                throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
            }
            if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
                if (containsLeadLagUDF(expressionTree)) {
                    throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
                }
                aggregations.put(expressionTree.toStringTree(), expressionTree);
                if (!fi.isNative()) {
                    // Non-native (user-registered) functions need identifier translation.
                    unparseTranslator.addIdentifierTranslation((ASTNode) expressionTree.getChild(0));
                }
                return;
            }
        }
    }
    for (int i = 0; i < expressionTree.getChildCount(); i++) {
        doPhase1GetAllAggregations((ASTNode) expressionTree.getChild(i), aggregations, wdwFns);
    }
}
Also used : FunctionInfo(org.apache.hadoop.hive.ql.exec.FunctionInfo) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException)

Example 5 with FunctionInfo

use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project SQLWindowing by hbutani.

Example from the class FunctionRegistry, method registerWindowFunction.

/**
 * Registers a windowing function under the given name.
 *
 * <p>The resolver is first registered as a generic UDAF with the underlying
 * Hive function registry; its resulting {@code FunctionInfo} is then wrapped
 * and tracked in the window-function map, keyed by the lowercased name.
 *
 * @param name the function name to register
 * @param wFn  the UDAF resolver backing the window function
 */
public static void registerWindowFunction(String name, GenericUDAFResolver wFn) {
    HiveFR.registerGenericUDAF(true, name, wFn);
    FunctionInfo registered = HiveFR.getFunctionInfo(name);
    windowFunctions.put(name.toLowerCase(), new WindowFunctionInfo(registered));
}
Also used : FunctionInfo(org.apache.hadoop.hive.ql.exec.FunctionInfo)

Aggregations

FunctionInfo (org.apache.hadoop.hive.ql.exec.FunctionInfo)9 CalciteSemanticException (org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException)3 ArrayList (java.util.ArrayList)2 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)2 ImmutableList (com.google.common.collect.ImmutableList)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 LinkedHashMap (java.util.LinkedHashMap)1 RelTraitSet (org.apache.calcite.plan.RelTraitSet)1 RelNode (org.apache.calcite.rel.RelNode)1 RelDataType (org.apache.calcite.rel.type.RelDataType)1 RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField)1 RexInputRef (org.apache.calcite.rex.RexInputRef)1 RexNode (org.apache.calcite.rex.RexNode)1 SqlOperator (org.apache.calcite.sql.SqlOperator)1 IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)1 AbstractMapJoinOperator (org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator)1 ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo)1 FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator)1 FilterOperator (org.apache.hadoop.hive.ql.exec.FilterOperator)1