Use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project hive by apache.
In class GetFunctionsOperation: method runInternal.
/**
 * Executes the GET_FUNCTIONS metadata operation: when authorization V2 is
 * enabled, first authorizes the request against the databases matching the
 * schema pattern; then, for the "all catalogs / all schemas" case, emits one
 * row per registered function matching the function-name pattern.
 *
 * @throws HiveSQLException if the metastore lookup, authorization, or
 *         function enumeration fails (operation state is set to ERROR first)
 */
@Override
public void runInternal() throws HiveSQLException {
setState(OperationState.RUNNING);
if (isAuthV2Enabled()) {
// get databases for schema pattern
IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
String schemaPattern = convertSchemaPattern(schemaName);
List<String> matchingDbs;
try {
matchingDbs = metastoreClient.getDatabases(schemaPattern);
} catch (TException e) {
// Mark the operation failed before propagating so clients polling the
// operation state see ERROR rather than a stuck RUNNING.
setState(OperationState.ERROR);
throw new HiveSQLException(e);
}
// authorize this call on the schema objects
List<HivePrivilegeObject> privObjs = HivePrivilegeObjectUtils.getHivePrivDbObjects(matchingDbs);
String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName;
authorizeMetaGets(HiveOperationType.GET_FUNCTIONS, privObjs, cmdStr);
}
try {
// Rows are produced only when neither a catalog nor a schema filter was
// supplied; NOTE(review): filtered requests appear to return an empty
// result set here — confirm against the callers' expectations.
if ((null == catalogName || "".equals(catalogName)) && (null == schemaName || "".equals(schemaName))) {
Set<String> functionNames = FunctionRegistry.getFunctionNames(CLIServiceUtils.patternToRegex(functionName));
for (String functionName : functionNames) {
FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(functionName);
// Row layout follows java.sql.DatabaseMetaData.getFunctions():
// FUNCTION_CAT, FUNCTION_SCHEM, FUNCTION_NAME, REMARKS,
// FUNCTION_TYPE, SPECIFIC_NAME.
Object[] rowData = new Object[] { // FUNCTION_CAT
null, // FUNCTION_SCHEM
null, // FUNCTION_NAME
functionInfo.getDisplayName(), // REMARKS
"", (functionInfo.isGenericUDTF() ? DatabaseMetaData.functionReturnsTable : // FUNCTION_TYPE
DatabaseMetaData.functionNoTable), functionInfo.getClass().getCanonicalName() };
rowSet.addRow(rowData);
}
}
setState(OperationState.FINISHED);
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException(e);
}
}
Use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project hive by apache.
In class VectorizationContext: method isCustomUDF.
/**
 * Decides whether the named UDF is a custom (non-native) function.
 *
 * @param udfName function name to look up in the registry; may be null
 * @return true only when the registry resolves the name and reports the
 *         function as non-native; false for null names, unresolvable names,
 *         and native functions
 */
private static boolean isCustomUDF(String udfName) {
  if (udfName == null) {
    return false;
  }
  FunctionInfo info = null;
  try {
    info = FunctionRegistry.getFunctionInfo(udfName);
  } catch (SemanticException e) {
    // Resolution failure is treated the same as "not registered".
    LOG.warn("Failed to load " + udfName, e);
  }
  return info != null && !info.isNative();
}
Use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project hive by apache.
In class SqlFunctionConverter: method getHiveUDF.
/**
 * Resolves a Calcite {@link SqlOperator} back to the corresponding Hive
 * {@link GenericUDF}.
 *
 * <p>Resolution order: the reverse operator map, then the operator's own
 * name, with unary {@code +}/{@code -} remapped to Hive's dedicated unary
 * function names; finally an explicit-cast fallback.
 *
 * @param op         Calcite operator to translate
 * @param dt         target type, used only for the explicit-cast fallback
 * @param argsLength argument count; 1 triggers the unary +/- remapping
 * @return the Hive GenericUDF, or null if no mapping could be found
 */
public static GenericUDF getHiveUDF(SqlOperator op, RelDataType dt, int argsLength) {
  String name = reverseOperatorMap.get(op);
  if (name == null) {
    name = op.getName();
  }
  // Make sure we handle unary + and - correctly.
  // BUGFIX: use equals() instead of ==; the original compared String
  // references, which only works when both sides happen to be interned,
  // so non-interned names from op.getName() would silently miss the remap.
  if (argsLength == 1) {
    if ("+".equals(name)) {
      name = FunctionRegistry.UNARY_PLUS_FUNC_NAME;
    } else if ("-".equals(name)) {
      name = FunctionRegistry.UNARY_MINUS_FUNC_NAME;
    }
  }
  FunctionInfo hFn;
  try {
    hFn = name != null ? FunctionRegistry.getFunctionInfo(name) : null;
  } catch (SemanticException e) {
    LOG.warn("Failed to load udf " + name, e);
    hFn = null;
  }
  if (hFn == null) {
    // Registry lookup failed; the operator may still be an explicit cast.
    try {
      hFn = handleExplicitCast(op, dt);
    } catch (SemanticException e) {
      LOG.warn("Failed to load udf " + name, e);
      hFn = null;
    }
  }
  return hFn == null ? null : hFn.getGenericUDF();
}
Use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project hive by apache.
In class SemanticAnalyzer: method doPhase1GetAllAggregations.
/**
 * DFS-scan the expressionTree to find all aggregation subtrees and put them
 * in aggregations.
 *
 * @param expressionTree AST node to scan
 * @param aggregations
 *          the key to the HashTable is the toStringTree() representation of
 *          the aggregation subtree.
 * @param wdwFns collects function nodes carrying a windowing spec
 * @throws SemanticException if a function name is invalid, requires an OVER
 *           clause, or a UDAF contains a lead/lag UDF without one
 */
private void doPhase1GetAllAggregations(ASTNode expressionTree, HashMap<String, ASTNode> aggregations, List<ASTNode> wdwFns) throws SemanticException {
  int exprTokenType = expressionTree.getToken().getType();
  if (exprTokenType == HiveParser.TOK_SUBQUERY_EXPR) {
    // we don't want to include aggregate from within subquery
    return;
  }
  if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
    assert (expressionTree.getChildCount() != 0);
    if (expressionTree.getChild(expressionTree.getChildCount() - 1).getType() == HiveParser.TOK_WINDOWSPEC) {
      // If it is a windowing spec, we include it in the list
      // Further, we will examine its children AST nodes to check whether
      // there are aggregation functions within
      wdwFns.add(expressionTree);
      doPhase1GetAllAggregations((ASTNode) expressionTree.getChild(expressionTree.getChildCount() - 1), aggregations, wdwFns);
      return;
    }
    if (expressionTree.getChild(0).getType() == HiveParser.Identifier) {
      String functionName = unescapeIdentifier(expressionTree.getChild(0).getText());
      // Validate the function name. Look it up once and reuse the result:
      // the original code performed this registry lookup a second time
      // further down for the same name.
      FunctionInfo fi = FunctionRegistry.getFunctionInfo(functionName);
      if (fi == null) {
        throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
      }
      if (FunctionRegistry.impliesOrder(functionName)) {
        // Functions like lead/lag/rank require an OVER clause.
        throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
      }
      if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
        if (containsLeadLagUDF(expressionTree)) {
          throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
        }
        // Key by the subtree's string form so textually identical
        // aggregation expressions collapse to a single entry.
        aggregations.put(expressionTree.toStringTree(), expressionTree);
        if (!fi.isNative()) {
          unparseTranslator.addIdentifierTranslation((ASTNode) expressionTree.getChild(0));
        }
        return;
      }
    }
  }
  // Not an aggregation at this node: recurse into all children.
  for (int i = 0; i < expressionTree.getChildCount(); i++) {
    doPhase1GetAllAggregations((ASTNode) expressionTree.getChild(i), aggregations, wdwFns);
  }
}
Use of org.apache.hadoop.hive.ql.exec.FunctionInfo in project SQLWindowing by hbutani.
In class FunctionRegistry: method registerWindowFunction.
/**
 * Registers a window function: the resolver is first registered as a native
 * generic UDAF in Hive's registry, then wrapped in a WindowFunctionInfo and
 * recorded under the lower-cased name in the local windowFunctions map.
 *
 * @param name function name; stored case-insensitively
 * @param wFn  resolver implementing the aggregation
 */
public static void registerWindowFunction(String name, GenericUDAFResolver wFn) {
  HiveFR.registerGenericUDAF(true, name, wFn);
  // Fetch the FunctionInfo Hive just created and index it locally.
  windowFunctions.put(name.toLowerCase(), new WindowFunctionInfo(HiveFR.getFunctionInfo(name)));
}
Aggregations