
Example 6 with FunctionResource

use of org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource in project hive by apache.

the class DDLTask method describeFunction.

/**
 * Shows a description of a function.
 * @param db
 *          the Hive metadata handle (not used directly by this method)
 * @param descFunc
 *          is the function we are describing
 * @throws HiveException
 */
private int describeFunction(Hive db, DescFunctionDesc descFunc) throws HiveException, SQLException {
    String funcName = descFunc.getName();
    // write the results in the file
    DataOutputStream outStream = getOutputStream(descFunc.getResFile());
    try {
        // get the function documentation
        Description desc = null;
        Class<?> funcClass = null;
        FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(funcName);
        if (functionInfo != null) {
            funcClass = functionInfo.getFunctionClass();
        }
        if (funcClass != null) {
            desc = AnnotationUtils.getAnnotation(funcClass, Description.class);
        }
        if (desc != null) {
            outStream.writeBytes(desc.value().replace("_FUNC_", funcName));
            if (descFunc.isExtended()) {
                Set<String> synonyms = FunctionRegistry.getFunctionSynonyms(funcName);
                if (synonyms.size() > 0) {
                    outStream.writeBytes("\nSynonyms: " + join(synonyms, ", "));
                }
                if (desc.extended().length() > 0) {
                    outStream.writeBytes("\n" + desc.extended().replace("_FUNC_", funcName));
                }
            }
        } else {
            if (funcClass != null) {
                outStream.writeBytes("There is no documentation for function '" + funcName + "'");
            } else {
                outStream.writeBytes("Function '" + funcName + "' does not exist.");
            }
        }
        outStream.write(terminator);
        if (descFunc.isExtended()) {
            if (funcClass != null) {
                outStream.writeBytes("Function class:" + funcClass.getName() + "\n");
            }
            if (functionInfo != null) {
                outStream.writeBytes("Function type:" + functionInfo.getFunctionType() + "\n");
                FunctionResource[] resources = functionInfo.getResources();
                if (resources != null) {
                    for (FunctionResource resource : resources) {
                        outStream.writeBytes("Resource:" + resource.getResourceURI() + "\n");
                    }
                }
            }
        }
    } catch (FileNotFoundException e) {
        LOG.warn("describe function: ", e);
        return 1;
    } catch (IOException e) {
        LOG.warn("describe function: ", e);
        return 1;
    } catch (Exception e) {
        throw new HiveException(e);
    } finally {
        IOUtils.closeStream(outStream);
    }
    return 0;
}
Also used: FunctionResource (org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), DataOutputStream (java.io.DataOutputStream), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), FileNotFoundException (java.io.FileNotFoundException), IOException (java.io.IOException), AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException), InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException), ExecutionException (java.util.concurrent.ExecutionException), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), URISyntaxException (java.net.URISyntaxException), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException), SQLException (java.sql.SQLException), HiveAuthzPluginException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException), InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)
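As a supplement to the example above, the same lookup path can be exercised outside DDLTask. The following is a minimal, hypothetical sketch (the class and method names are illustrative, not Hive API) that reuses only the calls shown above: FunctionRegistry.getFunctionInfo, AnnotationUtils.getAnnotation, and FunctionInfo.getResources. It assumes an initialized Hive session and writes to stdout instead of a result file.

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hive.common.util.AnnotationUtils;

public class DescribeFunctionSketch {
    // Hypothetical helper: prints roughly what describeFunction writes to its
    // result file, but to stdout. Assumes a Hive session has been initialized.
    public static void printFunction(String funcName) throws Exception {
        FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(funcName);
        if (functionInfo == null) {
            System.out.println("Function '" + funcName + "' does not exist.");
            return;
        }
        Class<?> funcClass = functionInfo.getFunctionClass();
        Description desc = funcClass == null
            ? null
            : AnnotationUtils.getAnnotation(funcClass, Description.class);
        if (desc != null) {
            // _FUNC_ is the placeholder Hive substitutes with the real function name.
            System.out.println(desc.value().replace("_FUNC_", funcName));
        } else {
            System.out.println("There is no documentation for function '" + funcName + "'");
        }
        if (funcClass != null) {
            System.out.println("Function class:" + funcClass.getName());
        }
        System.out.println("Function type:" + functionInfo.getFunctionType());
        FunctionResource[] resources = functionInfo.getResources();
        if (resources != null) {
            for (FunctionResource resource : resources) {
                System.out.println("Resource:" + resource.getResourceURI());
            }
        }
    }
}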

Example 7 with FunctionResource

use of org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource in project hive by apache.

the class TestFunctionRegistry method testIsPermanentFunction.

@Test
public void testIsPermanentFunction() throws Exception {
    // Setup exprNode
    GenericUDF udf = new GenericUDFCurrentTimestamp();
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    ExprNodeGenericFuncDesc fnExpr = new ExprNodeGenericFuncDesc(TypeInfoFactory.timestampTypeInfo, udf, children);
    assertFalse("Function not added as permanent yet", FunctionRegistry.isPermanentFunction(fnExpr));
    // Now register as permanent function
    FunctionResource[] emptyResources = new FunctionResource[] {};
    FunctionRegistry.registerPermanentFunction("default.perm_current_timestamp", GenericUDFCurrentTimestamp.class.getName(), true, emptyResources);
    assertTrue("Function should now be recognized as permanent function", FunctionRegistry.isPermanentFunction(fnExpr));
}
Also used: FunctionResource (org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), GenericUDFCurrentTimestamp (org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp), ArrayList (java.util.ArrayList), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), Test (org.junit.Test)
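The test above registers the function with an empty resource array. For comparison, a registration that actually carries a resource might look like the hypothetical sketch below; it assumes the FunctionResource(SessionState.ResourceType, String) constructor and an already-started SessionState, and the function name and JAR path are illustrative only.

import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;

public class RegisterWithResourceSketch {
    // Hypothetical: registers a permanent function that carries a JAR resource.
    // Assumes the FunctionResource(SessionState.ResourceType, String) constructor
    // and an already-started SessionState; the function name and JAR path are
    // illustrative only.
    public static void register() throws Exception {
        FunctionResource[] resources = new FunctionResource[] {
            new FunctionResource(SessionState.ResourceType.JAR, "hdfs:///tmp/example-udf.jar")
        };
        FunctionRegistry.registerPermanentFunction("default.perm_current_timestamp_with_jar",
            GenericUDFCurrentTimestamp.class.getName(), true, resources);
    }
}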

Example 8 with FunctionResource

use of org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource in project hive by apache.

the class CreateFunctionOperation method createTemporaryFunction.

private int createTemporaryFunction() {
    try {
        // Add any required resources
        FunctionResource[] resources = FunctionUtils.toFunctionResource(desc.getResources());
        FunctionUtils.addFunctionResources(resources);
        Class<?> udfClass = getUdfClass();
        FunctionInfo registered = FunctionRegistry.registerTemporaryUDF(desc.getName(), udfClass, resources);
        if (registered != null) {
            return 0;
        } else {
            context.getConsole().printError("FAILED: Class " + desc.getClassName() + " does not implement UDF, GenericUDF, or UDAF");
            return 1;
        }
    } catch (HiveException e) {
        context.getConsole().printError("FAILED: " + e.toString());
        LOG.info("create function: ", e);
        return 1;
    } catch (ClassNotFoundException e) {
        context.getConsole().printError("FAILED: Class " + desc.getClassName() + " not found");
        LOG.info("create function: ", e);
        return 1;
    }
}
Also used: FunctionResource (org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), FunctionInfo (org.apache.hadoop.hive.ql.exec.FunctionInfo)
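Note that the only signal that the supplied class is not a valid UDF is the null FunctionInfo returned by registerTemporaryUDF. A minimal, hypothetical sketch of that contract (assuming a started SessionState; class, method, and function names are illustrative) might look like this:

import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;

public class TemporaryFunctionSketch {
    // Hypothetical helper mirroring the check in createTemporaryFunction: a null
    // FunctionInfo from registerTemporaryUDF means the class does not implement
    // UDF, GenericUDF, or UDAF. Assumes a started SessionState.
    public static boolean registerOrReport(String name, Class<?> udfClass) throws Exception {
        FunctionResource[] noResources = new FunctionResource[] {};
        FunctionInfo registered = FunctionRegistry.registerTemporaryUDF(name, udfClass, noResources);
        if (registered == null) {
            System.err.println("FAILED: Class " + udfClass.getName()
                + " does not implement UDF, GenericUDF, or UDAF");
            return false;
        }
        return true;
    }

    public static void main(String[] args) throws Exception {
        registerOrReport("tmp_current_timestamp", GenericUDFCurrentTimestamp.class); // registers
        registerOrReport("not_a_udf", String.class); // reports failure, returns false
    }
}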

Aggregations

FunctionResource (org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource): 8 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3 uses
IOException (java.io.IOException): 2 uses
ResourceUri (org.apache.hadoop.hive.metastore.api.ResourceUri): 2 uses
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 2 uses
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 2 uses
DataOutputStream (java.io.DataOutputStream): 1 use
FileNotFoundException (java.io.FileNotFoundException): 1 use
URI (java.net.URI): 1 use
URISyntaxException (java.net.URISyntaxException): 1 use
SQLException (java.sql.SQLException): 1 use
ArrayList (java.util.ArrayList): 1 use
ExecutionException (java.util.concurrent.ExecutionException): 1 use
PatternSyntaxException (java.util.regex.PatternSyntaxException): 1 use
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 1 use
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 1 use
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 1 use
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 1 use
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 1 use
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 1 use