
Example 1 with HiveAuthFactory

Use of org.apache.hive.service.auth.HiveAuthFactory in project hive by apache.

From the class ThriftBinaryCLIService, method run():

@Override
public void run() {
    try {
        // Server thread pool
        String threadPoolName = "HiveServer2-Handler-Pool";
        ExecutorService executorService = new ThreadPoolExecutorWithOomHook(minWorkerThreads, maxWorkerThreads, workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), new ThreadFactoryWithGarbageCleanup(threadPoolName), oomHook);
        // Thrift configs
        hiveAuthFactory = new HiveAuthFactory(hiveConf);
        TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
        TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
        TServerSocket serverSocket = null;
        List<String> sslVersionBlacklist = new ArrayList<String>();
        for (String sslVersion : hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")) {
            sslVersionBlacklist.add(sslVersion);
        }
        if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
            serverSocket = HiveAuthUtils.getServerSocket(hiveHost, portNum);
        } else {
            String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
            if (keyStorePath.isEmpty()) {
                throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname + " is not configured for SSL connection");
            }
            String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
            serverSocket = HiveAuthUtils.getServerSSLSocket(hiveHost, portNum, keyStorePath, keyStorePassword, sslVersionBlacklist);
        }
        // Server args
        int maxMessageSize = hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE);
        int requestTimeout = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_TIMEOUT, TimeUnit.SECONDS);
        int beBackoffSlotLength = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_BEBACKOFF_SLOT_LENGTH, TimeUnit.MILLISECONDS);
        TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket)
                .processorFactory(processorFactory)
                .transportFactory(transportFactory)
                .protocolFactory(new TBinaryProtocol.Factory())
                .inputProtocolFactory(new TBinaryProtocol.Factory(true, true, maxMessageSize, maxMessageSize))
                .requestTimeout(requestTimeout).requestTimeoutUnit(TimeUnit.SECONDS)
                .beBackoffSlotLength(beBackoffSlotLength).beBackoffSlotLengthUnit(TimeUnit.MILLISECONDS)
                .executorService(executorService);
        // TCP Server
        server = new TThreadPoolServer(sargs);
        server.setServerEventHandler(new TServerEventHandler() {

            @Override
            public ServerContext createContext(TProtocol input, TProtocol output) {
                Metrics metrics = MetricsFactory.getInstance();
                if (metrics != null) {
                    try {
                        metrics.incrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                        metrics.incrementCounter(MetricsConstant.CUMULATIVE_CONNECTION_COUNT);
                    } catch (Exception e) {
                        LOG.warn("Error Reporting JDO operation to Metrics system", e);
                    }
                }
                return new ThriftCLIServerContext();
            }

            @Override
            public void deleteContext(ServerContext serverContext, TProtocol input, TProtocol output) {
                Metrics metrics = MetricsFactory.getInstance();
                if (metrics != null) {
                    try {
                        metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                    } catch (Exception e) {
                        LOG.warn("Error Reporting JDO operation to Metrics system", e);
                    }
                }
                ThriftCLIServerContext context = (ThriftCLIServerContext) serverContext;
                SessionHandle sessionHandle = context.getSessionHandle();
                if (sessionHandle != null) {
                    LOG.info("Session disconnected without closing properly. ");
                    try {
                        boolean close = cliService.getSessionManager().getSession(sessionHandle).getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_CLOSE_SESSION_ON_DISCONNECT);
                        LOG.info((close ? "Closing" : "Not closing") + " the session: " + sessionHandle);
                        if (close) {
                            cliService.closeSession(sessionHandle);
                        }
                    } catch (HiveSQLException e) {
                        LOG.warn("Failed to close session: " + e, e);
                    }
                }
            }

            @Override
            public void preServe() {
            }

            @Override
            public void processContext(ServerContext serverContext, TTransport input, TTransport output) {
                currentServerContext.set(serverContext);
            }
        });
        String msg = "Starting " + ThriftBinaryCLIService.class.getSimpleName() + " on port " + portNum + " with " + minWorkerThreads + "..." + maxWorkerThreads + " worker threads";
        LOG.info(msg);
        server.serve();
    } catch (Throwable t) {
        LOG.error("Error starting HiveServer2: could not start " + ThriftBinaryCLIService.class.getSimpleName(), t);
        System.exit(-1);
    }
}
Also used: ThreadFactoryWithGarbageCleanup (org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup), TServerEventHandler (org.apache.thrift.server.TServerEventHandler), ArrayList (java.util.ArrayList), HiveAuthFactory (org.apache.hive.service.auth.HiveAuthFactory), TProcessorFactory (org.apache.thrift.TProcessorFactory), MetricsFactory (org.apache.hadoop.hive.common.metrics.common.MetricsFactory), TTransportFactory (org.apache.thrift.transport.TTransportFactory), TServerSocket (org.apache.thrift.transport.TServerSocket), Metrics (org.apache.hadoop.hive.common.metrics.common.Metrics), TProtocol (org.apache.thrift.protocol.TProtocol), HiveSQLException (org.apache.hive.service.cli.HiveSQLException), SessionHandle (org.apache.hive.service.cli.SessionHandle), TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol), ServerContext (org.apache.thrift.server.ServerContext), ExecutorService (java.util.concurrent.ExecutorService), TTransport (org.apache.thrift.transport.TTransport), TThreadPoolServer (org.apache.thrift.server.TThreadPoolServer)
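
For context, a client reaches this binary Thrift endpoint through the Hive JDBC driver rather than raw Thrift. A minimal sketch, assuming HiveServer2 listens on the default binary port 10000 on localhost and accepts the user "hive" without a password (host, port, database, and credentials are all illustrative):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class BinaryModeClientSketch {
    public static void main(String[] args) throws Exception {
        // Older driver versions need an explicit load; newer ones register via ServiceLoader
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // jdbc:hive2://host:port/db is the URL form for the binary (plain TCP) transport
        String url = "jdbc:hive2://localhost:10000/default";
        try (Connection conn = DriverManager.getConnection(url, "hive", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SHOW TABLES")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}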

Example 2 with HiveAuthFactory

Use of org.apache.hive.service.auth.HiveAuthFactory in project hive by apache.

From the class ThriftHttpCLIService, method run():

/**
 * Configure Jetty to serve http requests. Example of a client connection URL:
 * http://localhost:10000/servlets/thrifths2/ A gateway may cause the actual target URL to differ,
 * e.g. http://gateway:port/hive2/servlets/thrifths2/
 */
@Override
public void run() {
    try {
        // Server thread pool
        // Start with minWorkerThreads, expand till maxWorkerThreads and reject subsequent requests
        String threadPoolName = "HiveServer2-HttpHandler-Pool";
        ExecutorService executorService = new ThreadPoolExecutorWithOomHook(minWorkerThreads, maxWorkerThreads, workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), new ThreadFactoryWithGarbageCleanup(threadPoolName), oomHook);
        ExecutorThreadPool threadPool = new ExecutorThreadPool(executorService);
        // HTTP Server
        httpServer = new Server(threadPool);
        ServerConnector connector;
        final HttpConfiguration conf = new HttpConfiguration();
        // Configure header size
        int requestHeaderSize = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_REQUEST_HEADER_SIZE);
        int responseHeaderSize = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_RESPONSE_HEADER_SIZE);
        conf.setRequestHeaderSize(requestHeaderSize);
        conf.setResponseHeaderSize(responseHeaderSize);
        final HttpConnectionFactory http = new HttpConnectionFactory(conf);
        boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL);
        String schemeName = useSsl ? "https" : "http";
        // Change connector if SSL is used
        if (useSsl) {
            String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
            String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
            if (keyStorePath.isEmpty()) {
                throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname + " is not configured for SSL connection");
            }
            SslContextFactory sslContextFactory = new SslContextFactory();
            String[] excludedProtocols = hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",");
            LOG.info("HTTP Server SSL: adding excluded protocols: " + Arrays.toString(excludedProtocols));
            sslContextFactory.addExcludeProtocols(excludedProtocols);
            LOG.info("HTTP Server SSL: SslContextFactory.getExcludeProtocols = " + Arrays.toString(sslContextFactory.getExcludeProtocols()));
            sslContextFactory.setKeyStorePath(keyStorePath);
            sslContextFactory.setKeyStorePassword(keyStorePassword);
            connector = new ServerConnector(httpServer, sslContextFactory, http);
        } else {
            connector = new ServerConnector(httpServer, http);
        }
        connector.setPort(portNum);
        // Enable SO_REUSEADDR (takes effect on Linux; not on Windows)
        connector.setReuseAddress(true);
        int maxIdleTime = (int) hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME, TimeUnit.MILLISECONDS);
        connector.setIdleTimeout(maxIdleTime);
        httpServer.addConnector(connector);
        // Thrift configs
        hiveAuthFactory = new HiveAuthFactory(hiveConf);
        TProcessor processor = new TCLIService.Processor<Iface>(this);
        TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
        // Set during the init phase of HiveServer2 if auth mode is kerberos
        // UGI for the hive/_HOST (kerberos) principal
        UserGroupInformation serviceUGI = cliService.getServiceUGI();
        // UGI for the http/_HOST (SPNego) principal
        UserGroupInformation httpUGI = cliService.getHttpUGI();
        String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
        TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, authType, serviceUGI, httpUGI, hiveAuthFactory);
        // Context handler
        final ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
        context.setContextPath("/");
        if (hiveConf.getBoolean(ConfVars.HIVE_SERVER2_XSRF_FILTER_ENABLED.varname, false)) {
            // context.addFilter(Utils.getXSRFFilterHolder(null, null), "/" ,
            // FilterMapping.REQUEST);
            // Filtering does not work here currently, doing filter in ThriftHttpServlet
            LOG.debug("XSRF filter enabled");
        } else {
            LOG.warn("XSRF filter disabled");
        }
        final String httpPath = getHttpPath(hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH));
        if (HiveConf.getBoolVar(hiveConf, ConfVars.HIVE_SERVER2_THRIFT_HTTP_COMPRESSION_ENABLED)) {
            final GzipHandler gzipHandler = new GzipHandler();
            gzipHandler.setHandler(context);
            gzipHandler.addIncludedMethods(HttpMethod.POST);
            gzipHandler.addIncludedMimeTypes(APPLICATION_THRIFT);
            httpServer.setHandler(gzipHandler);
        } else {
            httpServer.setHandler(context);
        }
        context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
        // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyReceiveDuration, etc.
        // Finally, start the server
        httpServer.start();
        String msg = "Started " + ThriftHttpCLIService.class.getSimpleName() + " in " + schemeName + " mode on port " + portNum + " path=" + httpPath + " with " + minWorkerThreads + "..." + maxWorkerThreads + " worker threads";
        LOG.info(msg);
        httpServer.join();
    } catch (Throwable t) {
        LOG.error("Error starting HiveServer2: could not start " + ThriftHttpCLIService.class.getSimpleName(), t);
        System.exit(-1);
    }
}
Also used: TProtocolFactory (org.apache.thrift.protocol.TProtocolFactory), ThreadFactoryWithGarbageCleanup (org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup), TProcessor (org.apache.thrift.TProcessor), Server (org.eclipse.jetty.server.Server), ServletHolder (org.eclipse.jetty.servlet.ServletHolder), HttpConnectionFactory (org.eclipse.jetty.server.HttpConnectionFactory), SslContextFactory (org.eclipse.jetty.util.ssl.SslContextFactory), HiveAuthFactory (org.apache.hive.service.auth.HiveAuthFactory), HttpConfiguration (org.eclipse.jetty.server.HttpConfiguration), TServlet (org.apache.thrift.server.TServlet), ServerConnector (org.eclipse.jetty.server.ServerConnector), UserGroupInformation (org.apache.hadoop.security.UserGroupInformation), GzipHandler (org.eclipse.jetty.server.handler.gzip.GzipHandler), ExecutorService (java.util.concurrent.ExecutorService), ExecutorThreadPool (org.eclipse.jetty.util.thread.ExecutorThreadPool), ServletContextHandler (org.eclipse.jetty.servlet.ServletContextHandler)
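
The javadoc above shows the raw servlet URL; with the Hive JDBC driver the same HTTP endpoint is selected through session variables in the connection URL. A minimal sketch, assuming HTTP transport on port 10001 and hive.server2.thrift.http.path set to cliservice (all values illustrative):

import java.sql.Connection;
import java.sql.DriverManager;

public class HttpModeClientSketch {
    public static void main(String[] args) throws Exception {
        // transportMode=http selects Thrift-over-HTTP; httpPath must match the
        // server-side hive.server2.thrift.http.path setting
        String url = "jdbc:hive2://localhost:10001/default;transportMode=http;httpPath=cliservice";
        try (Connection conn = DriverManager.getConnection(url, "hive", "")) {
            System.out.println("Connected over HTTP transport: " + !conn.isClosed());
        }
    }
}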

Example 3 with HiveAuthFactory

Use of org.apache.hive.service.auth.HiveAuthFactory in project hive by apache.

From the class TestHiveAuthFactory, method testStartTokenManagerForMemoryTokenStore():

/**
 * Verify that the delegation token manager starts without exceptions for MemoryTokenStore.
 * @throws Exception
 */
@Test
public void testStartTokenManagerForMemoryTokenStore() throws Exception {
    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.KERBEROS.getAuthName());
    String principalName = miniHiveKdc.getFullHiveServicePrincipal();
    System.out.println("Principal: " + principalName);
    hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, principalName);
    String keyTabFile = miniHiveKdc.getKeyTabFile(miniHiveKdc.getHiveServicePrincipal());
    System.out.println("keyTabFile: " + keyTabFile);
    Assert.assertNotNull(keyTabFile);
    hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keyTabFile);
    HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
    Assert.assertNotNull(authFactory);
    Assert.assertEquals("org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory", authFactory.getAuthTransFactory().getClass().getName());
}
Also used: HiveAuthFactory (org.apache.hive.service.auth.HiveAuthFactory), Test (org.junit.Test)
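
The test configures the server side of Kerberos authentication; a client connecting to such a server names the HiveServer2 service principal in the JDBC URL and authenticates from its Kerberos ticket cache. A minimal sketch, assuming a ticket already obtained with kinit and an illustrative host and realm:

import java.sql.Connection;
import java.sql.DriverManager;

public class KerberosClientSketch {
    public static void main(String[] args) throws Exception {
        // principal= must match the server's Kerberos principal; no user/password is
        // passed because credentials come from the ticket cache
        String url = "jdbc:hive2://hs2.example.com:10000/default;principal=hive/hs2.example.com@EXAMPLE.COM";
        try (Connection conn = DriverManager.getConnection(url)) {
            System.out.println("Kerberos connection established: " + !conn.isClosed());
        }
    }
}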

Example 4 with HiveAuthFactory

Use of org.apache.hive.service.auth.HiveAuthFactory in project hive by apache.

From the class ThriftBinaryCLIService, method initServer():

@Override
protected void initServer() {
    try {
        // Server thread pool
        String threadPoolName = "HiveServer2-Handler-Pool";
        ExecutorService executorService = new ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads, workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<>(), new ThreadFactoryWithGarbageCleanup(threadPoolName));
        // Thrift configs
        hiveAuthFactory = new HiveAuthFactory(hiveConf);
        TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
        TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
        TServerSocket serverSocket = null;
        List<String> sslVersionBlacklist = new ArrayList<String>();
        for (String sslVersion : hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")) {
            sslVersionBlacklist.add(sslVersion);
        }
        if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
            serverSocket = HiveAuthUtils.getServerSocket(hiveHost, portNum);
        } else {
            String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
            if (keyStorePath.isEmpty()) {
                throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname + " is not configured for SSL connection");
            }
            String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
            String keyStoreType = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_TYPE).trim();
            String keyStoreAlgorithm = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYMANAGERFACTORY_ALGORITHM).trim();
            String includeCiphersuites = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_BINARY_INCLUDE_CIPHERSUITES).trim();
            serverSocket = HiveAuthUtils.getServerSSLSocket(hiveHost, portNum, keyStorePath, keyStorePassword, keyStoreType, keyStoreAlgorithm, sslVersionBlacklist, includeCiphersuites);
        }
        // Server args
        int maxMessageSize = hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE);
        int requestTimeout = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_TIMEOUT, TimeUnit.SECONDS);
        int beBackoffSlotLength = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_BEBACKOFF_SLOT_LENGTH, TimeUnit.MILLISECONDS);
        TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket)
                .processorFactory(processorFactory)
                .transportFactory(transportFactory)
                .protocolFactory(new TBinaryProtocol.Factory())
                .inputProtocolFactory(new TBinaryProtocol.Factory(true, true, maxMessageSize, maxMessageSize))
                .executorService(executorService);
        // TCP Server
        server = new TThreadPoolServer(sargs);
        server.setServerEventHandler(new TServerEventHandler() {

            @Override
            public ServerContext createContext(TProtocol input, TProtocol output) {
                Metrics metrics = MetricsFactory.getInstance();
                if (metrics != null) {
                    metrics.incrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                    metrics.incrementCounter(MetricsConstant.CUMULATIVE_CONNECTION_COUNT);
                }
                return new ThriftCLIServerContext();
            }

            /**
             * This is called by the Thrift server when the underlying client
             * connection is cleaned up by the server because the connection has
             * been closed.
             */
            @Override
            public void deleteContext(ServerContext serverContext, TProtocol input, TProtocol output) {
                Metrics metrics = MetricsFactory.getInstance();
                if (metrics != null) {
                    metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                }
                final ThriftCLIServerContext context = (ThriftCLIServerContext) serverContext;
                final Optional<SessionHandle> sessionHandle = context.getSessionHandle();
                if (sessionHandle.isPresent()) {
                    // Normally, the client should politely inform the server it is
                    // closing its session with Hive before closing its network
                    // connection. However, if the client connection dies for any reason
                    // (load-balancer round-robin configuration, firewall kills
                    // long-running sessions, bad client, failed client, timed-out
                    // client, etc.) then the server will close the connection without
                    // having properly cleaned up the Hive session (resources,
                    // configuration, logging etc.). That needs to be cleaned up now.
                    LOG.warn("Client connection bound to {} unexpectedly closed: closing this Hive session to release its resources. " + "The connection processed {} total messages during its lifetime of {}ms. Inspect the client connection " + "for time-out, firewall killing the connection, invalid load balancer configuration, etc.", sessionHandle, context.getMessagesProcessedCount(), context.getDuration().toMillis());
                    try {
                        final boolean close = cliService.getSessionManager().getSession(sessionHandle.get()).getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_CLOSE_SESSION_ON_DISCONNECT);
                        if (close) {
                            cliService.closeSession(sessionHandle.get());
                        } else {
                            LOG.warn("Session not actually closed because configuration {} is set to false", ConfVars.HIVE_SERVER2_CLOSE_SESSION_ON_DISCONNECT.varname);
                        }
                    } catch (HiveSQLException e) {
                        LOG.warn("Failed to close session", e);
                    }
                } else {
                    // No session handle here: the client disconnected before it was
                    // able to create a session in the first place
                    if (context.getSessionCount() == 0) {
                        LOG.info("A client connection was closed before creating a Hive session. " + "Most likely it is a client that is connecting to this server then " + "immediately closing the socket (i.e., TCP health check or port scanner)");
                    }
                }
            }

            @Override
            public void preServe() {
            }

            @Override
            public void processContext(ServerContext serverContext, TTransport input, TTransport output) {
                ThriftCLIServerContext context = (ThriftCLIServerContext) serverContext;
                currentServerContext.set(context);
                context.incMessagesProcessedCount();
            }
        });
        String msg = "Starting " + ThriftBinaryCLIService.class.getSimpleName() + " on port " + portNum + " with " + minWorkerThreads + "..." + maxWorkerThreads + " worker threads";
        LOG.info(msg);
    } catch (Exception e) {
        throw new RuntimeException("Failed to init thrift server", e);
    }
}
Also used: ThreadFactoryWithGarbageCleanup (org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup), TServerEventHandler (org.apache.thrift.server.TServerEventHandler), ArrayList (java.util.ArrayList), HiveAuthFactory (org.apache.hive.service.auth.HiveAuthFactory), TProcessorFactory (org.apache.thrift.TProcessorFactory), MetricsFactory (org.apache.hadoop.hive.common.metrics.common.MetricsFactory), TTransportFactory (org.apache.thrift.transport.TTransportFactory), TServerSocket (org.apache.thrift.transport.TServerSocket), Metrics (org.apache.hadoop.hive.common.metrics.common.Metrics), TProtocol (org.apache.thrift.protocol.TProtocol), Optional (java.util.Optional), HiveSQLException (org.apache.hive.service.cli.HiveSQLException), TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol), ServerContext (org.apache.thrift.server.ServerContext), ExecutorService (java.util.concurrent.ExecutorService), ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor), TTransport (org.apache.thrift.transport.TTransport), TThreadPoolServer (org.apache.thrift.server.TThreadPoolServer)
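
Example 4 builds an SSL server socket from the keystore settings; the client side of that handshake enables SSL in the JDBC URL and points at a truststore holding the server certificate. A minimal sketch, with an illustrative truststore path and password:

import java.sql.Connection;
import java.sql.DriverManager;

public class SslBinaryClientSketch {
    public static void main(String[] args) throws Exception {
        // ssl=true turns on TLS for the binary transport; sslTrustStore and
        // trustStorePassword locate the truststore with the HiveServer2 certificate
        String url = "jdbc:hive2://localhost:10000/default;ssl=true"
                + ";sslTrustStore=/tmp/hive-truststore.jks;trustStorePassword=changeit";
        try (Connection conn = DriverManager.getConnection(url, "hive", "")) {
            System.out.println("SSL connection established: " + !conn.isClosed());
        }
    }
}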

Example 5 with HiveAuthFactory

Use of org.apache.hive.service.auth.HiveAuthFactory in project hive by apache.

From the class TestHiveAuthFactory, method testStartTokenManagerForDBTokenStore():

/**
 * Verify that delegation token manager is started with no exception for DBTokenStore
 * @throws Exception
 */
@Test
public void testStartTokenManagerForDBTokenStore() throws Exception {
    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.KERBEROS.getAuthName());
    String principalName = miniHiveKdc.getFullHiveServicePrincipal();
    System.out.println("Principal: " + principalName);
    hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, principalName);
    String keyTabFile = miniHiveKdc.getKeyTabFile(miniHiveKdc.getHiveServicePrincipal());
    System.out.println("keyTabFile: " + keyTabFile);
    Assert.assertNotNull(keyTabFile);
    hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keyTabFile);
    hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.metastore.security.DBTokenStore");
    HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
    Assert.assertNotNull(authFactory);
    Assert.assertEquals("org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory", authFactory.getAuthTransFactory().getClass().getName());
}
Also used: HiveAuthFactory (org.apache.hive.service.auth.HiveAuthFactory), Test (org.junit.Test)
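
Both tests exercise the delegation token manager that HiveAuthFactory starts. On the client side, a Kerberos-authenticated connection can request such a token for later keytab-free use; a hedged sketch, assuming the HiveConnection#getDelegationToken(owner, renewer) API and illustrative host, realm, and user names:

import java.sql.Connection;
import java.sql.DriverManager;
import org.apache.hive.jdbc.HiveConnection;

public class DelegationTokenSketch {
    public static void main(String[] args) throws Exception {
        // Authenticate with Kerberos first, then ask HiveServer2's token manager
        // (the component these tests verify) to issue a delegation token
        String url = "jdbc:hive2://hs2.example.com:10000/default;principal=hive/hs2.example.com@EXAMPLE.COM";
        try (Connection conn = DriverManager.getConnection(url)) {
            String tokenStr = ((HiveConnection) conn).getDelegationToken("hiveuser", "hive");
            // The token string can be handed to a process without a keytab, which then
            // connects with ;auth=delegationToken after loading the token into its UGI
            System.out.println("Delegation token: " + tokenStr);
        }
    }
}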

Aggregations

HiveAuthFactory (org.apache.hive.service.auth.HiveAuthFactory): 6 uses
ThreadFactoryWithGarbageCleanup (org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup): 4 uses
ExecutorService (java.util.concurrent.ExecutorService): 3 uses
MetricsFactory (org.apache.hadoop.hive.common.metrics.common.MetricsFactory): 3 uses
ArrayList (java.util.ArrayList): 2 uses
ThreadPoolExecutor (java.util.concurrent.ThreadPoolExecutor): 2 uses
Metrics (org.apache.hadoop.hive.common.metrics.common.Metrics): 2 uses
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 2 uses
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 2 uses
TProcessor (org.apache.thrift.TProcessor): 2 uses
TProcessorFactory (org.apache.thrift.TProcessorFactory): 2 uses
TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol): 2 uses
TProtocol (org.apache.thrift.protocol.TProtocol): 2 uses
TProtocolFactory (org.apache.thrift.protocol.TProtocolFactory): 2 uses
ServerContext (org.apache.thrift.server.ServerContext): 2 uses
TServerEventHandler (org.apache.thrift.server.TServerEventHandler): 2 uses
TServlet (org.apache.thrift.server.TServlet): 2 uses
TThreadPoolServer (org.apache.thrift.server.TThreadPoolServer): 2 uses
TServerSocket (org.apache.thrift.transport.TServerSocket): 2 uses
TTransport (org.apache.thrift.transport.TTransport): 2 uses