Example 1 with SessionHandle

Use of org.apache.hive.service.cli.SessionHandle in project hive by apache.

Class ThriftBinaryCLIService, method run.

@Override
public void run() {
    try {
        // Server thread pool
        String threadPoolName = "HiveServer2-Handler-Pool";
        ExecutorService executorService = new ThreadPoolExecutorWithOomHook(minWorkerThreads, maxWorkerThreads, workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), new ThreadFactoryWithGarbageCleanup(threadPoolName), oomHook);
        // Thrift configs
        hiveAuthFactory = new HiveAuthFactory(hiveConf);
        TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
        TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
        TServerSocket serverSocket = null;
        List<String> sslVersionBlacklist = new ArrayList<String>();
        for (String sslVersion : hiveConf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")) {
            sslVersionBlacklist.add(sslVersion);
        }
        if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
            serverSocket = HiveAuthUtils.getServerSocket(hiveHost, portNum);
        } else {
            String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
            if (keyStorePath.isEmpty()) {
                throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname + " Not configured for SSL connection");
            }
            String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf, HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
            serverSocket = HiveAuthUtils.getServerSSLSocket(hiveHost, portNum, keyStorePath, keyStorePassword, sslVersionBlacklist);
        }
        // Server args
        int maxMessageSize = hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE);
        int requestTimeout = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_TIMEOUT, TimeUnit.SECONDS);
        int beBackoffSlotLength = (int) hiveConf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_BEBACKOFF_SLOT_LENGTH, TimeUnit.MILLISECONDS);
        TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket).processorFactory(processorFactory).transportFactory(transportFactory).protocolFactory(new TBinaryProtocol.Factory()).inputProtocolFactory(new TBinaryProtocol.Factory(true, true, maxMessageSize, maxMessageSize)).requestTimeout(requestTimeout).requestTimeoutUnit(TimeUnit.SECONDS).beBackoffSlotLength(beBackoffSlotLength).beBackoffSlotLengthUnit(TimeUnit.MILLISECONDS).executorService(executorService);
        // TCP Server
        server = new TThreadPoolServer(sargs);
        server.setServerEventHandler(new TServerEventHandler() {

            @Override
            public ServerContext createContext(TProtocol input, TProtocol output) {
                Metrics metrics = MetricsFactory.getInstance();
                if (metrics != null) {
                    try {
                        metrics.incrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                        metrics.incrementCounter(MetricsConstant.CUMULATIVE_CONNECTION_COUNT);
                    } catch (Exception e) {
                        LOG.warn("Error Reporting JDO operation to Metrics system", e);
                    }
                }
                return new ThriftCLIServerContext();
            }

            @Override
            public void deleteContext(ServerContext serverContext, TProtocol input, TProtocol output) {
                Metrics metrics = MetricsFactory.getInstance();
                if (metrics != null) {
                    try {
                        metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS);
                    } catch (Exception e) {
                        LOG.warn("Error Reporting JDO operation to Metrics system", e);
                    }
                }
                ThriftCLIServerContext context = (ThriftCLIServerContext) serverContext;
                SessionHandle sessionHandle = context.getSessionHandle();
                if (sessionHandle != null) {
                    LOG.info("Session disconnected without closing properly. ");
                    try {
                        boolean close = cliService.getSessionManager().getSession(sessionHandle).getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_CLOSE_SESSION_ON_DISCONNECT);
                        LOG.info((close ? "" : "Not ") + "Closing the session: " + sessionHandle);
                        if (close) {
                            cliService.closeSession(sessionHandle);
                        }
                    } catch (HiveSQLException e) {
                        LOG.warn("Failed to close session: " + e, e);
                    }
                }
            }

            @Override
            public void preServe() {
            }

            @Override
            public void processContext(ServerContext serverContext, TTransport input, TTransport output) {
                currentServerContext.set(serverContext);
            }
        });
        String msg = "Starting " + ThriftBinaryCLIService.class.getSimpleName() + " on port " + portNum + " with " + minWorkerThreads + "..." + maxWorkerThreads + " worker threads";
        LOG.info(msg);
        server.serve();
    } catch (Throwable t) {
        LOG.error("Error starting HiveServer2: could not start " + ThriftBinaryCLIService.class.getSimpleName(), t);
        System.exit(-1);
    }
}
Also used : ThreadFactoryWithGarbageCleanup(org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup) TServerEventHandler(org.apache.thrift.server.TServerEventHandler) ArrayList(java.util.ArrayList) HiveAuthFactory(org.apache.hive.service.auth.HiveAuthFactory) TProcessorFactory(org.apache.thrift.TProcessorFactory) MetricsFactory(org.apache.hadoop.hive.common.metrics.common.MetricsFactory) TTransportFactory(org.apache.thrift.transport.TTransportFactory) TServerSocket(org.apache.thrift.transport.TServerSocket) Metrics(org.apache.hadoop.hive.common.metrics.common.Metrics) TProtocol(org.apache.thrift.protocol.TProtocol) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) SessionHandle(org.apache.hive.service.cli.SessionHandle) TBinaryProtocol(org.apache.thrift.protocol.TBinaryProtocol) ServerContext(org.apache.thrift.server.ServerContext) ExecutorService(java.util.concurrent.ExecutorService) TTransport(org.apache.thrift.transport.TTransport) TThreadPoolServer(org.apache.thrift.server.TThreadPoolServer)
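The deleteContext callback above can only close orphaned sessions because the per-connection context was populated with the SessionHandle when the session was opened. A minimal sketch of what such a context might look like, assuming the Thrift version in use treats ServerContext as a plain marker interface (field and accessor names here are illustrative):

import org.apache.hive.service.cli.SessionHandle;
import org.apache.thrift.server.ServerContext;

// Illustrative per-connection state: created in createContext(), read back in
// deleteContext() to decide whether the disconnected client left a session open.
public class ThriftCLIServerContext implements ServerContext {

    private SessionHandle sessionHandle = null;

    public void setSessionHandle(SessionHandle sessionHandle) {
        this.sessionHandle = sessionHandle;
    }

    public SessionHandle getSessionHandle() {
        return sessionHandle;
    }
}

The handler that services OpenSession would call setSessionHandle on the context stored in currentServerContext (set in processContext above), which is what makes the cleanup in deleteContext possible.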

Example 2 with SessionHandle

Use of org.apache.hive.service.cli.SessionHandle in project hive by apache.

Class ThriftCliServiceTestWithCookie, method testOpenSession.

@Test
public void testOpenSession() throws Exception {
    // Open a new client session
    SessionHandle sessHandle = client.openSession(USERNAME, PASSWORD, new HashMap<String, String>());
    // Session handle should not be null
    assertNotNull("Session handle should not be null", sessHandle);
    // Close client session
    client.closeSession(sessHandle);
}
Also used : SessionHandle(org.apache.hive.service.cli.SessionHandle) Test(org.junit.Test)
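The test opens and closes the session back to back; in longer-lived callers it can help to put the close in a finally block so the handle is released even if intermediate work throws. A minimal sketch using the same client calls as the test above (USERNAME and PASSWORD stand in for whatever credentials apply):

SessionHandle sessHandle = client.openSession(USERNAME, PASSWORD, new HashMap<String, String>());
try {
    // ... work against the session, e.g. client.executeStatement(sessHandle, "SHOW TABLES", new HashMap<String, String>()) ...
} finally {
    // Always release the server-side session, even if the work above failed
    client.closeSession(sessHandle);
}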

Example 3 with SessionHandle

Use of org.apache.hive.service.cli.SessionHandle in project hive by apache.

Class ThriftCLIService, method getSessionHandle.

/**
 * Create a session handle for an incoming OpenSession request.
 * @param req the Thrift OpenSession request
 * @param res the Thrift OpenSession response; its server protocol version is set here
 * @return the handle of the newly opened session
 * @throws HiveSQLException
 * @throws LoginException
 * @throws IOException
 */
SessionHandle getSessionHandle(TOpenSessionReq req, TOpenSessionResp res) throws HiveSQLException, LoginException, IOException {
    String userName = getUserName(req);
    String ipAddress = getIpAddress();
    TProtocolVersion protocol = getMinVersion(CLIService.SERVER_VERSION, req.getClient_protocol());
    SessionHandle sessionHandle;
    if (cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) && (userName != null)) {
        String delegationTokenStr = getDelegationToken(userName);
        sessionHandle = cliService.openSessionWithImpersonation(protocol, userName, req.getPassword(), ipAddress, req.getConfiguration(), delegationTokenStr);
    } else {
        sessionHandle = cliService.openSession(protocol, userName, req.getPassword(), ipAddress, req.getConfiguration());
    }
    res.setServerProtocolVersion(protocol);
    return sessionHandle;
}
Also used : TProtocolVersion(org.apache.hive.service.rpc.thrift.TProtocolVersion) SessionHandle(org.apache.hive.service.cli.SessionHandle)
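getMinVersion is not shown here; its role is to negotiate the protocol by picking the lower of the server's supported version and the version the client advertised in the request. A minimal sketch of such a helper, assuming TProtocolVersion is a standard Thrift-generated enum with getValue() and findByValue() (the actual Hive helper may be written differently):

// Never speak a newer protocol than the client requested, nor newer than the server supports.
private static TProtocolVersion getMinVersion(TProtocolVersion server, TProtocolVersion client) {
    int negotiated = Math.min(server.getValue(), client.getValue());
    return TProtocolVersion.findByValue(negotiated);
}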

Example 4 with SessionHandle

Use of org.apache.hive.service.cli.SessionHandle in project hive by apache.

Class ThriftCliServiceTestWithCookie, method testExecuteStatement.

/**
 * Test synchronous query execution
 * @throws Exception
 */
@Test
public void testExecuteStatement() throws Exception {
    Map<String, String> opConf = new HashMap<String, String>();
    // Open a new client session
    SessionHandle sessHandle = client.openSession(USERNAME, PASSWORD, opConf);
    // Session handle should not be null
    assertNotNull("Session handle should not be null", sessHandle);
    // Change lock manager to embedded mode
    String queryString = "SET hive.lock.manager=" + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager";
    client.executeStatement(sessHandle, queryString, opConf);
    // Drop the table if it exists
    queryString = "DROP TABLE IF EXISTS TEST_EXEC_THRIFT";
    client.executeStatement(sessHandle, queryString, opConf);
    // Create a test table
    queryString = "CREATE TABLE TEST_EXEC_THRIFT(ID STRING)";
    client.executeStatement(sessHandle, queryString, opConf);
    // Execute another query
    queryString = "SELECT ID+1 FROM TEST_EXEC_THRIFT";
    OperationHandle opHandle = client.executeStatement(sessHandle, queryString, opConf);
    assertNotNull(opHandle);
    OperationStatus opStatus = client.getOperationStatus(opHandle, false);
    assertNotNull(opStatus);
    OperationState state = opStatus.getState();
    // Expect query to be completed now
    assertEquals("Query should be finished", OperationState.FINISHED, state);
    // Cleanup
    queryString = "DROP TABLE TEST_EXEC_THRIFT";
    client.executeStatement(sessHandle, queryString, opConf);
    client.closeSession(sessHandle);
}
Also used : HashMap(java.util.HashMap) OperationStatus(org.apache.hive.service.cli.OperationStatus) SessionHandle(org.apache.hive.service.cli.SessionHandle) OperationHandle(org.apache.hive.service.cli.OperationHandle) OperationState(org.apache.hive.service.cli.OperationState) Test(org.junit.Test)
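The assertion on FINISHED holds here because the statement runs synchronously. When a statement is submitted asynchronously instead, callers typically poll getOperationStatus until the operation reaches a terminal state. A rough sketch of that pattern with the same client and handle types as the test (the polling interval and the treatment of non-FINISHED terminal states are up to the caller):

// Poll until the operation leaves its in-progress states, then check the outcome.
OperationStatus status = client.getOperationStatus(opHandle, false);
OperationState state = status.getState();
while (state == OperationState.INITIALIZED || state == OperationState.PENDING
        || state == OperationState.RUNNING) {
    Thread.sleep(500);
    status = client.getOperationStatus(opHandle, false);
    state = status.getState();
}
if (state != OperationState.FINISHED) {
    throw new HiveSQLException("Statement did not finish; final state: " + state);
}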

Example 5 with SessionHandle

Use of org.apache.hive.service.cli.SessionHandle in project carbondata by apache.

Class HiveEmbeddedServer2, method waitForStartup.

private void waitForStartup() throws Exception {
    long timeout = TimeUnit.MINUTES.toMillis(1);
    long unitOfWait = TimeUnit.SECONDS.toMillis(1);
    CLIService hs2Client = getServiceClientInternal();
    SessionHandle sessionHandle = null;
    for (int interval = 0; interval < timeout / unitOfWait; interval++) {
        Thread.sleep(unitOfWait);
        try {
            Map<String, String> sessionConf = new HashMap<String, String>();
            sessionHandle = hs2Client.openSession("foo", "bar", sessionConf);
            return;
        } catch (Exception e) {
            // service not started yet
        } finally {
            hs2Client.closeSession(sessionHandle);
        }
    }
    throw new TimeoutException("Couldn't get a hold of HiveServer2...");
}
Also used : HashMap(java.util.HashMap) SessionHandle(org.apache.hive.service.cli.SessionHandle) CLIService(org.apache.hive.service.cli.CLIService) TimeoutException(java.util.concurrent.TimeoutException)
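One detail worth noting in the loop above: the finally block calls closeSession even when openSession threw, in which case sessionHandle is still null. A slightly more defensive variant of the same wait loop, closing the probe session only when it was actually opened (a sketch, not the carbondata code):

private void waitForStartup() throws Exception {
    long timeout = TimeUnit.MINUTES.toMillis(1);
    long unitOfWait = TimeUnit.SECONDS.toMillis(1);
    CLIService hs2Client = getServiceClientInternal();
    for (int interval = 0; interval < timeout / unitOfWait; interval++) {
        Thread.sleep(unitOfWait);
        SessionHandle sessionHandle = null;
        try {
            sessionHandle = hs2Client.openSession("foo", "bar", new HashMap<String, String>());
            // Server answered: it is up, hand control back to the caller.
            return;
        } catch (Exception e) {
            // Service not started yet; retry on the next iteration.
        } finally {
            if (sessionHandle != null) {
                hs2Client.closeSession(sessionHandle);
            }
        }
    }
    throw new TimeoutException("Couldn't get a hold of HiveServer2...");
}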

Aggregations

SessionHandle (org.apache.hive.service.cli.SessionHandle): 82
OperationHandle (org.apache.hive.service.cli.OperationHandle): 47
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 39
Test (org.junit.Test): 36
IOException (java.io.IOException): 19
TException (org.apache.thrift.TException): 18
UnknownHostException (java.net.UnknownHostException): 16
LoginException (javax.security.auth.login.LoginException): 16
ServiceException (org.apache.hive.service.ServiceException): 16
ExploreException (co.cask.cdap.explore.service.ExploreException): 12
HashMap (java.util.HashMap): 12
QueryHandle (co.cask.cdap.proto.QueryHandle): 11
CLIServiceClient (org.apache.hive.service.cli.CLIServiceClient): 11
RowSet (org.apache.hive.service.cli.RowSet): 8
TimeoutException (java.util.concurrent.TimeoutException): 6
ThriftCLIServiceClient (org.apache.hive.service.cli.thrift.ThriftCLIServiceClient): 6
CLIService (org.apache.hive.service.cli.CLIService): 5
File (java.io.File): 4
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 4
ArrayList (java.util.ArrayList): 3