
Example 1 with PamAuthenticator

Use of org.apache.hive.http.security.PamAuthenticator in the Apache Hive project.

Source: class TestActivePassiveHA, method testNoConnectionOnPassive.

@Test(timeout = 60000)
public void testNoConnectionOnPassive() throws Exception {
    hiveConf1.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS, true);
    hiveConf2.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS, true);
    setPamConfs(hiveConf1);
    setPamConfs(hiveConf2);
    try {
        PamAuthenticator pamAuthenticator1 = new TestHS2HttpServerPam.TestPamAuthenticator(hiveConf1);
        PamAuthenticator pamAuthenticator2 = new TestHS2HttpServerPam.TestPamAuthenticator(hiveConf2);
        String instanceId1 = UUID.randomUUID().toString();
        miniHS2_1.setPamAuthenticator(pamAuthenticator1);
        miniHS2_1.start(getSecureConfOverlay(instanceId1));
        String instanceId2 = UUID.randomUUID().toString();
        Map<String, String> confOverlay = getSecureConfOverlay(instanceId2);
        miniHS2_2.setPamAuthenticator(pamAuthenticator2);
        miniHS2_2.start(confOverlay);
        String url1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
        assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_1.isLeader());
        // Don't get urls from ZK, it will actually be a service discovery URL that we don't want.
        String hs1Url = "jdbc:hive2://" + miniHS2_1.getHost() + ":" + miniHS2_1.getBinaryPort();
        // Should work.
        Connection hs2Conn = getConnection(hs1Url, System.getProperty("user.name"));
        hs2Conn.close();
        String resp = sendDelete(url1, true);
        assertTrue(resp, resp.contains("Failover successful!"));
        // wait for failover to close sessions
        while (miniHS2_1.getOpenSessionsCount() != 0) {
            Thread.sleep(100);
        }
        assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_2.isLeader());
        try {
            hs2Conn = getConnection(hs1Url, System.getProperty("user.name"));
            fail("Should throw");
        } catch (Exception e) {
            if (!e.getMessage().contains("Cannot open sessions on an inactive HS2")) {
                throw e;
            }
        }
    } finally {
        resetFailoverConfs();
    }
}
Also used : PamAuthenticator(org.apache.hive.http.security.PamAuthenticator) Connection(java.sql.Connection) SQLException(java.sql.SQLException) Test(org.junit.Test)
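
The helper sendDelete(url1, true) used above is defined elsewhere in TestActivePassiveHA and is not shown in this excerpt. A minimal sketch of what it plausibly does follows: issue an HTTP DELETE against the /leader endpoint and, when the flag is set, attach an HTTP Basic Authorization header for the PAM-protected web UI. The credential handling and the exact meaning of the boolean flag are assumptions, not the test's actual code.

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class FailoverDeleteSketch {

    // Hypothetical stand-in for TestActivePassiveHA.sendDelete(url, authorize).
    static String sendDelete(String url, boolean authorize, String user, String pass) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("DELETE");
        if (authorize) {
            // The /leader endpoint is PAM-protected; the web UI exposes this as HTTP Basic auth.
            String token = Base64.getEncoder()
                .encodeToString((user + ":" + pass).getBytes(StandardCharsets.UTF_8));
            conn.setRequestProperty("Authorization", "Basic " + token);
        }
        // Read the body from the error stream on 4xx/5xx so responses such as "Unauthorized"
        // are returned to the caller instead of throwing.
        int status = conn.getResponseCode();
        InputStream in = status >= 400 ? conn.getErrorStream() : conn.getInputStream();
        StringBuilder body = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                body.append(line);
            }
        }
        // On the current leader this body contains "Failover successful!".
        return body.toString();
    }
}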

Example 2 with PamAuthenticator

Use of org.apache.hive.http.security.PamAuthenticator in the Apache Hive project.

Source: class TestActivePassiveHA, method testManualFailover.

@Test(timeout = 60000)
public void testManualFailover() throws Exception {
    hiveConf1.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS, true);
    hiveConf2.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS, true);
    setPamConfs(hiveConf1);
    setPamConfs(hiveConf2);
    PamAuthenticator pamAuthenticator1 = new TestHS2HttpServerPam.TestPamAuthenticator(hiveConf1);
    PamAuthenticator pamAuthenticator2 = new TestHS2HttpServerPam.TestPamAuthenticator(hiveConf2);
    try {
        String instanceId1 = UUID.randomUUID().toString();
        miniHS2_1.setPamAuthenticator(pamAuthenticator1);
        miniHS2_1.start(getSecureConfOverlay(instanceId1));
        String instanceId2 = UUID.randomUUID().toString();
        Map<String, String> confOverlay = getSecureConfOverlay(instanceId2);
        confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, "http");
        confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, "clidriverTest");
        miniHS2_2.setPamAuthenticator(pamAuthenticator2);
        miniHS2_2.start(confOverlay);
        String url1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
        String url2 = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
        // when we start miniHS2_1 will be leader (sequential start)
        assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_1.isLeader());
        assertEquals("true", sendGet(url1, true, true));
        // trigger failover on miniHS2_1
        String resp = sendDelete(url1, true, true);
        assertTrue(resp.contains("Failover successful!"));
        // make sure miniHS2_1 is not leader
        assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
        assertEquals(false, miniHS2_1.isLeader());
        assertEquals("false", sendGet(url1, true, true));
        // make sure miniHS2_2 is the new leader
        assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_2.isLeader());
        assertEquals("true", sendGet(url2, true, true));
        // send failover request again to miniHS2_1 and get a failure
        resp = sendDelete(url1, true, true);
        assertTrue(resp.contains("Cannot failover an instance that is not a leader"));
        assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
        assertEquals(false, miniHS2_1.isLeader());
        // send failover request to miniHS2_2 and make sure miniHS2_1 takes over (returning back to leader, test listeners)
        resp = sendDelete(url2, true, true);
        assertTrue(resp.contains("Failover successful!"));
        assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_1.isLeader());
        assertEquals("true", sendGet(url1, true, true));
        assertEquals(true, miniHS2_2.getNotLeaderTestFuture().get());
        assertEquals("false", sendGet(url2, true, true));
        assertEquals(false, miniHS2_2.isLeader());
    } finally {
        resetFailoverConfs();
    }
}
Also used : PamAuthenticator(org.apache.hive.http.security.PamAuthenticator) Test(org.junit.Test)
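
The assertions above compare the /leader response body to "true" or "false", so the leadership servlet evidently reports status as plain text. A minimal sketch of such a check is shown here; the extra boolean arguments of sendGet(url, true, true) are assumed to toggle the Basic Authorization header and a CORS Origin header (CORS is enabled in this test), which this excerpt does not confirm.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class LeaderStatusSketch {

    // Hypothetical stand-in for TestActivePassiveHA.sendGet(url, useAuth, useCors).
    static boolean isLeader(String leaderUrl, String basicAuthToken) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(leaderUrl).openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Authorization", "Basic " + basicAuthToken);
        // Exercises the CORS filter enabled via HIVE_SERVER2_WEBUI_ENABLE_CORS.
        conn.setRequestProperty("Origin", "http://localhost");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            // The servlet writes "true" for the current leader and "false" otherwise.
            return Boolean.parseBoolean(reader.readLine().trim());
        }
    }
}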

Example 3 with PamAuthenticator

Use of org.apache.hive.http.security.PamAuthenticator in the Apache Hive project.

Source: class TestActivePassiveHA, method testClientConnectionsOnFailover.

@Test(timeout = 60000)
public void testClientConnectionsOnFailover() throws Exception {
    setPamConfs(hiveConf1);
    setPamConfs(hiveConf2);
    PamAuthenticator pamAuthenticator1 = new TestHS2HttpServerPam.TestPamAuthenticator(hiveConf1);
    PamAuthenticator pamAuthenticator2 = new TestHS2HttpServerPam.TestPamAuthenticator(hiveConf2);
    try {
        String instanceId1 = UUID.randomUUID().toString();
        miniHS2_1.setPamAuthenticator(pamAuthenticator1);
        miniHS2_1.start(getSecureConfOverlay(instanceId1));
        String instanceId2 = UUID.randomUUID().toString();
        Map<String, String> confOverlay = getSecureConfOverlay(instanceId2);
        confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, "http");
        confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, "clidriverTest");
        miniHS2_2.setPamAuthenticator(pamAuthenticator2);
        miniHS2_2.start(confOverlay);
        String url1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
        String url2 = "http://localhost:" + hiveConf2.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
        String zkJdbcUrl = miniHS2_1.getJdbcURL();
        String zkConnectString = zkServer.getConnectString();
        assertTrue(zkJdbcUrl.contains(zkConnectString));
        // when we start miniHS2_1 will be leader (sequential start)
        assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_1.isLeader());
        assertEquals("true", sendGet(url1, true));
        // before failover, check if we are getting connection from miniHS2_1
        String hs2_1_directUrl = "jdbc:hive2://" + miniHS2_1.getHost() + ":" + miniHS2_1.getBinaryPort() + "/default;serviceDiscoveryMode=" + serviceDiscoveryMode + ";zooKeeperNamespace=" + zkHANamespace + ";";
        String parsedUrl = HiveConnection.getAllUrls(zkJdbcUrl).get(0).getJdbcUriString();
        assertEquals(hs2_1_directUrl, parsedUrl);
        hs2Conn = getConnection(zkJdbcUrl, System.getProperty("user.name"));
        while (miniHS2_1.getOpenSessionsCount() != 1) {
            Thread.sleep(100);
        }
        // trigger failover on miniHS2_1 and make sure the connections are closed
        String resp = sendDelete(url1, true);
        assertTrue(resp.contains("Failover successful!"));
        // wait for failover to close sessions
        while (miniHS2_1.getOpenSessionsCount() != 0) {
            Thread.sleep(100);
        }
        // make sure miniHS2_1 is not leader
        assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
        assertEquals(false, miniHS2_1.isLeader());
        assertEquals("false", sendGet(url1, true));
        // make sure miniHS2_2 is the new leader
        assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_2.isLeader());
        assertEquals("true", sendGet(url2, true));
        // when we make a new connection we should get it from miniHS2_2 this time
        String hs2_2_directUrl = "jdbc:hive2://" + miniHS2_2.getHost() + ":" + miniHS2_2.getHttpPort() + "/default;serviceDiscoveryMode=" + serviceDiscoveryMode + ";zooKeeperNamespace=" + zkHANamespace + ";";
        parsedUrl = HiveConnection.getAllUrls(zkJdbcUrl).get(0).getJdbcUriString();
        assertEquals(hs2_2_directUrl, parsedUrl);
        hs2Conn = getConnection(zkJdbcUrl, System.getProperty("user.name"));
        while (miniHS2_2.getOpenSessionsCount() != 1) {
            Thread.sleep(100);
        }
        // send failover request again to miniHS2_1 and get a failure
        resp = sendDelete(url1, true);
        assertTrue(resp.contains("Cannot failover an instance that is not a leader"));
        assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
        assertEquals(false, miniHS2_1.isLeader());
        // send failover request to miniHS2_2 and make sure miniHS2_1 takes over (returning back to leader, test listeners)
        resp = sendDelete(url2, true);
        assertTrue(resp.contains("Failover successful!"));
        assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_1.isLeader());
        assertEquals("true", sendGet(url1, true));
        assertEquals(true, miniHS2_2.getNotLeaderTestFuture().get());
        assertEquals("false", sendGet(url2, true));
        assertEquals(false, miniHS2_2.isLeader());
        // make sure miniHS2_2 closes all its connections
        while (miniHS2_2.getOpenSessionsCount() != 0) {
            Thread.sleep(100);
        }
        // new connections goes to miniHS2_1 now
        hs2Conn = getConnection(zkJdbcUrl, System.getProperty("user.name"));
        while (miniHS2_1.getOpenSessionsCount() != 1) {
            Thread.sleep(100);
        }
    } finally {
        // revert configs to not affect other tests
        unsetPamConfs(hiveConf1);
        unsetPamConfs(hiveConf2);
    }
}
Also used : PamAuthenticator(org.apache.hive.http.security.PamAuthenticator) Test(org.junit.Test)
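
From the client's perspective, the failover above is visible only through the ZooKeeper-based JDBC URL: HiveConnection.getAllUrls() resolves it to whichever instance is currently registered as leader. A rough sketch of that client side follows; the connect string, the serviceDiscoveryMode value, and the namespace are placeholder assumptions, since the test derives them from the miniHS2 and zkServer fixtures not shown here.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class HaJdbcClientSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder values: the test builds this from miniHS2_1.getJdbcURL() and
        // zkServer.getConnectString(); the HA discovery mode name is assumed here.
        String zkJdbcUrl = "jdbc:hive2://zk1:2181,zk2:2181,zk3:2181/default;"
                + "serviceDiscoveryMode=zooKeeperHA;zooKeeperNamespace=hs2ActivePassiveHA";
        // The Hive JDBC driver resolves the URL via ZooKeeper to the current leader,
        // producing a direct URL of the form jdbc:hive2://<leaderHost>:<port>/default;...
        try (Connection conn = DriverManager.getConnection(zkJdbcUrl, System.getProperty("user.name"), "");
             Statement stmt = conn.createStatement()) {
            stmt.execute("show databases");
        }
    }
}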

Example 4 with PamAuthenticator

Use of org.apache.hive.http.security.PamAuthenticator in the Apache Hive project.

Source: class TestActivePassiveHA, method testManualFailoverUnauthorized.

@Test(timeout = 60000)
public void testManualFailoverUnauthorized() throws Exception {
    setPamConfs(hiveConf1);
    PamAuthenticator pamAuthenticator1 = new TestHS2HttpServerPam.TestPamAuthenticator(hiveConf1);
    try {
        String instanceId1 = UUID.randomUUID().toString();
        miniHS2_1.setPamAuthenticator(pamAuthenticator1);
        miniHS2_1.start(getSecureConfOverlay(instanceId1));
        // dummy HS2 instance just to trigger failover
        String instanceId2 = UUID.randomUUID().toString();
        Map<String, String> confOverlay = getSecureConfOverlay(instanceId2);
        confOverlay.put(ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname, "http");
        confOverlay.put(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname, "clidriverTest");
        miniHS2_2.start(confOverlay);
        String url1 = "http://localhost:" + hiveConf1.get(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname) + "/leader";
        // when we start miniHS2_1 will be leader (sequential start)
        assertEquals(true, miniHS2_1.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_1.isLeader());
        assertEquals("true", sendGet(url1, true));
        // trigger failover on miniHS2_1 without authorization header
        assertTrue(sendDelete(url1, false).contains("Unauthorized"));
        assertTrue(sendDelete(url1, true).contains("Failover successful!"));
        assertEquals(true, miniHS2_1.getNotLeaderTestFuture().get());
        assertEquals(false, miniHS2_1.isLeader());
        assertEquals(true, miniHS2_2.getIsLeaderTestFuture().get());
        assertEquals(true, miniHS2_2.isLeader());
    } finally {
        // revert configs to not affect other tests
        unsetPamConfs(hiveConf1);
    }
}
Also used : PamAuthenticator(org.apache.hive.http.security.PamAuthenticator) Test(org.junit.Test)
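
setPamConfs and unsetPamConfs are test helpers not included in this excerpt. Based on the configuration that HiveServer2.init() checks before enabling PAM on the web UI (see Example 5 below), a plausible sketch of what setPamConfs sets is given here; the exact properties and the "sshd" service name are assumptions for illustration.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class PamConfSketch {

    // Assumed equivalent of TestActivePassiveHA.setPamConfs(hiveConf).
    static void setPamConfs(HiveConf hiveConf) {
        hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM, true);
        // Illustrative PAM service list; any configured PAM service name works here.
        hiveConf.setVar(ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
        // init() accepts PAM without web UI SSL only when HIVE_IN_TEST is true,
        // which is why these tests do not need a keystore.
        hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true);
    }
}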

Example 5 with PamAuthenticator

Use of org.apache.hive.http.security.PamAuthenticator in the Apache Hive project.

Source: class HiveServer2, method init.

@Override
public synchronized void init(HiveConf hiveConf) {
    // Initialize metrics first, as some metrics are for initialization stuff.
    try {
        if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_METRICS_ENABLED)) {
            MetricsFactory.init(hiveConf);
        }
    } catch (Throwable t) {
        LOG.warn("Could not initiate the HiveServer2 Metrics system.  Metrics may not be reported.", t);
    }
    // Do not allow sessions - leader election or initialization will allow them for an active HS2.
    cliService = new CLIService(this, false);
    addService(cliService);
    final HiveServer2 hiveServer2 = this;
    boolean isHttpTransportMode = isHttpTransportMode(hiveConf);
    boolean isAllTransportMode = isAllTransportMode(hiveConf);
    if (isHttpTransportMode || isAllTransportMode) {
        thriftCLIService = new ThriftHttpCLIService(cliService);
        addService(thriftCLIService);
    }
    if (!isHttpTransportMode || isAllTransportMode) {
        thriftCLIService = new ThriftBinaryCLIService(cliService);
        // thriftCliService instance is used for zookeeper purposes
        addService(thriftCLIService);
    }
    super.init(hiveConf);
    // Set host name in conf
    try {
        hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname, getServerHost());
    } catch (Throwable t) {
        throw new Error("Unable to initialize HiveServer2", t);
    }
    if (HiveConf.getBoolVar(hiveConf, ConfVars.LLAP_HS2_ENABLE_COORDINATOR)) {
        // See method comment.
        try {
            LlapCoordinator.initializeInstance(hiveConf);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    // Trigger the creation of LLAP registry client, if in use. Clients may be using a different
    // cluster than the default one, but at least for the default case we'd have it covered.
    String llapHosts = HiveConf.getVar(hiveConf, HiveConf.ConfVars.LLAP_DAEMON_SERVICE_HOSTS);
    if (llapHosts != null && !llapHosts.isEmpty()) {
        LlapRegistryService.getClient(hiveConf);
    }
    // Initialize metadata provider class and trimmer
    CalcitePlanner.warmup();
    try {
        sessionHive = Hive.get(hiveConf);
    } catch (HiveException e) {
        throw new RuntimeException("Failed to get metastore connection", e);
    }
    // Create views registry
    HiveMaterializedViewsRegistry.get().init();
    StatsSources.initialize(hiveConf);
    if (hiveConf.getBoolVar(ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_ENABLED)) {
        scheduledQueryService = ScheduledQueryExecutionService.startScheduledQueryExecutorService(hiveConf);
    }
    // Setup cache if enabled.
    if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_QUERY_RESULTS_CACHE_ENABLED)) {
        try {
            QueryResultsCache.initialize(hiveConf);
        } catch (Exception err) {
            throw new RuntimeException("Error initializing the query results cache", err);
        }
    }
    // setup metastore client cache
    if (hiveConf.getBoolVar(ConfVars.MSC_CACHE_ENABLED)) {
        HiveMetaStoreClientWithLocalCache.init(hiveConf);
    }
    try {
        NotificationEventPoll.initialize(hiveConf);
    } catch (Exception err) {
        throw new RuntimeException("Error initializing notification event poll", err);
    }
    wmQueue = hiveConf.get(ConfVars.HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE.varname, "").trim();
    this.zooKeeperAclProvider = getACLProvider(hiveConf);
    this.serviceDiscovery = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY);
    this.activePassiveHA = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_ENABLE);
    try {
        if (serviceDiscovery) {
            serviceUri = getServerInstanceURI();
            addConfsToPublish(hiveConf, confsToPublish, serviceUri);
            if (activePassiveHA) {
                hiveConf.set(INSTANCE_URI_CONFIG, serviceUri);
                leaderLatchListener = new HS2LeaderLatchListener(this, SessionState.get());
                leaderActionsExecutorService = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Leader Actions Handler Thread").build());
                hs2HARegistry = HS2ActivePassiveHARegistry.create(hiveConf, false);
            }
        }
    } catch (Exception e) {
        throw new ServiceException(e);
    }
    try {
        logCompactionParameters(hiveConf);
        maybeStartCompactorThreads(hiveConf);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    // Setup web UI
    final int webUIPort;
    final String webHost;
    try {
        webUIPort = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT);
        webHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_BIND_HOST);
        // We disable web UI in tests unless the test is explicitly setting a
        // unique web ui port so that we don't mess up ptests.
        boolean uiDisabledInTest = hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST) && (webUIPort == Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue()));
        if (uiDisabledInTest) {
            LOG.info("Web UI is disabled in test mode since webui port was not specified");
        } else {
            if (webUIPort <= 0) {
                LOG.info("Web UI is disabled since port is set to " + webUIPort);
            } else {
                LOG.info("Starting Web UI on port " + webUIPort);
                HttpServer.Builder builder = new HttpServer.Builder("hiveserver2");
                builder.setPort(webUIPort).setConf(hiveConf);
                builder.setHost(webHost);
                builder.setMaxThreads(hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_MAX_THREADS));
                builder.setAdmins(hiveConf.getVar(ConfVars.USERS_IN_ADMIN_ROLE));
                // SessionManager is initialized
                builder.setContextAttribute("hive.sm", cliService.getSessionManager());
                hiveConf.set("startcode", String.valueOf(System.currentTimeMillis()));
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
                    String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH);
                    if (Strings.isBlank(keyStorePath)) {
                        throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH.varname + " Not configured for SSL connection");
                    }
                    builder.setKeyStorePassword(ShimLoader.getHadoopShims().getPassword(hiveConf, ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD.varname));
                    builder.setKeyStorePath(keyStorePath);
                    builder.setKeyStoreType(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_TYPE));
                    builder.setKeyManagerFactoryAlgorithm(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYMANAGERFACTORY_ALGORITHM));
                    builder.setExcludeCiphersuites(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_EXCLUDE_CIPHERSUITES));
                    builder.setUseSSL(true);
                }
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SPNEGO)) {
                    String spnegoPrincipal = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL);
                    String spnegoKeytab = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB);
                    if (Strings.isBlank(spnegoPrincipal) || Strings.isBlank(spnegoKeytab)) {
                        throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL.varname + "/" + ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB.varname + " Not configured for SPNEGO authentication");
                    }
                    builder.setSPNEGOPrincipal(spnegoPrincipal);
                    builder.setSPNEGOKeytab(spnegoKeytab);
                    builder.setUseSPNEGO(true);
                }
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS)) {
                    builder.setEnableCORS(true);
                    String allowedOrigins = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS);
                    String allowedMethods = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS);
                    String allowedHeaders = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS);
                    if (Strings.isBlank(allowedOrigins) || Strings.isBlank(allowedMethods) || Strings.isBlank(allowedHeaders)) {
                        throw new IllegalArgumentException("CORS enabled. But " + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS.varname + "/" + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS.varname + "/" + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS.varname + "/" + " is not configured");
                    }
                    builder.setAllowedOrigins(allowedOrigins);
                    builder.setAllowedMethods(allowedMethods);
                    builder.setAllowedHeaders(allowedHeaders);
                    LOG.info("CORS enabled - allowed-origins: {} allowed-methods: {} allowed-headers: {}", allowedOrigins, allowedMethods, allowedHeaders);
                }
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_ENABLED)) {
                    builder.configureXFrame(true).setXFrameOption(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_VALUE));
                }
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM)) {
                    if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
                        String hiveServer2PamServices = hiveConf.getVar(ConfVars.HIVE_SERVER2_PAM_SERVICES);
                        if (hiveServer2PamServices == null || hiveServer2PamServices.isEmpty()) {
                            throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_PAM_SERVICES.varname + " are not configured.");
                        }
                        builder.setPAMAuthenticator(pamAuthenticator == null ? new PamAuthenticator(hiveConf) : pamAuthenticator);
                        builder.setUsePAM(true);
                    } else if (hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
                        builder.setPAMAuthenticator(pamAuthenticator == null ? new PamAuthenticator(hiveConf) : pamAuthenticator);
                        builder.setUsePAM(true);
                    } else {
                        throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname + " has false value. It is recommended to set to true when PAM is used.");
                    }
                }
                if (serviceDiscovery && activePassiveHA) {
                    builder.setContextAttribute("hs2.isLeader", isLeader);
                    builder.setContextAttribute("hs2.failover.callback", new FailoverHandlerCallback(hs2HARegistry));
                    builder.setContextAttribute("hiveconf", hiveConf);
                    builder.addServlet("leader", HS2LeadershipStatus.class);
                    builder.addServlet("peers", HS2Peers.class);
                }
                builder.addServlet("llap", LlapServlet.class);
                builder.addServlet("jdbcjar", JdbcJarDownloadServlet.class);
                builder.setContextRootRewriteTarget("/hiveserver2.jsp");
                webServer = builder.build();
                webServer.addServlet("query_page", "/query_page.html", QueryProfileServlet.class);
                webServer.addServlet("api", "/api/*", QueriesRESTfulAPIServlet.class);
            }
        }
    } catch (IOException ie) {
        throw new ServiceException(ie);
    }
    // Add a shutdown hook for catching SIGTERM & SIGINT
    ShutdownHookManager.addShutdownHook(() -> hiveServer2.stop());
}
Also used : PamAuthenticator(org.apache.hive.http.security.PamAuthenticator) ThriftBinaryCLIService(org.apache.hive.service.cli.thrift.ThriftBinaryCLIService) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) OptionBuilder(org.apache.commons.cli.OptionBuilder) IOException(java.io.IOException) ThriftHttpCLIService(org.apache.hive.service.cli.thrift.ThriftHttpCLIService) ThriftCLIService(org.apache.hive.service.cli.thrift.ThriftCLIService) CLIService(org.apache.hive.service.cli.CLIService) ServiceException(org.apache.hive.service.ServiceException) LogInitializationException(org.apache.hadoop.hive.common.LogUtils.LogInitializationException) ParseException(org.apache.commons.cli.ParseException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) KeeperException(org.apache.zookeeper.KeeperException) HttpServer(org.apache.hive.http.HttpServer)
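
The PAM wiring buried in init() above condenses to a few builder calls: an authenticator injected for tests (the TestPamAuthenticator set via setPamAuthenticator in the examples above) takes precedence, otherwise a real PamAuthenticator is constructed from the conf. The sketch below uses only builder methods that appear in the code above and omits the surrounding SSL, SPNEGO, and CORS handling.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.http.HttpServer;
import org.apache.hive.http.security.PamAuthenticator;

public class WebUiPamWiringSketch {

    // Condensed from HiveServer2.init(); error handling and other builder options omitted.
    static HttpServer buildWebUi(HiveConf hiveConf, PamAuthenticator injectedAuthenticator) throws Exception {
        HttpServer.Builder builder = new HttpServer.Builder("hiveserver2");
        builder.setPort(hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT)).setConf(hiveConf);
        builder.setHost(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_BIND_HOST));
        // A test-provided authenticator wins; otherwise fall back to the real PAM authenticator.
        builder.setPAMAuthenticator(injectedAuthenticator == null ? new PamAuthenticator(hiveConf) : injectedAuthenticator);
        builder.setUsePAM(true);
        return builder.build();
    }
}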

Aggregations

PamAuthenticator (org.apache.hive.http.security.PamAuthenticator): 5
Test (org.junit.Test): 4
ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder): 1
IOException (java.io.IOException): 1
Connection (java.sql.Connection): 1
SQLException (java.sql.SQLException): 1
OptionBuilder (org.apache.commons.cli.OptionBuilder): 1
ParseException (org.apache.commons.cli.ParseException): 1
LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException): 1
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 1
HttpServer (org.apache.hive.http.HttpServer): 1
ServiceException (org.apache.hive.service.ServiceException): 1
CLIService (org.apache.hive.service.cli.CLIService): 1
HiveSQLException (org.apache.hive.service.cli.HiveSQLException): 1
ThriftBinaryCLIService (org.apache.hive.service.cli.thrift.ThriftBinaryCLIService): 1
ThriftCLIService (org.apache.hive.service.cli.thrift.ThriftCLIService): 1
ThriftHttpCLIService (org.apache.hive.service.cli.thrift.ThriftHttpCLIService): 1
KeeperException (org.apache.zookeeper.KeeperException): 1