Search in sources:

Example 1 with ThriftCLIService

Use of org.apache.hive.service.cli.thrift.ThriftCLIService in project hive by apache.

From the class TestPlainSaslHelper, method testDoAsSetting.

/**
 * Test setting the {@link HiveConf.ConfVars} config parameter
 *   HIVE_SERVER2_ENABLE_DOAS for unsecure mode
 */
@Test
public void testDoAsSetting() {
    HiveConf hconf = new HiveConf();
    hconf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    assertTrue("default value of hive server2 doAs should be true", hconf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS));
    CLIService cliService = new CLIService(null, true);
    cliService.init(hconf);
    ThriftCLIService tcliService = new ThriftBinaryCLIService(cliService);
    tcliService.init(hconf);
    TProcessorFactory procFactory = PlainSaslHelper.getPlainProcessorFactory(tcliService);
    assertEquals("doAs enabled processor for unsecure mode", procFactory.getProcessor(null).getClass(), TSetIpAddressProcessor.class);
}
Also used : ThriftBinaryCLIService(org.apache.hive.service.cli.thrift.ThriftBinaryCLIService) ThriftCLIService(org.apache.hive.service.cli.thrift.ThriftCLIService) CLIService(org.apache.hive.service.cli.CLIService) HiveConf(org.apache.hadoop.hive.conf.HiveConf) TProcessorFactory(org.apache.thrift.TProcessorFactory) Test(org.junit.Test)
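For context, the factory returned by PlainSaslHelper.getPlainProcessorFactory is what lets HiveServer2 capture the connecting client's user name and IP address in unsecure mode; with doAs enabled, the server then runs operations on behalf of that user. The following is a minimal standalone sketch (not part of the Hive test suite) that mirrors the wiring above but sets HIVE_SERVER2_ENABLE_DOAS explicitly instead of relying on the default; the class name DoAsProcessorSketch is invented for illustration and assumes the Hive service modules are on the classpath.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.service.auth.PlainSaslHelper;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessorFactory;

// Minimal sketch, not the Hive test: same wiring as testDoAsSetting, but with the
// doAs flag set explicitly rather than taken from the default configuration.
public class DoAsProcessorSketch {
    public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, true); // explicit instead of default
        CLIService cliService = new CLIService(null, true);
        cliService.init(conf);
        ThriftCLIService thriftService = new ThriftBinaryCLIService(cliService);
        thriftService.init(conf);
        // In unsecure (plain SASL) mode the factory wraps the service so the client's
        // user name and IP address are propagated with each request.
        TProcessorFactory procFactory = PlainSaslHelper.getPlainProcessorFactory(thriftService);
        System.out.println(procFactory.getProcessor(null).getClass().getName());
    }
}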

Example 2 with ThriftCLIService

Use of org.apache.hive.service.cli.thrift.ThriftCLIService in project hive by apache.

From the class HiveServer2, method init.

@Override
public synchronized void init(HiveConf hiveConf) {
    // Initialize metrics first, as some metrics are for initialization stuff.
    try {
        if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_METRICS_ENABLED)) {
            MetricsFactory.init(hiveConf);
        }
    } catch (Throwable t) {
        LOG.warn("Could not initiate the HiveServer2 Metrics system.  Metrics may not be reported.", t);
    }
    // Do not allow sessions - leader election or initialization will allow them for an active HS2.
    cliService = new CLIService(this, false);
    addService(cliService);
    final HiveServer2 hiveServer2 = this;
    boolean isHttpTransportMode = isHttpTransportMode(hiveConf);
    boolean isAllTransportMode = isAllTransportMode(hiveConf);
    if (isHttpTransportMode || isAllTransportMode) {
        thriftCLIService = new ThriftHttpCLIService(cliService);
        addService(thriftCLIService);
    }
    if (!isHttpTransportMode || isAllTransportMode) {
        thriftCLIService = new ThriftBinaryCLIService(cliService);
        // thriftCliService instance is used for zookeeper purposes
        addService(thriftCLIService);
    }
    super.init(hiveConf);
    // Set host name in conf
    try {
        hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname, getServerHost());
    } catch (Throwable t) {
        throw new Error("Unable to initialize HiveServer2", t);
    }
    if (HiveConf.getBoolVar(hiveConf, ConfVars.LLAP_HS2_ENABLE_COORDINATOR)) {
        // See method comment.
        try {
            LlapCoordinator.initializeInstance(hiveConf);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
    // Trigger the creation of LLAP registry client, if in use. Clients may be using a different
    // cluster than the default one, but at least for the default case we'd have it covered.
    String llapHosts = HiveConf.getVar(hiveConf, HiveConf.ConfVars.LLAP_DAEMON_SERVICE_HOSTS);
    if (llapHosts != null && !llapHosts.isEmpty()) {
        LlapRegistryService.getClient(hiveConf);
    }
    // Initialize metadata provider class and trimmer
    CalcitePlanner.warmup();
    try {
        sessionHive = Hive.get(hiveConf);
    } catch (HiveException e) {
        throw new RuntimeException("Failed to get metastore connection", e);
    }
    // Create views registry
    HiveMaterializedViewsRegistry.get().init();
    StatsSources.initialize(hiveConf);
    if (hiveConf.getBoolVar(ConfVars.HIVE_SCHEDULED_QUERIES_EXECUTOR_ENABLED)) {
        scheduledQueryService = ScheduledQueryExecutionService.startScheduledQueryExecutorService(hiveConf);
    }
    // Setup cache if enabled.
    if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_QUERY_RESULTS_CACHE_ENABLED)) {
        try {
            QueryResultsCache.initialize(hiveConf);
        } catch (Exception err) {
            throw new RuntimeException("Error initializing the query results cache", err);
        }
    }
    // setup metastore client cache
    if (hiveConf.getBoolVar(ConfVars.MSC_CACHE_ENABLED)) {
        HiveMetaStoreClientWithLocalCache.init(hiveConf);
    }
    try {
        NotificationEventPoll.initialize(hiveConf);
    } catch (Exception err) {
        throw new RuntimeException("Error initializing notification event poll", err);
    }
    wmQueue = hiveConf.get(ConfVars.HIVE_SERVER2_TEZ_INTERACTIVE_QUEUE.varname, "").trim();
    this.zooKeeperAclProvider = getACLProvider(hiveConf);
    this.serviceDiscovery = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY);
    this.activePassiveHA = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_ENABLE);
    try {
        if (serviceDiscovery) {
            serviceUri = getServerInstanceURI();
            addConfsToPublish(hiveConf, confsToPublish, serviceUri);
            if (activePassiveHA) {
                hiveConf.set(INSTANCE_URI_CONFIG, serviceUri);
                leaderLatchListener = new HS2LeaderLatchListener(this, SessionState.get());
                leaderActionsExecutorService = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Leader Actions Handler Thread").build());
                hs2HARegistry = HS2ActivePassiveHARegistry.create(hiveConf, false);
            }
        }
    } catch (Exception e) {
        throw new ServiceException(e);
    }
    try {
        logCompactionParameters(hiveConf);
        maybeStartCompactorThreads(hiveConf);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    // Setup web UI
    final int webUIPort;
    final String webHost;
    try {
        webUIPort = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT);
        webHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_BIND_HOST);
        // We disable web UI in tests unless the test is explicitly setting a
        // unique web ui port so that we don't mess up ptests.
        boolean uiDisabledInTest = hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST) && (webUIPort == Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue()));
        if (uiDisabledInTest) {
            LOG.info("Web UI is disabled in test mode since webui port was not specified");
        } else {
            if (webUIPort <= 0) {
                LOG.info("Web UI is disabled since port is set to " + webUIPort);
            } else {
                LOG.info("Starting Web UI on port " + webUIPort);
                HttpServer.Builder builder = new HttpServer.Builder("hiveserver2");
                builder.setPort(webUIPort).setConf(hiveConf);
                builder.setHost(webHost);
                builder.setMaxThreads(hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_MAX_THREADS));
                builder.setAdmins(hiveConf.getVar(ConfVars.USERS_IN_ADMIN_ROLE));
                // SessionManager is initialized
                builder.setContextAttribute("hive.sm", cliService.getSessionManager());
                hiveConf.set("startcode", String.valueOf(System.currentTimeMillis()));
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
                    String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH);
                    if (Strings.isBlank(keyStorePath)) {
                        throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH.varname + " Not configured for SSL connection");
                    }
                    builder.setKeyStorePassword(ShimLoader.getHadoopShims().getPassword(hiveConf, ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD.varname));
                    builder.setKeyStorePath(keyStorePath);
                    builder.setKeyStoreType(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_TYPE));
                    builder.setKeyManagerFactoryAlgorithm(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYMANAGERFACTORY_ALGORITHM));
                    builder.setExcludeCiphersuites(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_EXCLUDE_CIPHERSUITES));
                    builder.setUseSSL(true);
                }
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SPNEGO)) {
                    String spnegoPrincipal = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL);
                    String spnegoKeytab = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB);
                    if (Strings.isBlank(spnegoPrincipal) || Strings.isBlank(spnegoKeytab)) {
                        throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL.varname + "/" + ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB.varname + " Not configured for SPNEGO authentication");
                    }
                    builder.setSPNEGOPrincipal(spnegoPrincipal);
                    builder.setSPNEGOKeytab(spnegoKeytab);
                    builder.setUseSPNEGO(true);
                }
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_ENABLE_CORS)) {
                    builder.setEnableCORS(true);
                    String allowedOrigins = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS);
                    String allowedMethods = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS);
                    String allowedHeaders = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS);
                    if (Strings.isBlank(allowedOrigins) || Strings.isBlank(allowedMethods) || Strings.isBlank(allowedHeaders)) {
                        throw new IllegalArgumentException("CORS enabled. But " + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS.varname + "/" + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_METHODS.varname + "/" + ConfVars.HIVE_SERVER2_WEBUI_CORS_ALLOWED_HEADERS.varname + "/" + " is not configured");
                    }
                    builder.setAllowedOrigins(allowedOrigins);
                    builder.setAllowedMethods(allowedMethods);
                    builder.setAllowedHeaders(allowedHeaders);
                    LOG.info("CORS enabled - allowed-origins: {} allowed-methods: {} allowed-headers: {}", allowedOrigins, allowedMethods, allowedHeaders);
                }
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_ENABLED)) {
                    builder.configureXFrame(true).setXFrameOption(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_XFRAME_VALUE));
                }
                if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM)) {
                    if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
                        String hiveServer2PamServices = hiveConf.getVar(ConfVars.HIVE_SERVER2_PAM_SERVICES);
                        if (hiveServer2PamServices == null || hiveServer2PamServices.isEmpty()) {
                            throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_PAM_SERVICES.varname + " are not configured.");
                        }
                        builder.setPAMAuthenticator(pamAuthenticator == null ? new PamAuthenticator(hiveConf) : pamAuthenticator);
                        builder.setUsePAM(true);
                    } else if (hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST)) {
                        builder.setPAMAuthenticator(pamAuthenticator == null ? new PamAuthenticator(hiveConf) : pamAuthenticator);
                        builder.setUsePAM(true);
                    } else {
                        throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname + " has false value. It is recommended to set to true when PAM is used.");
                    }
                }
                if (serviceDiscovery && activePassiveHA) {
                    builder.setContextAttribute("hs2.isLeader", isLeader);
                    builder.setContextAttribute("hs2.failover.callback", new FailoverHandlerCallback(hs2HARegistry));
                    builder.setContextAttribute("hiveconf", hiveConf);
                    builder.addServlet("leader", HS2LeadershipStatus.class);
                    builder.addServlet("peers", HS2Peers.class);
                }
                builder.addServlet("llap", LlapServlet.class);
                builder.addServlet("jdbcjar", JdbcJarDownloadServlet.class);
                builder.setContextRootRewriteTarget("/hiveserver2.jsp");
                webServer = builder.build();
                webServer.addServlet("query_page", "/query_page.html", QueryProfileServlet.class);
                webServer.addServlet("api", "/api/*", QueriesRESTfulAPIServlet.class);
            }
        }
    } catch (IOException ie) {
        throw new ServiceException(ie);
    }
    // Add a shutdown hook for catching SIGTERM & SIGINT
    ShutdownHookManager.addShutdownHook(() -> hiveServer2.stop());
}
Also used : PamAuthenticator(org.apache.hive.http.security.PamAuthenticator) ThriftBinaryCLIService(org.apache.hive.service.cli.thrift.ThriftBinaryCLIService) ThriftHttpCLIService(org.apache.hive.service.cli.thrift.ThriftHttpCLIService) ThriftCLIService(org.apache.hive.service.cli.thrift.ThriftCLIService) CLIService(org.apache.hive.service.cli.CLIService) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) ServiceException(org.apache.hive.service.ServiceException) LogInitializationException(org.apache.hadoop.hive.common.LogUtils.LogInitializationException) ParseException(org.apache.commons.cli.ParseException) KeeperException(org.apache.zookeeper.KeeperException) IOException(java.io.IOException) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) OptionBuilder(org.apache.commons.cli.OptionBuilder) HttpServer(org.apache.hive.http.HttpServer)
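The transport branching near the top of init() is what decides which ThriftCLIService flavors are registered as sub-services: HTTP mode (or "all" mode) adds a ThriftHttpCLIService, and any mode other than pure HTTP adds a ThriftBinaryCLIService, which is also the instance used for ZooKeeper registration. Below is a minimal, self-contained sketch of that selection logic; the helper name chooseTransports and the literal mode strings are assumptions for illustration, whereas the real code reads the transport mode from HiveConf and constructs the services directly.

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch of the transport-mode branching in HiveServer2.init().
// The mode strings and this helper are assumptions, not part of the HiveServer2 API.
public class TransportModeSketch {

    static List<String> chooseTransports(String mode) {
        boolean http = "http".equalsIgnoreCase(mode);
        boolean all = "all".equalsIgnoreCase(mode);
        List<String> services = new ArrayList<>();
        if (http || all) {
            services.add("ThriftHttpCLIService");    // HTTP front end
        }
        if (!http || all) {
            services.add("ThriftBinaryCLIService");  // binary front end, also used for ZooKeeper registration
        }
        return services;
    }

    public static void main(String[] args) {
        System.out.println(chooseTransports("binary")); // [ThriftBinaryCLIService]
        System.out.println(chooseTransports("http"));   // [ThriftHttpCLIService]
        System.out.println(chooseTransports("all"));    // [ThriftHttpCLIService, ThriftBinaryCLIService]
    }
}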

Aggregations

CLIService (org.apache.hive.service.cli.CLIService) 2
ThriftBinaryCLIService (org.apache.hive.service.cli.thrift.ThriftBinaryCLIService) 2
ThriftCLIService (org.apache.hive.service.cli.thrift.ThriftCLIService) 2
ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder) 1
IOException (java.io.IOException) 1
OptionBuilder (org.apache.commons.cli.OptionBuilder) 1
ParseException (org.apache.commons.cli.ParseException) 1
LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException) 1
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 1
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 1
HttpServer (org.apache.hive.http.HttpServer) 1
PamAuthenticator (org.apache.hive.http.security.PamAuthenticator) 1
ServiceException (org.apache.hive.service.ServiceException) 1
HiveSQLException (org.apache.hive.service.cli.HiveSQLException) 1
ThriftHttpCLIService (org.apache.hive.service.cli.thrift.ThriftHttpCLIService) 1
TProcessorFactory (org.apache.thrift.TProcessorFactory) 1
KeeperException (org.apache.zookeeper.KeeperException) 1
Test (org.junit.Test) 1