Use of org.apache.hive.service.ServiceException in project hive by apache: class CLIService, method init.
@Override
public synchronized void init(HiveConf hiveConf) {
  this.hiveConf = hiveConf;
  sessionManager = new SessionManager(hiveServer2);
  defaultFetchRows = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE);
  addService(sessionManager);
  // If the hadoop cluster is secure, do a kerberos login for the service from the keytab
  if (UserGroupInformation.isSecurityEnabled()) {
    try {
      HiveAuthFactory.loginFromKeytab(hiveConf);
      this.serviceUGI = Utils.getUGI();
    } catch (IOException | LoginException e) {
      throw new ServiceException("Unable to login to kerberos with given principal/keytab", e);
    }
    // Also try creating a UGI object for the SPNego principal
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
    if (principal.isEmpty() || keyTabFile.isEmpty()) {
      LOG.info("SPNego httpUGI not created, spNegoPrincipal: " + principal + ", keytabFile: " + keyTabFile);
    } else {
      try {
        this.httpUGI = HiveAuthFactory.loginFromSpnegoKeytabAndReturnUGI(hiveConf);
        LOG.info("SPNego httpUGI successfully created.");
      } catch (IOException e) {
        LOG.warn("SPNego httpUGI creation failed: ", e);
      }
    }
  }
  // Creates a connection to HMS and thus *must* occur after the kerberos login above.
  try {
    applyAuthorizationConfigPolicy(hiveConf);
  } catch (Exception e) {
    throw new RuntimeException("Error applying authorization policy on hive configuration: " + e.getMessage(), e);
  }
  setupBlockedUdfs();
  super.init(hiveConf);
}
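Because init() overrides a method with no throws clause, ServiceException is unchecked here, and a caller simply fails fast when the kerberos login inside init() cannot complete. A minimal caller sketch; the helper name initOrFail and the rethrow wrapping are illustrative, not part of the Hive codebase:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.ServiceException;
import org.apache.hive.service.cli.CLIService;

public final class CliServiceBootstrap {
  // Illustrative helper: abort startup with context if init() fails.
  static void initOrFail(CLIService cliService, HiveConf conf) {
    try {
      cliService.init(conf);
    } catch (ServiceException e) {
      // Keytab login or UGI creation failed; keep the original cause attached.
      throw new IllegalStateException("CLIService initialization failed: " + e.getMessage(), e);
    }
  }
}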
Use of org.apache.hive.service.ServiceException in project hive by apache: class HiveServer2, method start.
@Override
public synchronized void start() {
  super.start();
  // If we're supporting dynamic service discovery, we'll add the service uri for this
  // HiveServer2 instance to Zookeeper as a znode.
  HiveConf hiveConf = this.getHiveConf();
  if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY)) {
    try {
      addServerInstanceToZooKeeper(hiveConf);
    } catch (Exception e) {
      LOG.error("Error adding this HiveServer2 instance to ZooKeeper: ", e);
      throw new ServiceException(e);
    }
  }
  if (webServer != null) {
    try {
      webServer.start();
      LOG.info("Web UI has started on port " + webServer.getPort());
    } catch (Exception e) {
      LOG.error("Error starting Web UI: ", e);
      throw new ServiceException(e);
    }
  }
}
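The ZooKeeper branch above only runs when dynamic service discovery is enabled. A hedged configuration sketch that would exercise addServerInstanceToZooKeeper during start(); the quorum and namespace values are examples, not defaults:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

final class DiscoveryConfigExample {
  static HiveConf discoveryEnabledConf() {
    HiveConf conf = new HiveConf();
    // Enables the addServerInstanceToZooKeeper(...) branch in start().
    conf.setBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY, true);
    // Example ZooKeeper ensemble and znode namespace; adjust for the actual cluster.
    conf.setVar(ConfVars.HIVE_ZOOKEEPER_QUORUM, "zk1:2181,zk2:2181,zk3:2181");
    conf.setVar(ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE, "hiveserver2");
    return conf;
  }
}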
Use of org.apache.hive.service.ServiceException in project hive by apache: class ThriftCLIService, method init.
@Override
public synchronized void init(HiveConf hiveConf) {
  this.hiveConf = hiveConf;
  String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
  if (hiveHost == null) {
    hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
  }
  try {
    serverIPAddress = ServerUtils.getHostAddress(hiveHost);
  } catch (UnknownHostException e) {
    throw new ServiceException(e);
  }
  // Initialize common server configs needed in both binary & http modes
  String portString;
  if (HiveServer2.isHTTPTransportMode(hiveConf)) {
    // HTTP mode
    workerKeepAliveTime = hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME, TimeUnit.SECONDS);
    portString = System.getenv("HIVE_SERVER2_THRIFT_HTTP_PORT");
    if (portString != null) {
      portNum = Integer.parseInt(portString);
    } else {
      portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT);
    }
  } else {
    // Binary mode
    workerKeepAliveTime = hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME, TimeUnit.SECONDS);
    portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
    if (portString != null) {
      portNum = Integer.parseInt(portString);
    } else {
      portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
    }
  }
  minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
  maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
  super.init(hiveConf);
}
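Both transport branches apply the same precedence: an environment variable, when set, overrides the HiveConf value. A minimal sketch of that pattern; the helper name resolvePort is illustrative:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

final class PortResolution {
  // Illustrative helper mirroring the env-over-config precedence used in init().
  static int resolvePort(HiveConf conf, String envVarName, ConfVars portConfVar) {
    String fromEnv = System.getenv(envVarName);
    return (fromEnv != null) ? Integer.parseInt(fromEnv) : conf.getIntVar(portConfVar);
  }
}

For example, resolvePort(conf, "HIVE_SERVER2_THRIFT_PORT", ConfVars.HIVE_SERVER2_THRIFT_PORT) reproduces the binary-mode branch above.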
Use of org.apache.hive.service.ServiceException in project hive by apache: class HiveServer2, method init.
@Override
public synchronized void init(HiveConf hiveConf) {
  // Initialize metrics first, as some metrics are for initialization stuff.
  try {
    if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_METRICS_ENABLED)) {
      MetricsFactory.init(hiveConf);
    }
  } catch (Throwable t) {
    LOG.warn("Could not initiate the HiveServer2 Metrics system. Metrics may not be reported.", t);
  }
  cliService = new CLIService(this);
  addService(cliService);
  final HiveServer2 hiveServer2 = this;
  Runnable oomHook = new Runnable() {
    @Override
    public void run() {
      hiveServer2.stop();
    }
  };
  if (isHTTPTransportMode(hiveConf)) {
    thriftCLIService = new ThriftHttpCLIService(cliService, oomHook);
  } else {
    thriftCLIService = new ThriftBinaryCLIService(cliService, oomHook);
  }
  addService(thriftCLIService);
  super.init(hiveConf);
  // Set host name in hiveConf
  try {
    hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname, getServerHost());
  } catch (Throwable t) {
    throw new Error("Unable to initialize HiveServer2", t);
  }
  if (HiveConf.getBoolVar(hiveConf, ConfVars.LLAP_HS2_ENABLE_COORDINATOR)) {
    // See method comment.
    try {
      LlapCoordinator.initializeInstance(hiveConf);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
  // Trigger the creation of LLAP registry client, if in use. Clients may be using a different
  // cluster than the default one, but at least for the default case we'd have it covered.
  String llapHosts = HiveConf.getVar(hiveConf, HiveConf.ConfVars.LLAP_DAEMON_SERVICE_HOSTS);
  if (llapHosts != null && !llapHosts.isEmpty()) {
    LlapRegistryService.getClient(hiveConf);
  }
  // Create views registry
  try {
    Hive sessionHive = Hive.get(hiveConf);
    HiveMaterializedViewsRegistry.get().init(sessionHive);
  } catch (HiveException e) {
    throw new RuntimeException("Failed to get metastore connection", e);
  }
  // Setup web UI
  try {
    int webUIPort = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT);
    // We disable web UI in tests unless the test is explicitly setting a
    // unique web ui port so that we don't mess up ptests.
    boolean uiDisabledInTest = hiveConf.getBoolVar(ConfVars.HIVE_IN_TEST)
        && (webUIPort == Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue()));
    if (uiDisabledInTest) {
      LOG.info("Web UI is disabled in test mode since webui port was not specified");
    } else {
      if (webUIPort <= 0) {
        LOG.info("Web UI is disabled since port is set to " + webUIPort);
      } else {
        LOG.info("Starting Web UI on port " + webUIPort);
        HttpServer.Builder builder = new HttpServer.Builder("hiveserver2");
        builder.setPort(webUIPort).setConf(hiveConf);
        builder.setHost(hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_BIND_HOST));
        builder.setMaxThreads(hiveConf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_MAX_THREADS));
        builder.setAdmins(hiveConf.getVar(ConfVars.USERS_IN_ADMIN_ROLE));
        // SessionManager is initialized
        builder.setContextAttribute("hive.sm", cliService.getSessionManager());
        hiveConf.set("startcode", String.valueOf(System.currentTimeMillis()));
        if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL)) {
          String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH);
          if (Strings.isBlank(keyStorePath)) {
            throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH.varname
                + " Not configured for SSL connection");
          }
          builder.setKeyStorePassword(ShimLoader.getHadoopShims().getPassword(
              hiveConf, ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD.varname));
          builder.setKeyStorePath(keyStorePath);
          builder.setUseSSL(true);
        }
        if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SPNEGO)) {
          String spnegoPrincipal = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL);
          String spnegoKeytab = hiveConf.getVar(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB);
          if (Strings.isBlank(spnegoPrincipal) || Strings.isBlank(spnegoKeytab)) {
            throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_PRINCIPAL.varname + "/"
                + ConfVars.HIVE_SERVER2_WEBUI_SPNEGO_KEYTAB.varname + " Not configured for SPNEGO authentication");
          }
          builder.setSPNEGOPrincipal(spnegoPrincipal);
          builder.setSPNEGOKeytab(spnegoKeytab);
          builder.setUseSPNEGO(true);
        }
        builder.addServlet("llap", LlapServlet.class);
        builder.setContextRootRewriteTarget("/hiveserver2.jsp");
        webServer = builder.build();
        webServer.addServlet("query_page", "/query_page", QueryProfileServlet.class);
      }
    }
  } catch (IOException ie) {
    throw new ServiceException(ie);
  }
  // Add a shutdown hook for catching SIGTERM & SIGINT
  ShutdownHookManager.addShutdownHook(new Runnable() {
    @Override
    public void run() {
      hiveServer2.stop();
    }
  });
}
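Taken together, init() and start() surface any fatal setup failure as an unchecked ServiceException, and the shutdown hook mirrors how the server is torn down on SIGTERM and SIGINT. A hedged end-to-end sketch of driving both methods; the class name EmbeddedHiveServer2, the stop-on-failure handling, and the plain JDK shutdown hook are illustrative, whereas Hive itself registers its hook through ShutdownHookManager as shown above:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.ServiceException;
import org.apache.hive.service.server.HiveServer2;

public final class EmbeddedHiveServer2 {
  static HiveServer2 launch(HiveConf conf) {
    HiveServer2 server = new HiveServer2();
    try {
      server.init(conf);   // may throw ServiceException, e.g. if the Web UI cannot be built
      server.start();      // may throw ServiceException for ZooKeeper or Web UI failures
    } catch (ServiceException e) {
      server.stop();       // release anything that was partially initialized
      throw e;
    }
    // Stop cleanly on JVM shutdown, mirroring the SIGTERM/SIGINT hook added in init().
    Runtime.getRuntime().addShutdownHook(new Thread(server::stop));
    return server;
  }
}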