use of org.apache.hive.service.rpc.thrift.TOpenSessionReq in project hive by apache.
the class HiveConnection method openSession.
private void openSession() throws SQLException {
  LOG.debug("Opening Hive connection session");
  TOpenSessionReq openReq = new TOpenSessionReq();
  Map<String, String> openConf = new HashMap<>();
  // For the remote JDBC client, try to set each conf var using 'set foo=bar'
  for (Entry<String, String> hiveConf : connParams.getHiveConfs().entrySet()) {
    openConf.put("set:hiveconf:" + hiveConf.getKey(), hiveConf.getValue());
  }
  // For the remote JDBC client, try to set each Hive var using 'set hivevar:key=value'
  for (Entry<String, String> hiveVar : connParams.getHiveVars().entrySet()) {
    openConf.put("set:hivevar:" + hiveVar.getKey(), hiveVar.getValue());
  }
  // switch the database
  LOG.debug("Default database: {}", connParams.getDbName());
  openConf.put("use:database", connParams.getDbName());
  if (wmPool != null) {
    openConf.put("set:hivevar:wmpool", wmPool);
  }
  if (wmApp != null) {
    openConf.put("set:hivevar:wmapp", wmApp);
  }
  // set the session configuration
  if (sessConfMap.containsKey(HiveAuthConstants.HS2_PROXY_USER)) {
    openConf.put(HiveAuthConstants.HS2_PROXY_USER, sessConfMap.get(HiveAuthConstants.HS2_PROXY_USER));
  }
  // set the create-external (purge) table behavior by default
  if (sessConfMap.containsKey(JdbcConnectionParams.CREATE_TABLE_AS_EXTERNAL)) {
    openConf.put("set:hiveconf:hive.create.as.external.legacy",
        sessConfMap.get(JdbcConnectionParams.CREATE_TABLE_AS_EXTERNAL).toLowerCase());
  }
  if (isHplSqlMode()) {
    openConf.put("set:hivevar:mode", HPLSQL);
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Dumping initial configuration...");
    for (Map.Entry<String, String> entry : openConf.entrySet()) {
      LOG.debug("{}={}", entry.getKey(), entry.getValue());
    }
  }
  openReq.setConfiguration(openConf);
  // Store the user name and password in the open request in case of non-SASL authentication
  if (JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
    openReq.setUsername(sessConfMap.get(JdbcConnectionParams.AUTH_USER));
    openReq.setPassword(sessConfMap.get(JdbcConnectionParams.AUTH_PASSWD));
  }
  // explicitly do an HTTP POST request and get the response
  try {
    int numRetry = 1;
    if (isBrowserAuthMode()) {
      numRetry = 2;
      browserClient.startListening();
    }
    for (int i = 0; i < numRetry; i++) {
      try {
        openSession(openReq);
      } catch (TException e) {
        if (isSamlRedirect(e)) {
          boolean success = doBrowserSSO();
          if (!success) {
            String msg = browserClient.getServerResponse() == null
                || browserClient.getServerResponse().getMsg() == null
                ? "" : browserClient.getServerResponse().getMsg();
            throw new SQLException("Could not establish connection to " + jdbcUriString + ": " + msg, "08S01", e);
          }
        } else {
          throw new SQLException("Could not establish connection to " + jdbcUriString + ": " + e.getMessage(), "08S01", e);
        }
      }
    }
  } catch (HiveJdbcBrowserException e) {
    throw new SQLException("Could not establish connection to " + jdbcUriString + ": " + e.getMessage(), "08S01", e);
  } finally {
    if (browserClient != null) {
      try {
        browserClient.close();
      } catch (IOException e) {
        LOG.error("Unable to close the browser SSO client: " + e.getMessage(), e);
      }
    }
  }
  isClosed = false;
}
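For reference, the configuration overlay sent in the open request uses prefixed keys that HiveServer2 interprets at session start: "set:hiveconf:<key>" for Hive conf variables, "set:hivevar:<key>" for Hive variables, and "use:database" to pick the default database. Below is a minimal standalone sketch, not the driver's actual code path, that opens a session directly with the generated Thrift client over a plain binary socket. The class name OpenSessionSketch, the localhost:10000 endpoint, and the sample conf values are assumptions, and the plain TSocket transport only works when the server runs with hive.server2.authentication=NOSASL; otherwise a SASL or HTTP transport such as the one HiveConnection builds is required.

import java.util.HashMap;
import java.util.Map;

import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TOpenSessionReq;
import org.apache.hive.service.rpc.thrift.TOpenSessionResp;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class OpenSessionSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical endpoint; assumes NOSASL authentication on the server.
    TTransport transport = new TSocket("localhost", 10000);
    transport.open();
    TCLIService.Client client = new TCLIService.Client(new TBinaryProtocol(transport));

    TOpenSessionReq openReq = new TOpenSessionReq();
    Map<String, String> openConf = new HashMap<>();
    // Same overlay-key convention as HiveConnection.openSession() above.
    openConf.put("use:database", "default");
    openConf.put("set:hiveconf:hive.exec.parallel", "true");
    openConf.put("set:hivevar:run_date", "2024-01-01");
    openReq.setConfiguration(openConf);

    TOpenSessionResp resp = client.OpenSession(openReq);
    System.out.println("Opened session: " + resp.getSessionHandle());
    transport.close();
  }
}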
use of org.apache.hive.service.rpc.thrift.TOpenSessionReq in project hive by apache.
the class TestThriftHttpCLIServiceFeatures method verifyForwardedHeaders.
private void verifyForwardedHeaders(ArrayList<String> headerIPs, String cmd) throws Exception {
  TTransport transport;
  DefaultHttpClient hClient = new DefaultHttpClient();
  String httpUrl = getHttpUrl();
  // add an interceptor that sets the X-Forwarded-For header to the given IPs
  if (!headerIPs.isEmpty()) {
    Header xForwardHeader = new BasicHeader("X-Forwarded-For", Joiner.on(",").join(headerIPs));
    RequestDefaultHeaders headerInterceptor = new RequestDefaultHeaders(Arrays.asList(xForwardHeader));
    hClient.addRequestInterceptor(headerInterceptor);
  }
  // interceptor for adding the username and password
  HttpBasicAuthInterceptor authInt = new HttpBasicAuthInterceptor(ThriftCLIServiceTest.USERNAME,
      ThriftCLIServiceTest.PASSWORD, null, null, false, null, null);
  hClient.addRequestInterceptor(authInt);
  transport = new THttpClient(httpUrl, hClient);
  TCLIService.Client httpClient = getClient(transport);
  // Create a new open session request object
  TOpenSessionReq openReq = new TOpenSessionReq();
  TOpenSessionResp openResp = httpClient.OpenSession(openReq);
  // execute a query
  TExecuteStatementReq execReq = new TExecuteStatementReq(openResp.getSessionHandle(), "show tables");
  httpClient.ExecuteStatement(execReq);
  // capture the arguments of the authorizer call and verify the IP addresses passed
  ArgumentCaptor<HiveAuthzContext> contextCapturer = ArgumentCaptor.forClass(HiveAuthzContext.class);
  verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class),
      Matchers.anyListOf(HivePrivilegeObject.class), Matchers.anyListOf(HivePrivilegeObject.class),
      contextCapturer.capture());
  HiveAuthzContext context = contextCapturer.getValue();
  System.err.println("Forwarded IP Addresses " + context.getForwardedAddresses());
  List<String> auditIPAddresses = new ArrayList<String>(context.getForwardedAddresses());
  Collections.sort(auditIPAddresses);
  Collections.sort(headerIPs);
  Assert.assertEquals("Checking forwarded IP Address", headerIPs, auditIPAddresses);
}
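The test above injects the X-Forwarded-For header through a RequestDefaultHeaders interceptor on the deprecated DefaultHttpClient. The sketch below shows roughly the same idea with the non-deprecated HttpClientBuilder; the class name ForwardedHeaderSketch, the http://localhost:10001/cliservice URL, and the sample IP list are assumptions, and it further assumes the server accepts unauthenticated requests (in the test, an HttpBasicAuthInterceptor supplies the credentials).

import java.util.Collections;

import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TOpenSessionReq;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicHeader;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.THttpClient;

public class ForwardedHeaderSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical HTTP endpoint of a HiveServer2 running in http transport mode.
    String httpUrl = "http://localhost:10001/cliservice";
    // Attach a static X-Forwarded-For header to every outgoing request.
    HttpClient httpClient = HttpClientBuilder.create()
        .setDefaultHeaders(Collections.singletonList(
            new BasicHeader("X-Forwarded-For", "10.0.0.1,10.0.0.2")))
        .build();
    THttpClient transport = new THttpClient(httpUrl, httpClient);
    transport.open();
    TCLIService.Client client = new TCLIService.Client(new TBinaryProtocol(transport));
    client.OpenSession(new TOpenSessionReq());
    transport.close();
  }
}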
use of org.apache.hive.service.rpc.thrift.TOpenSessionReq in project hive by apache.
the class TestHs2ConnectionMetricsHttp method testOpenConnectionMetrics.
@Test
public void testOpenConnectionMetrics() throws Exception {
  CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
  TCLIService.Client httpClient = getHttpClient();
  TOpenSessionReq openSessionReq = new TOpenSessionReq();
  TOpenSessionResp tOpenSessionResp = httpClient.OpenSession(openSessionReq);
  // wait a couple of seconds to make sure the connection has been closed
  TimeUnit.SECONDS.sleep(3);
  verifyConnectionMetrics(metrics.dumpJson(), 0, 1);
  TSessionHandle sessionHandle = tOpenSessionResp.getSessionHandle();
  TCloseSessionReq closeSessionReq = new TCloseSessionReq(sessionHandle);
  httpClient.CloseSession(closeSessionReq);
  TimeUnit.SECONDS.sleep(3);
  verifyConnectionMetrics(metrics.dumpJson(), 0, 2);
  tOpenSessionResp = httpClient.OpenSession(openSessionReq);
  TimeUnit.SECONDS.sleep(3);
  verifyConnectionMetrics(metrics.dumpJson(), 0, 3);
  sessionHandle = tOpenSessionResp.getSessionHandle();
  closeSessionReq = new TCloseSessionReq(sessionHandle);
  httpClient.CloseSession(closeSessionReq);
  TimeUnit.SECONDS.sleep(3);
  verifyConnectionMetrics(metrics.dumpJson(), 0, 4);
}
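The metrics assertions above rely on every OpenSession being paired with a CloseSession so the server-side counters settle. A small sketch of that lifecycle follows; SessionLifecycleSketch, withSession, and the SessionWork callback are hypothetical names, and the sketch assumes an already-connected TCLIService.Client is supplied by the caller.

import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TCloseSessionReq;
import org.apache.hive.service.rpc.thrift.TOpenSessionReq;
import org.apache.hive.service.rpc.thrift.TSessionHandle;
import org.apache.thrift.TException;

public class SessionLifecycleSketch {
  // Hypothetical helper: open a session, run the given work, and always close the
  // session so the server's open-session counters drop back down.
  static void withSession(TCLIService.Client client, SessionWork work) throws TException {
    TSessionHandle handle = client.OpenSession(new TOpenSessionReq()).getSessionHandle();
    try {
      work.run(handle);
    } finally {
      client.CloseSession(new TCloseSessionReq(handle));
    }
  }

  interface SessionWork {
    void run(TSessionHandle handle) throws TException;
  }
}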
use of org.apache.hive.service.rpc.thrift.TOpenSessionReq in project hive by apache.
the class TestThriftHttpCLIServiceFeatures method testCustomCookies.
/**
 * Test that custom cookies are passed to the request interceptor.
 * @throws Exception
 */
@Test
public void testCustomCookies() throws Exception {
  TTransport transport;
  DefaultHttpClient hClient = new DefaultHttpClient();
  String httpUrl = getHttpUrl();
  Map<String, String> additionalHeaders = new HashMap<String, String>();
  Map<String, String> cookieHeaders = new HashMap<String, String>();
  cookieHeaders.put("key1", "value1");
  cookieHeaders.put("key2", "value2");
  HttpBasicAuthInterceptorWithLogging authInt = new HttpBasicAuthInterceptorWithLogging(
      ThriftCLIServiceTest.USERNAME, ThriftCLIServiceTest.PASSWORD, null, null, false,
      additionalHeaders, cookieHeaders);
  hClient.addRequestInterceptor(authInt);
  transport = new THttpClient(httpUrl, hClient);
  TCLIService.Client httpClient = getClient(transport);
  // Create a new open session request object
  TOpenSessionReq openReq = new TOpenSessionReq();
  httpClient.OpenSession(openReq).getSessionHandle();
  String cookieHeader = authInt.getCookieHeader();
  assertTrue(cookieHeader.contains("key1=value1"));
  assertTrue(cookieHeader.contains("key2=value2"));
}
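Here the cookie key/value pairs are handed to HttpBasicAuthInterceptorWithLogging, which folds them into the Cookie header of each request. A minimal sketch of a standalone interceptor doing the same job is shown below; CustomCookieInterceptor is a hypothetical class written against the standard org.apache.http HttpRequestInterceptor interface, not a class from the Hive codebase.

import java.io.IOException;
import java.util.Map;
import java.util.stream.Collectors;

import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.protocol.HttpContext;

/** Hypothetical interceptor that attaches extra cookie pairs to every outgoing request. */
public class CustomCookieInterceptor implements HttpRequestInterceptor {
  private final Map<String, String> cookies;

  public CustomCookieInterceptor(Map<String, String> cookies) {
    this.cookies = cookies;
  }

  @Override
  public void process(HttpRequest request, HttpContext context)
      throws HttpException, IOException {
    // Join the pairs as "key1=value1; key2=value2" and add them as a Cookie header.
    String header = cookies.entrySet().stream()
        .map(e -> e.getKey() + "=" + e.getValue())
        .collect(Collectors.joining("; "));
    request.addHeader("Cookie", header);
  }
}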
use of org.apache.hive.service.rpc.thrift.TOpenSessionReq in project hive by apache.
the class TestThriftHttpCLIServiceFeatures method testOpenSessionExpectedException.
private void testOpenSessionExpectedException(TCLIService.Client client) {
  boolean caughtEx = false;
  // Create a new open session request object
  TOpenSessionReq openReq = new TOpenSessionReq();
  try {
    client.OpenSession(openReq).getSessionHandle();
  } catch (Exception e) {
    caughtEx = true;
    System.out.println("Exception expected: " + e.toString());
  }
  assertTrue("Exception expected", caughtEx);
}
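The boolean-flag pattern above can be written more tersely with JUnit 4.13's assertThrows. The sketch below assumes JUnit 4.13 or later on the classpath; OpenSessionFailureSketch and assertOpenSessionFails are hypothetical names, and the supplied client is expected to point at an endpoint that rejects the call (for example, a mismatched transport or bad credentials).

import static org.junit.Assert.assertThrows;

import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TOpenSessionReq;

public class OpenSessionFailureSketch {
  // Same check as above, expressed with assertThrows instead of a hand-rolled flag.
  static void assertOpenSessionFails(TCLIService.Client client) {
    assertThrows(Exception.class,
        () -> client.OpenSession(new TOpenSessionReq()).getSessionHandle());
  }
}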