Use of org.apache.hive.jdbc.HiveConnection in project kylo by Teradata.
The class TestKerberosKinit, method testHiveJdbcConnection:
private void testHiveJdbcConnection(final String configResources, final String keytab, final String realUserPrincipal, final String proxyUser, final String hiveHostName) throws Exception {
    final Configuration configuration = TestKerberosKinit.createConfigurationFromList(configResources);
    UserGroupInformation realugi = TestKerberosKinit.generateKerberosTicket(configuration, keytab, realUserPrincipal);
    System.out.println(" ");
    System.out.println("Successfully got a Kerberos ticket in the JVM");
    HiveConnection realUserConnection = (HiveConnection) realugi.doAs(new PrivilegedExceptionAction<Connection>() {

        public Connection run() {
            Connection connection = null;
            Statement stmt = null;
            ResultSet res = null;
            try {
                Class.forName(DRIVER_NAME);
                String url = hiveHostName;
                if (proxyUser != null) {
                    url = url + ";hive.server2.proxy.user=" + proxyUser;
                }
                System.out.println("Hive URL: " + url);
                connection = DriverManager.getConnection(url);
                System.out.println("creating statement");
                stmt = connection.createStatement();
                String sql = "show databases";
                res = stmt.executeQuery(sql);
                System.out.println(" \n");
                System.out.println("Executing the Hive Query:");
                System.out.println(" ");
                System.out.println("List of Databases");
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
            } catch (Exception e) {
                throw new RuntimeException("Error creating connection with proxy user", e);
            } finally {
                // Release JDBC resources; note that the connection returned below has already been closed.
                JdbcUtils.closeResultSet(res);
                JdbcUtils.closeStatement(stmt);
                JdbcUtils.closeConnection(connection);
            }
            return connection;
        }
    });
}
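As a usage sketch only (not code from the Kylo project), the private helper above could be driven from a hypothetical main method added to the same class; every argument value below is a placeholder.

// Hypothetical invocation of the helper above; all paths, principals and URLs are placeholders.
public static void main(String[] args) throws Exception {
    new TestKerberosKinit().testHiveJdbcConnection(
        "/etc/hadoop/conf/core-site.xml,/etc/hadoop/conf/hdfs-site.xml", // configResources
        "/etc/security/keytabs/kylo.keytab",                             // keytab
        "kylo@EXAMPLE.COM",                                              // real user principal
        "proxied-user",                                                  // proxy user (may be null)
        "jdbc:hive2://hive-host.example.com:10000/default;principal=hive/hive-host.example.com@EXAMPLE.COM");
}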
Use of org.apache.hive.jdbc.HiveConnection in project hive by apache.
The class TestHttpCookieAuthenticationTest, method getCookieStoreFromConnection:
// Reflectively reaches the equivalent of:
// ((InternalHttpClient) ((THttpClient) ((HiveConnection) connection).transport).client).cookieStore.getCookies()
private CookieStore getCookieStoreFromConnection(Connection connection) throws Exception {
    CookieStore cookieStore = null;
    if (connection instanceof HiveConnection) {
        HiveConnection hiveConnection = (HiveConnection) connection;
        Field transportField = hiveConnection.getClass().getDeclaredField("transport");
        transportField.setAccessible(true);
        TTransport transport = (TTransport) transportField.get(hiveConnection);
        if (transport instanceof THttpClient) {
            THttpClient httpTransport = (THttpClient) transport;
            Field clientField = httpTransport.getClass().getDeclaredField("client");
            clientField.setAccessible(true);
            HttpClient httpClient = (HttpClient) clientField.get(httpTransport);
            Field cookieStoreField = httpClient.getClass().getDeclaredField("cookieStore");
            cookieStoreField.setAccessible(true);
            cookieStore = (CookieStore) cookieStoreField.get(httpClient);
        }
    }
    return cookieStore;
}
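As a usage sketch (not part of the Hive test), a companion test in the same class could use this helper to assert that HiveServer2 issued a session cookie over the HTTP transport; the JDBC URL below is an assumption.

// Hypothetical companion test in the same class; the JDBC URL is a placeholder.
@Test
public void testCookiePresentSketch() throws Exception {
    Connection connection = DriverManager.getConnection(
        "jdbc:hive2://localhost:10001/default;transportMode=http;httpPath=cliservice");
    try (Statement stmt = connection.createStatement()) {
        // The first HTTP request should carry back a Set-Cookie from the server.
        stmt.execute("show databases");
        CookieStore cookieStore = getCookieStoreFromConnection(connection);
        Assert.assertNotNull("expected a cookie store on the HTTP transport", cookieStore);
        Assert.assertFalse("expected at least one cookie", cookieStore.getCookies().isEmpty());
    } finally {
        connection.close();
    }
}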
Use of org.apache.hive.jdbc.HiveConnection in project hive by apache.
The class TestJdbcWithMiniKdc, method testTokenAuth:
/**
 * Test token-based authentication over Kerberos.
 * Log in as the super user and retrieve a delegation token for the normal user,
 * then use the token to connect as the normal user.
 * @throws Exception
 */
@Test
public void testTokenAuth() throws Exception {
    miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_SUPER_USER);
    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
    // retrieve token and store in the cache
    String token = ((HiveConnection) hs2Conn).getDelegationToken(MiniHiveKdc.HIVE_TEST_USER_1, MiniHiveKdc.HIVE_SERVICE_PRINCIPAL);
    assertTrue(token != null && !token.isEmpty());
    hs2Conn.close();
    UserGroupInformation ugi = miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
    // Store token in the cache
    storeToken(token, ugi);
    hs2Conn = DriverManager.getConnection(miniHS2.getBaseJdbcURL() + "default;auth=delegationToken");
    verifyProperty(SESSION_USER_NAME, MiniHiveKdc.HIVE_TEST_USER_1);
}
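For context, the same delegation-token lifecycle can be sketched against a plain HiveServer2 URL using HiveConnection's public token methods; the URL, principal and user name below are placeholders, not values from the Hive test.

// Sketch of the delegation-token lifecycle on HiveConnection; URLs, principals and user names are placeholders.
public void delegationTokenSketch() throws Exception {
    HiveConnection conn = (HiveConnection) DriverManager.getConnection(
        "jdbc:hive2://hive-host.example.com:10000/default;principal=hive/hive-host.example.com@EXAMPLE.COM");
    // Fetch a token owned by the target user and renewable by the HiveServer2 principal.
    String tokenStr = conn.getDelegationToken("someuser", "hive/hive-host.example.com@EXAMPLE.COM");
    // A separate client would place tokenStr in its UGI credentials and connect with ";auth=delegationToken".
    conn.renewDelegationToken(tokenStr);   // keep the token alive
    conn.cancelDelegationToken(tokenStr);  // invalidate it when done
    conn.close();
}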
Use of org.apache.hive.jdbc.HiveConnection in project hive by apache.
The class TestHs2Hooks, method testPostAnalysisHookContexts:
@Test
public void testPostAnalysisHookContexts() throws Throwable {
    Properties connProp = new Properties();
    connProp.setProperty("user", System.getProperty("user.name"));
    connProp.setProperty("password", "");
    HiveConnection connection = new HiveConnection("jdbc:hive2://localhost:10000/default", connProp);
    Statement stmt = connection.createStatement();
    stmt.execute("create table testPostAnalysisHookContexts as select '3'");
    Throwable error = PostExecHook.error;
    if (error != null) {
        throw error;
    }
    error = PreExecHook.error;
    if (error != null) {
        throw error;
    }
    Assert.assertEquals(HiveOperation.CREATETABLE_AS_SELECT, SemanticAnalysisHook.commandType);
    error = SemanticAnalysisHook.preAnalyzeError;
    if (error != null) {
        throw error;
    }
    error = SemanticAnalysisHook.postAnalyzeError;
    if (error != null) {
        throw error;
    }
    stmt.close();
    connection.close();
}
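The static fields read in this test (error, commandType, and, in the next test, userName, ipAddress and operation) are populated by hook classes registered with the server. A minimal sketch of such a post-execution hook, assuming the ExecuteWithHookContext interface and the HookContext getters named below, is given here; the real PostExecHook nested in TestHs2Hooks may differ in detail.

// Sketch of a post-execution hook; the real PostExecHook in TestHs2Hooks may differ in detail.
public static class PostExecHook implements ExecuteWithHookContext {
    public static String userName;
    public static String ipAddress;
    public static String operation;
    public static Throwable error;

    @Override
    public void run(HookContext hookContext) {
        try {
            userName = hookContext.getUserName();
            ipAddress = hookContext.getIpAddress();
            operation = hookContext.getOperationName();
        } catch (Throwable t) {
            // Surface failures to the test thread via the static field.
            error = t;
        }
    }
}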
Use of org.apache.hive.jdbc.HiveConnection in project hive by apache.
The class TestHs2Hooks, method testHookContexts:
/**
 * Test that hook context properties are correctly set.
 */
@Test
public void testHookContexts() throws Throwable {
    Properties connProp = new Properties();
    connProp.setProperty("user", System.getProperty("user.name"));
    connProp.setProperty("password", "");
    HiveConnection connection = new HiveConnection("jdbc:hive2://localhost:10000/default", connProp);
    Statement stmt = connection.createStatement();
    stmt.executeQuery("show databases");
    stmt.executeQuery("show tables");
    Throwable error = PostExecHook.error;
    if (error != null) {
        throw error;
    }
    error = PreExecHook.error;
    if (error != null) {
        throw error;
    }
    Assert.assertEquals(System.getProperty("user.name"), PostExecHook.userName);
    // JUnit's assertNotNull takes the failure message first, then the value under test.
    Assert.assertNotNull("ipaddress is null", PostExecHook.ipAddress);
    Assert.assertNotNull("userName is null", PostExecHook.userName);
    Assert.assertNotNull("operation is null", PostExecHook.operation);
    Assert.assertTrue(PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
    Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
    Assert.assertEquals(System.getProperty("user.name"), PreExecHook.userName);
    Assert.assertNotNull("ipaddress is null", PreExecHook.ipAddress);
    Assert.assertNotNull("userName is null", PreExecHook.userName);
    Assert.assertNotNull("operation is null", PreExecHook.operation);
    Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
    Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
    error = SemanticAnalysisHook.preAnalyzeError;
    if (error != null) {
        throw error;
    }
    error = SemanticAnalysisHook.postAnalyzeError;
    if (error != null) {
        throw error;
    }
    Assert.assertNotNull("semantic hook context ipaddress is null", SemanticAnalysisHook.ipAddress);
    Assert.assertNotNull("semantic hook context userName is null", SemanticAnalysisHook.userName);
    Assert.assertNotNull("semantic hook context command is null", SemanticAnalysisHook.command);
    Assert.assertNotNull("semantic hook context commandType is null", SemanticAnalysisHook.commandType);
    Assert.assertTrue(SemanticAnalysisHook.ipAddress, SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
    Assert.assertEquals("show tables", SemanticAnalysisHook.command);
    stmt.close();
    connection.close();
}
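For the pre-execution, post-execution and semantic-analysis hooks to fire at all, the HiveServer2 under test must be started with them registered. A minimal configuration sketch follows, assuming the standard hook properties; the hook package name is a placeholder, not the one used by TestHs2Hooks.

// Sketch of registering the hooks before starting HiveServer2; the hook package name is a placeholder.
HiveConf hiveConf = new HiveConf();
hiveConf.set("hive.exec.pre.hooks", "org.example.hooks.PreExecHook");
hiveConf.set("hive.exec.post.hooks", "org.example.hooks.PostExecHook");
hiveConf.set("hive.semantic.analyzer.hook", "org.example.hooks.SemanticAnalysisHook");
HiveServer2 hiveServer2 = new HiveServer2();
hiveServer2.init(hiveConf);
hiveServer2.start();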