Use of java.sql.Connection in project hive by apache.
The class TestJdbcWithMiniHS2, method testParallelCompilation.
@Test
public void testParallelCompilation() throws Exception {
  Statement stmt = conTestDb.createStatement();
  stmt.execute("set hive.driver.parallel.compilation=true");
  stmt.execute("set hive.server2.async.exec.async.compile=true");
  stmt.close();
  startConcurrencyTest(conTestDb, tableName, 10);
  Connection conn = getConnection(testDbName);
  startConcurrencyTest(conn, tableName, 10);
  conn.close();
}
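startConcurrencyTest is a private helper in TestJdbcWithMiniHS2 whose body is not shown in this listing. A minimal sketch of what such a helper could look like, assuming it simply fires the same query from several threads against one connection and propagates any failure; the query text, executor setup, and error handling below are illustrative assumptions, not the actual Hive implementation:

// Assumed imports: java.util.ArrayList, java.util.List,
// java.util.concurrent.ExecutorService, java.util.concurrent.Executors, java.util.concurrent.Future
private void startConcurrencyTest(Connection conn, String tableName, int numThreads) throws Exception {
  // Deliberately share one connection so all statements compile within the same HS2 session
  ExecutorService pool = Executors.newFixedThreadPool(numThreads);
  List<Future<?>> futures = new ArrayList<>();
  for (int i = 0; i < numThreads; i++) {
    futures.add(pool.submit(() -> {
      try (Statement stmt = conn.createStatement();
          ResultSet rs = stmt.executeQuery("select count(*) from " + tableName)) {
        assertTrue("query returned no rows", rs.next());
      }
      return null;
    }));
  }
  for (Future<?> f : futures) {
    f.get(); // rethrows any exception raised in a worker thread
  }
  pool.shutdownNow();
}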
Use of java.sql.Connection in project hive by apache.
The class TestJdbcWithMiniHS2, method testURIDatabaseName.
/**
 * This test connects to a database directly through the JDBC URI, without issuing "USE <db>":
 * 1) Connect to the default database.
 * 2) Create a new database test_connection_non_default_db.
 * 3) Connect to the test_connection_non_default_db database.
 * 4) Connect and create the table table_in_non_default_schema in it.
 * 5) Connect and display all tables.
 * 6) Connect to the default database; table_in_non_default_schema should not be visible there.
 * 7) Connect and drop table_in_non_default_schema.
 * 8) Drop the test_connection_non_default_db database.
 */
@Test
public void testURIDatabaseName() throws Exception {
  String jdbcUri = miniHS2.getJdbcURL().substring(0, miniHS2.getJdbcURL().indexOf("default"));
  Connection conn = getConnection(jdbcUri + "default", System.getProperty("user.name"), "bar");
  String dbName = "test_connection_non_default_db";
  String tableInNonDefaultSchema = "table_in_non_default_schema";
  Statement stmt = conn.createStatement();
  stmt.execute("create database if not exists " + dbName);
  stmt.close();
  conn.close();
  conn = getConnection(jdbcUri + dbName, System.getProperty("user.name"), "bar");
  stmt = conn.createStatement();
  boolean expected = stmt.execute(" create table " + tableInNonDefaultSchema + " (x int)");
  stmt.close();
  conn.close();
  conn = getConnection(jdbcUri + dbName, System.getProperty("user.name"), "bar");
  stmt = conn.createStatement();
  ResultSet res = stmt.executeQuery("show tables");
  boolean testTableExists = false;
  while (res.next()) {
    assertNotNull("table name is null in result set", res.getString(1));
    if (tableInNonDefaultSchema.equalsIgnoreCase(res.getString(1))) {
      testTableExists = true;
    }
  }
  assertTrue("table name " + tableInNonDefaultSchema + " not found in SHOW TABLES result set", testTableExists);
  stmt.close();
  conn.close();
  conn = getConnection(jdbcUri + "default", System.getProperty("user.name"), "bar");
  stmt = conn.createStatement();
  res = stmt.executeQuery("show tables");
  testTableExists = false;
  while (res.next()) {
    assertNotNull("table name is null in result set", res.getString(1));
    if (tableInNonDefaultSchema.equalsIgnoreCase(res.getString(1))) {
      testTableExists = true;
    }
  }
  assertFalse("table name " + tableInNonDefaultSchema + " found in SHOW TABLES result set", testTableExists);
  stmt.close();
  conn.close();
  conn = getConnection(jdbcUri + dbName, System.getProperty("user.name"), "bar");
  stmt = conn.createStatement();
  stmt.execute("set hive.support.concurrency = false");
  res = stmt.executeQuery("show tables");
  stmt.execute(" drop table if exists table_in_non_default_schema");
  expected = stmt.execute("DROP DATABASE " + dbName);
  stmt.close();
  conn.close();
  conn = getConnection(jdbcUri + "default", System.getProperty("user.name"), "bar");
  stmt = conn.createStatement();
  res = stmt.executeQuery("show tables");
  testTableExists = false;
  while (res.next()) {
    assertNotNull("table name is null in result set", res.getString(1));
    if (tableInNonDefaultSchema.equalsIgnoreCase(res.getString(1))) {
      testTableExists = true;
    }
  }
  // The table must no longer be visible after DROP DATABASE
  assertFalse("table name " + tableInNonDefaultSchema + " found in SHOW TABLES result set", testTableExists);
  stmt.close();
  conn.close();
  // test URI with no dbName
  conn = getConnection(jdbcUri, System.getProperty("user.name"), "bar");
  verifyCurrentDB("default", conn);
  conn.close();
  conn = getConnection(jdbcUri + ";", System.getProperty("user.name"), "bar");
  verifyCurrentDB("default", conn);
  conn.close();
  conn = getConnection(jdbcUri + ";/foo=bar;foo1=bar1", System.getProperty("user.name"), "bar");
  verifyCurrentDB("default", conn);
  conn.close();
}
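verifyCurrentDB is another helper from the same class, used to confirm which database a session is attached to. A plausible sketch, assuming it can rely on Hive's current_database() UDF; the actual Hive helper may verify this differently:

// Hypothetical sketch: assert that the session behind conn is using the expected database.
private void verifyCurrentDB(String expectedDbName, Connection conn) throws Exception {
  try (Statement stmt = conn.createStatement();
      ResultSet rs = stmt.executeQuery("select current_database()")) {
    assertTrue("current_database() returned no rows", rs.next());
    assertEquals("unexpected current database", expectedDbName, rs.getString(1));
  }
}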
Use of java.sql.Connection in project hive by apache.
The class TestJdbcWithMiniHS2, method testAddJarDataNucleusUnCaching.
/**
 * Tests that DataNucleus' NucleusContext.classLoaderResolverMap clears cached class objects
 * (and hence does not leak classloaders) when a session is closed.
 *
 * @throws Exception
 */
@Test
public void testAddJarDataNucleusUnCaching() throws Exception {
  Path jarFilePath = getHiveContribJarPath();
  // We need a new connection object as we'll check the cache size after connection close
  Connection conn = getConnection(miniHS2.getJdbcURL(testDbName), System.getProperty("user.name"), "password");
  Statement stmt = conn.createStatement();
  int mapSizeAfterClose;
  // Add the jar file
  stmt.execute("ADD JAR " + jarFilePath.toString());
  // Create a temporary function using the jar
  stmt.execute("CREATE TEMPORARY FUNCTION add_func AS '" + testUdfClassName + "'");
  ResultSet res = stmt.executeQuery("DESCRIBE FUNCTION add_func");
  checkForNotExist(res);
  // Execute the UDF
  stmt.execute("SELECT add_func(int_col, 1) from " + tableName + " limit 1");
  // Close the connection
  conn.close();
  mapSizeAfterClose = getNucleusClassLoaderResolverMapSize();
  System.out.println("classLoaderResolverMap size after connection close: " + mapSizeAfterClose);
  // Cache size should be 0 now
  Assert.assertTrue("Failed; NucleusContext classLoaderResolverMap size: " + mapSizeAfterClose, mapSizeAfterClose == 0);
}
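getNucleusClassLoaderResolverMapSize is where the test peeks into DataNucleus internals. A rough sketch of the reflection involved, assuming the metastore's JDOPersistenceManagerFactory is reachable from the test and that the cache lives in AbstractNucleusContext.classLoaderResolverMap; the field's location varies across DataNucleus versions, and getMetastorePmf() below is a hypothetical accessor, not a real Hive method:

// Assumed imports: java.lang.reflect.Field, java.util.Map,
// org.datanucleus.AbstractNucleusContext, org.datanucleus.api.jdo.JDOPersistenceManagerFactory
@SuppressWarnings("unchecked")
private int getNucleusClassLoaderResolverMapSize() throws Exception {
  JDOPersistenceManagerFactory pmf = getMetastorePmf(); // hypothetical accessor to the metastore PMF
  Field mapField = AbstractNucleusContext.class.getDeclaredField("classLoaderResolverMap");
  mapField.setAccessible(true);
  Map<String, ?> resolverMap = (Map<String, ?>) mapField.get(pmf.getNucleusContext());
  return resolverMap == null ? 0 : resolverMap.size();
}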
Use of java.sql.Connection in project hive by apache.
The class TestJdbcWithMiniHS2, method testSessionScratchDirs.
/**
* Tests the creation of the 3 scratch dirs: hdfs, local, downloaded resources (which is also local).
* 1. Test with doAs=false: open a new JDBC session and verify the presence of directories/permissions
* 2. Test with doAs=true: open a new JDBC session and verify the presence of directories/permissions
* @throws Exception
*/
@Test
public void testSessionScratchDirs() throws Exception {
  // Stop HiveServer2
  stopMiniHS2();
  HiveConf conf = new HiveConf();
  String userName;
  Path scratchDirPath;
  // Set a custom prefix for hdfs scratch dir path
  conf.set("hive.exec.scratchdir", tmpDir + "/hs2");
  // Set a scratch dir permission
  String fsPermissionStr = "700";
  conf.set("hive.scratch.dir.permission", fsPermissionStr);
  // Start an instance of HiveServer2 which uses miniMR
  startMiniHS2(conf);
  // 1. Test with doAs=false
  String sessionConf = "hive.server2.enable.doAs=false";
  userName = System.getProperty("user.name");
  Connection conn = getConnection(miniHS2.getJdbcURL(testDbName, sessionConf), userName, "password");
  // FS
  FileSystem fs = miniHS2.getLocalFS();
  FsPermission expectedFSPermission = new FsPermission(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIRPERMISSION));
  // Verify scratch dir paths and permission
  // HDFS scratch dir
  scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR) + "/" + userName);
  verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, false);
  // Local scratch dir
  scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR));
  verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, true);
  // Downloaded resources dir
  scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
  verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, true);
  conn.close();
  // 2. Test with doAs=true
  sessionConf = "hive.server2.enable.doAs=true";
  // Test for user "neo"
  userName = "neo";
  conn = getConnection(miniHS2.getJdbcURL(testDbName, sessionConf), userName, "the-one");
  // Verify scratch dir paths and permission
  // HDFS scratch dir
  scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR) + "/" + userName);
  verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, false);
  // Local scratch dir
  scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR));
  verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, true);
  // Downloaded resources dir
  scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));
  verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, true);
  conn.close();
  // Restore original state
  restoreMiniHS2AndConnections();
}
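verifyScratchDir encapsulates the directory checks; a minimal sketch of the kind of assertions it plausibly makes, assuming it only verifies existence and, for the HDFS dir, the configured permission. The exact checks in Hive's helper may go further, for example asserting ownership:

// Hypothetical sketch: check that a scratch dir was created and carries the expected permission.
// Assumed import: org.apache.hadoop.fs.FileStatus
private void verifyScratchDir(HiveConf conf, FileSystem fs, Path scratchDirPath,
    FsPermission expectedFSPermission, String userName, boolean isLocal) throws Exception {
  String dirKind = isLocal ? "local" : "DFS";
  assertTrue("Expected " + dirKind + " scratch dir is missing: " + scratchDirPath,
      fs.exists(scratchDirPath));
  if (!isLocal) {
    // Only the per-user HDFS scratch dir is created with hive.scratch.dir.permission
    FileStatus status = fs.getFileStatus(scratchDirPath);
    assertEquals("Unexpected permission on " + scratchDirPath,
        expectedFSPermission, status.getPermission());
  }
}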
Use of java.sql.Connection in project hive by apache.
The class TestJdbcWithMiniHS2, method testRootScratchDir.
/**
* Tests the creation of the root hdfs scratch dir, which should be writable by all.
*
* @throws Exception
*/
@Test
public void testRootScratchDir() throws Exception {
  // Stop HiveServer2
  stopMiniHS2();
  HiveConf conf = new HiveConf();
  String userName;
  Path scratchDirPath;
  conf.set("hive.exec.scratchdir", tmpDir + "/hs2");
  // Start an instance of HiveServer2 which uses miniMR
  startMiniHS2(conf);
  userName = System.getProperty("user.name");
  Connection conn = getConnection(miniHS2.getJdbcURL(testDbName), userName, "password");
  // FS
  FileSystem fs = miniHS2.getLocalFS();
  FsPermission expectedFSPermission = new FsPermission((short) 00733);
  // Verify scratch dir paths and permission
  // HDFS scratch dir
  scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR));
  verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, false);
  conn.close();
  // Test with multi-level scratch dir path
  // Stop HiveServer2
  stopMiniHS2();
  conf.set("hive.exec.scratchdir", tmpDir + "/level1/level2/level3");
  startMiniHS2(conf);
  conn = getConnection(miniHS2.getJdbcURL(testDbName), userName, "password");
  scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR));
  verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, false);
  conn.close();
  // Restore original state
  restoreMiniHS2AndConnections();
}
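The octal literal 00733 used for the root scratch dir grants rwx to the owner and write+execute (no read) to group and others, so any user can create a session subdirectory underneath it without being able to list other users' entries. A quick way to confirm the mapping:

// FsPermission built from octal 00733 renders symbolically as rwx-wx-wx
FsPermission rootScratchPerm = new FsPermission((short) 00733);
System.out.println(rootScratchPerm); // prints: rwx-wx-wx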