Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestColumnAccess, method testJoinView1AndTable2.
@Test
public void testJoinView1AndTable2() throws ParseException {
  String query = "select * from v1 join t2 on (v1.id1 = t2.id1)";
  Driver driver = createDriver();
  int rc = driver.compile(query);
  Assert.assertEquals("Checking command success", 0, rc);
  QueryPlan plan = driver.getPlan();
  // check access columns from ColumnAccessInfo
  ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
  // t1 is inside v1, we should not care about its access info.
  List<String> cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
  Assert.assertNull(cols);
  // v1 is the top-level view, we should care about its access info.
  cols = columnAccessInfo.getTableToColumnAccessMap().get("default@v1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t2");
  Assert.assertNotNull(cols);
  Assert.assertEquals(3, cols.size());
  Assert.assertTrue(cols.contains("id2"));
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  // check access columns from readEntity
  Map<String, List<String>> tableColsMap = getColsFromReadEntity(plan.getInputs());
  cols = tableColsMap.get("default@t1");
  Assert.assertNull(cols);
  cols = tableColsMap.get("default@v1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  cols = tableColsMap.get("default@t2");
  Assert.assertNotNull(cols);
  Assert.assertEquals(3, cols.size());
  Assert.assertTrue(cols.contains("id2"));
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
}
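The getColsFromReadEntity helper used above is defined elsewhere in TestColumnAccess and is not part of this excerpt. Below is a minimal sketch of what such a helper could look like, assuming the Entity API's getType(), getTable(), getCompleteName() and getAccessedColumns() accessors; the real method may handle additional entity types.

// Minimal sketch (not the excerpted code): collect per-table accessed columns
// from the plan's ReadEntity inputs, keyed as "db@table".
private Map<String, List<String>> getColsFromReadEntity(Set<ReadEntity> inputs) {
  Map<String, List<String>> tableColsMap = new HashMap<>();
  for (ReadEntity entity : inputs) {
    // only direct table/view reads carry accessed-column information
    if (entity.getType() == Entity.Type.TABLE
        && entity.getAccessedColumns() != null
        && !entity.getAccessedColumns().isEmpty()) {
      tableColsMap.put(entity.getTable().getCompleteName(), entity.getAccessedColumns());
    }
  }
  return tableColsMap;
}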
Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestColumnAccess, method testQueryTable1.
@Test
public void testQueryTable1() throws ParseException {
  String query = "select * from t1";
  Driver driver = createDriver();
  int rc = driver.compile(query);
  Assert.assertEquals("Checking command success", 0, rc);
  QueryPlan plan = driver.getPlan();
  // check access columns from ColumnAccessInfo
  ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
  List<String> cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
  // check access columns from readEntity
  Map<String, List<String>> tableColsMap = getColsFromReadEntity(plan.getInputs());
  cols = tableColsMap.get("default@t1");
  Assert.assertNotNull(cols);
  Assert.assertEquals(2, cols.size());
  Assert.assertTrue(cols.contains("id1"));
  Assert.assertTrue(cols.contains("name1"));
}
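createDriver() is likewise defined outside this excerpt. A rough sketch under the assumption that the test only needs a HiveConf, a started SessionState and a Driver; the concrete configuration (authorization, metastore settings, the test tables and view) in the real test class will differ.

// Hypothetical reconstruction: configure Hive, start a session, return a Driver
// whose compile()/getPlan() can then be inspected as in the tests above.
private static Driver createDriver() {
  HiveConf conf = new HiveConf(Driver.class);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  SessionState.start(conf);
  return new Driver(conf);
}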
Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestDbTxnManager, method testDDLExclusive.
@Test
public void testDDLExclusive() throws Exception {
  WriteEntity we = addTableOutput(WriteEntity.WriteType.DDL_EXCLUSIVE);
  QueryPlan qp = new MockQueryPlan(this);
  txnMgr.acquireLocks(qp, ctx, "fred");
  List<HiveLock> locks = ctx.getHiveLocks();
  Assert.assertEquals(1, locks.size());
  Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  txnMgr.getLockManager().unlock(locks.get(0));
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(0, locks.size());
}
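MockQueryPlan is an inner helper of TestDbTxnManager and is not shown here. A plausible sketch, assuming it only needs to hand the test's read/write entity sets to the transaction manager through QueryPlan's getInputs()/getOutputs(); the actual class may carry extra state such as a query id or operation type.

// Hypothetical reconstruction of MockQueryPlan: it exposes the test's entity
// sets so acquireLocks() can build its lock requests from them.
private static class MockQueryPlan extends QueryPlan {
  private final HashSet<ReadEntity> inputs;
  private final HashSet<WriteEntity> outputs;

  MockQueryPlan(TestDbTxnManager test) {
    inputs = test.readEntities;
    outputs = test.writeEntities;
  }

  @Override
  public HashSet<ReadEntity> getInputs() {
    return inputs;
  }

  @Override
  public HashSet<WriteEntity> getOutputs() {
    return outputs;
  }
}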
Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestDbTxnManager, method testLockTimeout.
@Test
public void testLockTimeout() throws Exception {
  addPartitionInput(newTable(true));
  QueryPlan qp = new MockQueryPlan(this);
  // make sure it works with nothing to expire
  testLockExpiration(txnMgr, 0, true);
  // create a few read locks, all on the same resource
  for (int i = 0; i < 5; i++) {
    // no heartbeat
    ((DbTxnManager) txnMgr).acquireLocks(qp, ctx, "PeterI" + i, true);
  }
  testLockExpiration(txnMgr, 5, true);
  // create a lot of locks
  for (int i = 0; i < TEST_TIMED_OUT_TXN_ABORT_BATCH_SIZE + 17; i++) {
    // no heartbeat
    ((DbTxnManager) txnMgr).acquireLocks(qp, ctx, "PeterI" + i, true);
  }
  testLockExpiration(txnMgr, TEST_TIMED_OUT_TXN_ABORT_BATCH_SIZE + 17, true);
  // Create a lock, but send the heartbeat with a long delay. The lock will get expired.
  ((DbTxnManager) txnMgr).acquireLocksWithHeartbeatDelay(qp, ctx, "bob",
      HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_TXN_TIMEOUT, TimeUnit.MILLISECONDS) * 10);
  testLockExpiration(txnMgr, 1, true);
  // Create a lock and trigger a heartbeat. With heartbeat, the lock won't expire.
  txnMgr.acquireLocks(qp, ctx, "peter");
  testLockExpiration(txnMgr, 1, false);
}
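The testLockExpiration helper referenced throughout this test is also defined elsewhere in TestDbTxnManager. A simplified sketch of the check it performs, with runReaper() as a hypothetical stand-in for triggering the metastore's lock/transaction timeout housekeeping; the real helper is more involved.

// Simplified sketch: count locks, let the timeout reaper run, then verify
// whether the locks were expired. runReaper() is a hypothetical placeholder.
private void testLockExpiration(HiveTxnManager txnMgr, int numLocksBefore, boolean shouldExpire) throws Exception {
  List<HiveLock> locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(numLocksBefore, locks.size());
  runReaper();
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(shouldExpire ? 0 : numLocksBefore, locks.size());
}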
Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache.
From the class TestDbTxnManager, method testSingleReadTable.
@Test
public void testSingleReadTable() throws Exception {
  addTableInput();
  QueryPlan qp = new MockQueryPlan(this);
  txnMgr.acquireLocks(qp, ctx, "fred");
  List<HiveLock> locks = ctx.getHiveLocks();
  Assert.assertEquals(1, locks.size());
  Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
  txnMgr.getLockManager().unlock(locks.get(0));
  locks = txnMgr.getLockManager().getLocks(false, false);
  Assert.assertEquals(0, locks.size());
}
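addTableInput() and addTableOutput() (used in testDDLExclusive above) simply register entities for MockQueryPlan to return. A hedged sketch, assuming the ReadEntity(Table) and WriteEntity(Table, WriteType) constructors and the test's own newTable() helper; the real methods may record additional bookkeeping.

// Hypothetical reconstruction of the entity helpers: they wrap a test table in
// Read/WriteEntity objects and record them for MockQueryPlan.getInputs()/getOutputs().
private ReadEntity addTableInput() {
  ReadEntity re = new ReadEntity(newTable(false));
  readEntities.add(re);
  return re;
}

private WriteEntity addTableOutput(WriteEntity.WriteType writeType) {
  WriteEntity we = new WriteEntity(newTable(false), writeType);
  writeEntities.add(we);
  return we;
}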