Example 26 with QueryPlan

Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache: class TestColumnAccess, method testJoinView1AndTable2.

@Test
public void testJoinView1AndTable2() throws ParseException {
    String query = "select * from v1 join t2 on (v1.id1 = t2.id1)";
    Driver driver = createDriver();
    int rc = driver.compile(query);
    Assert.assertEquals("Checking command success", 0, rc);
    QueryPlan plan = driver.getPlan();
    // check access columns from ColumnAccessInfo
    ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
    // t1 is only referenced inside v1, so we should not see its access info here.
    List<String> cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
    Assert.assertNull(cols);
    // v1 is the top-level view, so its access info should be recorded.
    cols = columnAccessInfo.getTableToColumnAccessMap().get("default@v1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertNotNull(cols.contains("id1"));
    Assert.assertNotNull(cols.contains("name1"));
    cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t2");
    Assert.assertNotNull(cols);
    Assert.assertEquals(3, cols.size());
    Assert.assertNotNull(cols.contains("id2"));
    Assert.assertNotNull(cols.contains("id1"));
    Assert.assertNotNull(cols.contains("name1"));
    // check access columns from readEntity
    Map<String, List<String>> tableColsMap = getColsFromReadEntity(plan.getInputs());
    cols = tableColsMap.get("default@t1");
    Assert.assertNull(cols);
    cols = tableColsMap.get("default@v1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertNotNull(cols.contains("id1"));
    Assert.assertNotNull(cols.contains("name1"));
    cols = tableColsMap.get("default@t2");
    Assert.assertNotNull(cols);
    Assert.assertEquals(3, cols.size());
    Assert.assertNotNull(cols.contains("id2"));
    Assert.assertNotNull(cols.contains("id1"));
    Assert.assertNotNull(cols.contains("name1"));
}
Also used : Driver(org.apache.hadoop.hive.ql.Driver) List(java.util.List) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) Test(org.junit.Test)
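
The helper getColsFromReadEntity referenced in this test is not part of the excerpt. A minimal sketch of what such a helper could look like, assuming ReadEntity exposes getType(), getTable(), and getAccessedColumns(), and that Table.getCompleteName() produces the "db@table" keys matched above (the actual helper in TestColumnAccess may differ):

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;

// Collect the accessed columns per table from the plan's read entities (sketch).
private Map<String, List<String>> getColsFromReadEntity(HashSet<ReadEntity> inputs) {
    Map<String, List<String>> tableColsMap = new HashMap<>();
    for (ReadEntity entity : inputs) {
        // Only table-typed entities with recorded column access are of interest here.
        if (entity.getType() == Entity.Type.TABLE
                && entity.getAccessedColumns() != null
                && !entity.getAccessedColumns().isEmpty()) {
            tableColsMap.put(entity.getTable().getCompleteName(), entity.getAccessedColumns());
        }
    }
    return tableColsMap;
}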

Example 27 with QueryPlan

Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache: class TestColumnAccess, method testQueryTable1.

@Test
public void testQueryTable1() throws ParseException {
    String query = "select * from t1";
    Driver driver = createDriver();
    int rc = driver.compile(query);
    Assert.assertEquals("Checking command success", 0, rc);
    QueryPlan plan = driver.getPlan();
    // check access columns from ColumnAccessInfo
    ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
    List<String> cols = columnAccessInfo.getTableToColumnAccessMap().get("default@t1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertNotNull(cols.contains("id1"));
    Assert.assertNotNull(cols.contains("name1"));
    // check access columns from readEntity
    Map<String, List<String>> tableColsMap = getColsFromReadEntity(plan.getInputs());
    cols = tableColsMap.get("default@t1");
    Assert.assertNotNull(cols);
    Assert.assertEquals(2, cols.size());
    Assert.assertNotNull(cols.contains("id1"));
    Assert.assertNotNull(cols.contains("name1"));
}
Also used : Driver(org.apache.hadoop.hive.ql.Driver) List(java.util.List) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) Test(org.junit.Test)
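
createDriver() is likewise elided from these excerpts. A plausible sketch, assuming column-access recording is switched on through the hive.stats.collect.scancols property (without it the planner would not populate ColumnAccessInfo); the real setup in TestColumnAccess may configure more than this:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

// Build a Driver whose compiled plans carry ColumnAccessInfo (sketch).
private static Driver createDriver() {
    HiveConf conf = new HiveConf(Driver.class);
    // Assumed flag: enables per-table scanned-column tracking during planning.
    conf.setBoolean("hive.stats.collect.scancols", true);
    SessionState.start(conf);
    return new Driver(conf);
}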

Example 28 with QueryPlan

Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache: class TestDbTxnManager, method testDDLExclusive.

@Test
public void testDDLExclusive() throws Exception {
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DDL_EXCLUSIVE);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    txnMgr.getLockManager().unlock(locks.get(0));
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used : QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) Test(org.junit.Test)
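
The txnMgr and ctx fixtures come from the test class's setup, which is not shown here. A rough sketch of how a DbTxnManager-backed transaction manager is normally obtained, assuming the standard TxnManagerFactory entry point and the usual hive.txn.manager / hive.support.concurrency settings (the helper name is illustrative, not copied from TestDbTxnManager):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
import org.apache.hadoop.hive.ql.lockmgr.TxnManagerFactory;

// Obtain a transaction manager that routes locking through the metastore-backed DbLockManager (sketch).
private static HiveTxnManager createDbTxnManager(HiveConf conf) throws Exception {
    conf.set("hive.txn.manager", "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
    conf.set("hive.support.concurrency", "true");
    return TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
}

The ctx that the acquired locks are recorded on is presumably an org.apache.hadoop.hive.ql.Context created over the same configuration.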

Example 29 with QueryPlan

Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache: class TestDbTxnManager, method testLockTimeout.

@Test
public void testLockTimeout() throws Exception {
    addPartitionInput(newTable(true));
    QueryPlan qp = new MockQueryPlan(this);
    // make sure it works with nothing to expire
    testLockExpiration(txnMgr, 0, true);
    // create a few read locks, all on the same resource
    for (int i = 0; i < 5; i++) {
        // No heartbeat
        ((DbTxnManager) txnMgr).acquireLocks(qp, ctx, "PeterI" + i, true);
    }
    testLockExpiration(txnMgr, 5, true);
    // create a lot of locks
    for (int i = 0; i < TEST_TIMED_OUT_TXN_ABORT_BATCH_SIZE + 17; i++) {
        // No heartbeat
        ((DbTxnManager) txnMgr).acquireLocks(qp, ctx, "PeterI" + i, true);
    }
    testLockExpiration(txnMgr, TEST_TIMED_OUT_TXN_ABORT_BATCH_SIZE + 17, true);
    // Create a lock, but send the heartbeat with a long delay. The lock will get expired.
    ((DbTxnManager) txnMgr).acquireLocksWithHeartbeatDelay(qp, ctx, "bob", HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_TXN_TIMEOUT, TimeUnit.MILLISECONDS) * 10);
    testLockExpiration(txnMgr, 1, true);
    // Create a lock and trigger a heartbeat. With heartbeat, the lock won't expire.
    txnMgr.acquireLocks(qp, ctx, "peter");
    testLockExpiration(txnMgr, 1, false);
}
Also used : QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) Test(org.junit.Test)
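
The expiration behaviour exercised here is driven by hive.txn.timeout, which the test reads through HiveConf.getTimeVar above. A short sketch of how that timeout could be shortened so expiration is observable quickly; the helper name is illustrative and the real TestDbTxnManager setup may differ:

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hive.conf.HiveConf;

// Configuration with a very short transaction/lock timeout (sketch).
private static HiveConf shortTxnTimeoutConf() {
    HiveConf conf = new HiveConf();
    // Locks whose owner stops heartbeating for longer than this become eligible for expiration.
    conf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 1, TimeUnit.SECONDS);
    return conf;
}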

Example 30 with QueryPlan

Use of org.apache.hadoop.hive.ql.QueryPlan in project hive by apache: class TestDbTxnManager, method testSingleReadTable.

@Test
public void testSingleReadTable() throws Exception {
    addTableInput();
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    txnMgr.getLockManager().unlock(locks.get(0));
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used : QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) Test(org.junit.Test)
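
Outside the test harness there is no MockQueryPlan: the QueryPlan handed to acquireLocks is the one the Driver produces at compile time. A hedged end-to-end sketch that only combines the APIs already visible in the examples above (the method name, user string, and error handling are illustrative):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;

// Compile a statement, lock what its plan reads and writes, then release the locks (sketch).
void compileAndLock(Driver driver, HiveTxnManager txnMgr, HiveConf conf, String query) throws Exception {
    int rc = driver.compile(query);
    if (rc != 0) {
        throw new IllegalStateException("compile failed, rc=" + rc);
    }
    QueryPlan plan = driver.getPlan();
    Context ctx = new Context(conf);
    // Blocks until all locks derived from the plan's inputs and outputs are granted.
    txnMgr.acquireLocks(plan, ctx, "some-user");
    try {
        // ... execute the query ...
    } finally {
        for (HiveLock lock : ctx.getHiveLocks()) {
            txnMgr.getLockManager().unlock(lock);
        }
    }
}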

Aggregations

QueryPlan (org.apache.hadoop.hive.ql.QueryPlan): 34
Test (org.junit.Test): 21
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 11
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 10
List (java.util.List): 7
Driver (org.apache.hadoop.hive.ql.Driver): 6
IOException (java.io.IOException): 4
LinkedHashMap (java.util.LinkedHashMap): 4
Table (org.apache.hadoop.hive.ql.metadata.Table): 4
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 4
LogHelper (org.apache.hadoop.hive.ql.session.SessionState.LogHelper): 4
FileSystem (org.apache.hadoop.fs.FileSystem): 3
Path (org.apache.hadoop.fs.Path): 3
Context (org.apache.hadoop.hive.ql.Context): 3
TezTask (org.apache.hadoop.hive.ql.exec.tez.TezTask): 3
ArrayList (java.util.ArrayList): 2
HashMap (java.util.HashMap): 2
Map (java.util.Map): 2
LlapIOCounters (org.apache.hadoop.hive.llap.counters.LlapIOCounters): 2
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2