Example 1 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

From class TestDbTxnManager, method addTableOutput:

// Helper: registers a table as a query output with the given write type and
// returns the resulting WriteEntity; newTable(false) builds an unpartitioned test table.
private WriteEntity addTableOutput(WriteEntity.WriteType writeType) {
    WriteEntity we = new WriteEntity(newTable(false), writeType);
    writeEntities.add(we);
    return we;
}
Also used: WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)

Example 2 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

From class TestDbTxnManager, method testRollback:

@Test
public void testRollback() throws Exception {
    // A DELETE output should acquire a single lock; rolling back the transaction must release it.
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DELETE);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.openTxn(ctx, "fred");
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    txnMgr.rollbackTxn();
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used: QueryPlan (org.apache.hadoop.hive.ql.QueryPlan), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), Test (org.junit.Test)

Example 3 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

From class TestDbTxnManager, method testReadWrite:

@Test
public void testReadWrite() throws Exception {
    // Three partition inputs plus one INSERT output should yield one lock with four components.
    Table t = newTable(true);
    addPartitionInput(t);
    addPartitionInput(t);
    addPartitionInput(t);
    WriteEntity we = addTableOutput(WriteEntity.WriteType.INSERT);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.openTxn(ctx, "fred");
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(4, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    txnMgr.commitTxn();
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used: Table (org.apache.hadoop.hive.ql.metadata.Table), QueryPlan (org.apache.hadoop.hive.ql.QueryPlan), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), Test (org.junit.Test)
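
Example 3 above also calls addPartitionInput(t), a TestDbTxnManager helper that these search results do not reproduce. A minimal sketch of what such a helper might look like, assuming a readEntities collection analogous to writeEntities in Example 1 and a hypothetical newPartition(t) test helper (the real method in TestDbTxnManager may differ):

private ReadEntity addPartitionInput(Table t) throws Exception {
    // Hypothetical sketch: register one partition of the table as a query input,
    // mirroring how addTableOutput in Example 1 registers a WriteEntity output.
    ReadEntity re = new ReadEntity(newPartition(t)); // newPartition(t) is an assumed helper
    readEntities.add(re);                            // assumed collection, analogous to writeEntities
    return re;
}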

Example 4 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

From class TestDbTxnManager, method testWriteDynamicPartition:

@Test
public void testWriteDynamicPartition() throws Exception {
    // An INSERT with dynamic partitioning should lock the whole table, not individual partitions.
    WriteEntity we = addDynamicPartitionedOutput(newTable(true), WriteEntity.WriteType.INSERT);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.openTxn(ctx, "fred");
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    /*Assert.assertEquals(1,
        TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    */
    // Make sure we're locking the whole table, since this is dynamic partitioning
    ShowLocksResponse rsp = ((DbLockManager) txnMgr.getLockManager()).getLocks();
    List<ShowLocksResponseElement> elms = rsp.getLocks();
    Assert.assertEquals(1, elms.size());
    Assert.assertNotNull(elms.get(0).getTablename());
    Assert.assertNull(elms.get(0).getPartname());
    txnMgr.commitTxn();
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used: QueryPlan (org.apache.hadoop.hive.ql.QueryPlan), ShowLocksResponse (org.apache.hadoop.hive.metastore.api.ShowLocksResponse), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), ShowLocksResponseElement (org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement), Test (org.junit.Test)

Example 5 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

From class TestDbTxnManager, method testDDLNoLock:

@Test
public void testDDLNoLock() throws Exception {
    // DDL_NO_LOCK writes should not acquire any locks at all.
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DDL_NO_LOCK);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertNull(locks);
}
Also used: QueryPlan (org.apache.hadoop.hive.ql.QueryPlan), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), Test (org.junit.Test)

Aggregations

WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 77 usages
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 33 usages
Table (org.apache.hadoop.hive.ql.metadata.Table): 33 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 22 usages
Partition (org.apache.hadoop.hive.ql.metadata.Partition): 21 usages
ArrayList (java.util.ArrayList): 14 usages
AlterTableExchangePartition (org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition): 12 usages
DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 12 usages
Referenceable (org.apache.atlas.typesystem.Referenceable): 11 usages
Path (org.apache.hadoop.fs.Path): 11 usages
Test (org.junit.Test): 11 usages
QueryPlan (org.apache.hadoop.hive.ql.QueryPlan): 10 usages
Test (org.testng.annotations.Test): 9 usages
HashMap (java.util.HashMap): 8 usages
LinkedHashMap (java.util.LinkedHashMap): 8 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 8 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 7 usages
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 7 usages
IOException (java.io.IOException): 6 usages
Map (java.util.Map): 6 usages
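
Taken together, the examples above share one pattern: a WriteEntity is registered as a query output, the transaction manager acquires locks for the plan, and committing or rolling back the transaction releases them. The following condensed sketch is assembled from the snippets above rather than copied from any single test, and reuses the same helpers they rely on (newTable, MockQueryPlan, txnMgr, ctx):

// Condensed from Examples 1-3 above; not an exact excerpt from TestDbTxnManager.
WriteEntity we = new WriteEntity(newTable(false), WriteEntity.WriteType.INSERT);
writeEntities.add(we);                        // register the output consumed by MockQueryPlan
QueryPlan qp = new MockQueryPlan(this);
txnMgr.openTxn(ctx, "fred");                  // open a transaction for user "fred"
txnMgr.acquireLocks(qp, ctx, "fred");         // DbTxnManager records a lock for the INSERT
txnMgr.commitTxn();                           // commitTxn() (or rollbackTxn()) releases it
Assert.assertEquals(0, txnMgr.getLockManager().getLocks(false, false).size());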