Example 61 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

The class TestDbTxnManager, method testDDLExclusive.

@Test
public void testDDLExclusive() throws Exception {
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DDL_EXCLUSIVE);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
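    // A DDL_EXCLUSIVE output is expected to yield exactly one lock, with a
    // single lock component recorded in the lock database.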
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    txnMgr.getLockManager().unlock(locks.get(0));
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used: QueryPlan (org.apache.hadoop.hive.ql.QueryPlan), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), Test (org.junit.Test)
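
The addTableOutput helper called by this test is not shown on this page. A plausible reconstruction, following the same pattern as addDynamicPartitionedOutput in Example 63 (the newTable(false) call is an assumption, mirroring the newTable(true) call in Example 64):

private WriteEntity addTableOutput(WriteEntity.WriteType writeType) throws Exception {
    // Assumed: newTable(false) creates a non-partitioned test table.
    Table t = newTable(false);
    WriteEntity we = new WriteEntity(t, writeType);
    writeEntities.add(we);
    return we;
}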

Example 62 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

The class TestDbTxnManager, method testDDLShared.

@Test
public void testDDLShared() throws Exception {
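    // Same flow as testDDLExclusive above, but requesting a shared DDL lock.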
    WriteEntity we = addTableOutput(WriteEntity.WriteType.DDL_SHARED);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    txnMgr.getLockManager().unlock(locks.get(0));
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used: QueryPlan (org.apache.hadoop.hive.ql.QueryPlan), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), Test (org.junit.Test)

Example 63 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

The class TestDbTxnManager, method addDynamicPartitionedOutput.

private WriteEntity addDynamicPartitionedOutput(Table t, WriteEntity.WriteType writeType) throws Exception {
    DummyPartition dp = new DummyPartition(t, "no clue what I should call this");
    WriteEntity we = new WriteEntity(dp, writeType, false);
    writeEntities.add(we);
    return we;
}
Also used: DummyPartition (org.apache.hadoop.hive.ql.metadata.DummyPartition), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
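
A hypothetical caller of this helper, modeled on the single-partition test in Example 64 (the test name and the expected lock count are illustrative assumptions, not the project's actual dynamic-partition test):

@Test
public void testWriteDynamicPartition() throws Exception {
    WriteEntity we = addDynamicPartitionedOutput(newTable(true), WriteEntity.WriteType.INSERT);
    QueryPlan qp = new MockQueryPlan(this);
    txnMgr.openTxn(ctx, "fred");
    txnMgr.acquireLocks(qp, ctx, "fred");
    // One lock is expected for the dynamic-partition write.
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    // Committing the transaction should release the lock.
    txnMgr.commitTxn();
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}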

Example 64 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

The class TestDbTxnManager, method testSingleWritePartition.

@Test
public void testSingleWritePartition() throws Exception {
    WriteEntity we = addPartitionOutput(newTable(true), WriteEntity.WriteType.INSERT);
    QueryPlan qp = new MockQueryPlan(this);
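    // Unlike the DDL tests, a transactional INSERT opens a transaction first;
    // the commitTxn() call below is what releases the acquired lock.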
    txnMgr.openTxn(ctx, "fred");
    txnMgr.acquireLocks(qp, ctx, "fred");
    List<HiveLock> locks = ctx.getHiveLocks();
    Assert.assertEquals(1, locks.size());
    Assert.assertEquals(1, TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
    txnMgr.commitTxn();
    locks = txnMgr.getLockManager().getLocks(false, false);
    Assert.assertEquals(0, locks.size());
}
Also used: QueryPlan (org.apache.hadoop.hive.ql.QueryPlan), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), Test (org.junit.Test)
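
The addPartitionOutput helper used above is likewise not shown in this excerpt. A sketch of what it plausibly looks like, following the pattern of addDynamicPartitionedOutput in Example 63; the partition spec and location below are invented for illustration:

private WriteEntity addPartitionOutput(Table t, WriteEntity.WriteType writeType) throws Exception {
    // Hypothetical partition spec and location, for illustration only.
    Map<String, String> partSpec = new HashMap<>();
    partSpec.put("ds", "today");
    Partition p = new Partition(t, partSpec, new Path("/tmp/warehouse/t/ds=today"));
    WriteEntity we = new WriteEntity(p, writeType);
    writeEntities.add(we);
    return we;
}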

Example 65 with WriteEntity

Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache.

The class DDLTask, method dropPartitions.

private void dropPartitions(Hive db, Table tbl, DropTableDesc dropTbl) throws HiveException {
    ReplicationSpec replicationSpec = dropTbl.getReplicationSpec();
    if (replicationSpec.isInReplicationScope()) {
        // In replication scope, drop only those partitions into which the
        // replication event is allowed to replace data (see the
        // allowEventReplacementInto() filter below).
        for (DropTableDesc.PartSpec partSpec : dropTbl.getPartSpecs()) {
            try {
                for (Partition p : Iterables.filter(db.getPartitionsByFilter(tbl, partSpec.getPartSpec().getExprString()), replicationSpec.allowEventReplacementInto())) {
                    db.dropPartition(tbl.getDbName(), tbl.getTableName(), p.getValues(), true);
                }
            } catch (NoSuchObjectException e) {
            // ignore NSOE because that means there's nothing to drop.
            } catch (Exception e) {
                throw new HiveException(e.getMessage(), e);
            }
        }
        return;
    }
    // ifExists is currently verified in DDLSemanticAnalyzer
    List<Partition> droppedParts = db.dropPartitions(dropTbl.getTableName(), dropTbl.getPartSpecs(), PartitionDropOptions.instance().deleteData(true).ifExists(true).purgeData(dropTbl.getIfPurge()));
    for (Partition partition : droppedParts) {
        console.printInfo("Dropped the partition " + partition.getName());
        // We have already locked the table, don't lock the partitions.
        addIfAbsentByName(new WriteEntity(partition, WriteEntity.WriteType.DDL_NO_LOCK));
    }
}
Also used: Partition (org.apache.hadoop.hive.ql.metadata.Partition), AlterTableExchangePartition (org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition), ReplicationSpec (org.apache.hadoop.hive.ql.parse.ReplicationSpec), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), DropTableDesc (org.apache.hadoop.hive.ql.plan.DropTableDesc), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException), InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException), IOException (java.io.IOException), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), URISyntaxException (java.net.URISyntaxException), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException), SQLException (java.sql.SQLException), FileNotFoundException (java.io.FileNotFoundException), HiveAuthzPluginException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException), InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)
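
The addIfAbsentByName call above registers each dropped partition as an output while skipping entities whose name is already tracked. A sketch of that dedup idea (an assumption-level illustration, not the actual Hive implementation, which is not shown on this page; the outputs field is assumed to be the task's set of WriteEntity outputs):

private void addIfAbsentByName(WriteEntity newEntity) {
    for (WriteEntity existing : outputs) {
        if (existing.getName().equalsIgnoreCase(newEntity.getName())) {
            // An entity with this name is already registered; keep the first one.
            return;
        }
    }
    outputs.add(newEntity);
}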

Aggregations

WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 77 uses
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 33 uses
Table (org.apache.hadoop.hive.ql.metadata.Table): 33 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 22 uses
Partition (org.apache.hadoop.hive.ql.metadata.Partition): 21 uses
ArrayList (java.util.ArrayList): 14 uses
AlterTableExchangePartition (org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition): 12 uses
DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 12 uses
Referenceable (org.apache.atlas.typesystem.Referenceable): 11 uses
Path (org.apache.hadoop.fs.Path): 11 uses
Test (org.junit.Test): 11 uses
QueryPlan (org.apache.hadoop.hive.ql.QueryPlan): 10 uses
Test (org.testng.annotations.Test): 9 uses
HashMap (java.util.HashMap): 8 uses
LinkedHashMap (java.util.LinkedHashMap): 8 uses
Database (org.apache.hadoop.hive.metastore.api.Database): 8 uses
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 7 uses
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 7 uses
IOException (java.io.IOException): 6 uses
Map (java.util.Map): 6 uses