Search in sources :

Example 76 with TableBuilder

Usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

From the class TestMetaStoreMultipleEncryptionZones, method dropTableWithDifferentEncryptionZonesDifferentKey.

/**
 * Drops two tables that live in two different HDFS encryption zones backed by
 * two different keys, and verifies that both drops succeed without an
 * "encryption zone" MetaException, that the data files are removed, and that
 * the tables no longer exist in the metastore.
 */
@Test
public void dropTableWithDifferentEncryptionZonesDifferentKey() throws Throwable {
    final String firstDb = "encrdbdiffkey1";
    final String secondDb = "encrdbdiffkey2";
    final String firstTable = "encrtbl1";
    final String secondTable = "encrtbl2";
    final String typeName = "Person";

    // Start from a clean slate, then create both source databases.
    silentDropDatabase(firstDb);
    silentDropDatabase(secondDb);
    new DatabaseBuilder()
            .setName(firstDb)
            .addParam("repl.source.for", "1, 2, 3")
            .create(client, hiveConf);
    new DatabaseBuilder()
            .setName(secondDb)
            .addParam("repl.source.for", "1, 2, 3")
            .create(client, hiveConf);

    // Register the column type shared by both tables.
    client.dropType(typeName);
    Type personType = new Type();
    personType.setName(typeName);
    personType.setFields(new ArrayList<>(2));
    personType.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    personType.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(personType);

    // First database directory sits in an encryption zone keyed by test_key_db.
    Path firstDbDir = new Path(warehouse.getWhRoot(), firstDb + ".db");
    warehouseFs.delete(firstDbDir, true);
    warehouseFs.mkdirs(firstDbDir);
    EncryptionZoneUtils.createEncryptionZone(firstDbDir, "test_key_db", conf);
    Path firstTableDir = new Path(firstDbDir, firstTable);
    warehouseFs.mkdirs(firstTableDir);
    Path firstDataFile = new Path(firstTableDir, "part1");
    createFile(firstDataFile, "testClearer11");

    // Second database directory uses a different key, test_key_cm.
    Path secondDbDir = new Path(warehouse.getWhRoot(), secondDb + ".db");
    warehouseFs.delete(secondDbDir, true);
    warehouseFs.mkdirs(secondDbDir);
    EncryptionZoneUtils.createEncryptionZone(secondDbDir, "test_key_cm", conf);
    Path secondTableDir = new Path(secondDbDir, secondTable);
    warehouseFs.mkdirs(secondTableDir);
    Path secondDataFile = new Path(secondTableDir, "part1");
    createFile(secondDataFile, "testClearer12");

    // Create both tables in the metastore.
    new TableBuilder()
            .setDbName(firstDb)
            .setTableName(firstTable)
            .setCols(personType.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl = client.getTable(firstDb, firstTable);
    Assert.assertNotNull(tbl);
    new TableBuilder()
            .setDbName(secondDb)
            .setTableName(secondTable)
            .setCols(personType.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);

    // Drop the first table: no MetaException expected, data file gone,
    // table gone from the metastore.
    boolean exceptionThrown = false;
    try {
        client.dropTable(firstDb, firstTable);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(firstDataFile));
    try {
        client.getTable(firstDb, firstTable);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);

    // Same expectations for the second table in the other encryption zone.
    exceptionThrown = false;
    try {
        client.dropTable(secondDb, secondTable);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(secondDataFile));
    try {
        client.getTable(secondDb, secondTable);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
}
Also used : Path(org.apache.hadoop.fs.Path) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Type(org.apache.hadoop.hive.metastore.api.Type) RecycleType(org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)

Example 77 with TableBuilder

Usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

From the class TestMetaStoreMultipleEncryptionZones, method dropTableWithoutEncryptionZonesForCm.

/**
 * Drops a managed table whose data directory is NOT inside any encryption
 * zone and verifies the drop succeeds, the data file is deleted, and the
 * table is removed from the metastore.
 */
@Test
public void dropTableWithoutEncryptionZonesForCm() throws Throwable {
    final String dbName = "simpdb1";
    final String tableName = "simptbl";
    final String typeName = "Person";

    // Fresh database marked as a replication source.
    silentDropDatabase(dbName);
    new DatabaseBuilder()
            .setName(dbName)
            .addParam("repl.source.for", "1, 2, 3")
            .create(client, hiveConf);

    // Register the column type used by the table.
    client.dropType(typeName);
    Type personType = new Type();
    personType.setName(typeName);
    personType.setFields(new ArrayList<>(2));
    personType.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    personType.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(personType);

    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tableName)
            .setCols(personType.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl = client.getTable(dbName, tableName);
    Assert.assertNotNull(tbl);

    // Lay down a data file under the table directory (no encryption zone).
    Path dbDir = new Path(warehouse.getWhRoot(), dbName + ".db");
    warehouseFs.mkdirs(dbDir);
    Path tableDir = new Path(dbDir, tableName);
    warehouseFs.mkdirs(tableDir);
    Path dataFile = new Path(tableDir, "part1");
    createFile(dataFile, "testClearer11");

    // Drop must succeed and remove the data file.
    boolean exceptionThrown = false;
    try {
        client.dropTable(dbName, tableName);
    } catch (Exception e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(dataFile));

    // The table must no longer be visible in the metastore.
    try {
        client.getTable(dbName, tableName);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
}
Also used : Path(org.apache.hadoop.fs.Path) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Type(org.apache.hadoop.hive.metastore.api.Type) RecycleType(org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) TException(org.apache.thrift.TException) IOException(java.io.IOException) RemoteException(org.apache.hadoop.ipc.RemoteException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) Test(org.junit.Test)

Example 78 with TableBuilder

Usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

From the class TestMetaStoreMultipleEncryptionZones, method dropExternalTableWithSameEncryptionZonesForCm.

/**
 * Drops two external, purge-enabled tables that share one HDFS encryption
 * zone and verifies both drops succeed, both data files are deleted
 * (external.table.purge=true forces data removal), and both tables are gone
 * from the metastore.
 */
@Test
public void dropExternalTableWithSameEncryptionZonesForCm() throws Throwable {
    String dbName = "encrdb4";
    String tblName1 = "encrtbl1";
    String tblName2 = "encrtbl2";
    String typeName = "Person";
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).addParam("repl.source.for", "1, 2, 3").create(client, hiveConf);
    // Register the column type shared by both tables.
    client.dropType(typeName);
    Type typ1 = new Type();
    typ1.setName(typeName);
    typ1.setFields(new ArrayList<>(2));
    typ1.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    typ1.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(typ1);
    // Both tables are external with external.table.purge=true, so dropping
    // them is expected to delete their data directories as well.
    new TableBuilder().setDbName(dbName).setTableName(tblName1).setCols(typ1.getFields()).setNumBuckets(1).addBucketCol("name").addTableParam("EXTERNAL", "true").addTableParam("external.table.purge", "true").addStorageDescriptorParam("test_param_1", "Use this for comments etc").create(client, hiveConf);
    Table tbl = client.getTable(dbName, tblName1);
    Assert.assertNotNull(tbl);
    new TableBuilder().setDbName(dbName).setTableName(tblName2).setCols(typ1.getFields()).setNumBuckets(1).addBucketCol("name").addTableParam("EXTERNAL", "true").addTableParam("external.table.purge", "true").addStorageDescriptorParam("test_param_1", "Use this for comments etc").create(client, hiveConf);
    // Recreate the database directory inside a single encryption zone and
    // put one data file under each table directory.
    Path dirDb = new Path(warehouse.getWhRoot(), dbName + ".db");
    warehouseFs.delete(dirDb, true);
    warehouseFs.mkdirs(dirDb);
    EncryptionZoneUtils.createEncryptionZone(dirDb, "test_key_db", conf);
    Path dirTbl1 = new Path(dirDb, tblName1);
    warehouseFs.mkdirs(dirTbl1);
    Path part11 = new Path(dirTbl1, "part1");
    createFile(part11, "testClearer11");
    Path dirTbl2 = new Path(dirDb, tblName2);
    warehouseFs.mkdirs(dirTbl2);
    Path part12 = new Path(dirTbl2, "part1");
    createFile(part12, "testClearer12");
    // Drop the first table: no MetaException, data file removed, table gone.
    boolean exceptionThrown = false;
    try {
        client.dropTable(dbName, tblName1);
    } catch (MetaException e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part11));
    try {
        client.getTable(dbName, tblName1);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
    // Drop the second table with the same expectations.
    exceptionThrown = false;
    try {
        client.dropTable(dbName, tblName2);
    } catch (MetaException e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
    // BUGFIX: previously re-checked part11 here; the second drop must be
    // verified against the second table's data file, part12.
    assertFalse(warehouseFs.exists(part12));
    try {
        client.getTable(dbName, tblName2);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
}
Also used : Path(org.apache.hadoop.fs.Path) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Type(org.apache.hadoop.hive.metastore.api.Type) RecycleType(org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)

Example 79 with TableBuilder

Usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

From the class TestMetaStoreMultipleEncryptionZones, method truncateTableWithoutEncryptionZonesForCm.

/**
 * Truncates a table whose data directory is NOT inside an encryption zone
 * and verifies the truncate succeeds, the data file is deleted, and — unlike
 * a drop — the table itself still exists in the metastore afterwards.
 */
@Test
public void truncateTableWithoutEncryptionZonesForCm() throws Throwable {
    final String dbName = "simpdb3";
    final String tableName = "simptbl";
    final String typeName = "Person";

    // Clean up any leftovers from a previous run.
    client.dropTable(dbName, tableName);
    silentDropDatabase(dbName);
    new DatabaseBuilder()
            .setName(dbName)
            .addParam("repl.source.for", "1, 2, 3")
            .create(client, hiveConf);

    // Register the column type used by the table.
    client.dropType(typeName);
    Type personType = new Type();
    personType.setName(typeName);
    personType.setFields(new ArrayList<>(2));
    personType.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    personType.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(personType);

    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tableName)
            .setCols(personType.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl2 = client.getTable(dbName, tableName);
    Assert.assertNotNull(tbl2);

    // Lay down a data file under the table directory (no encryption zone).
    Path dbDir = new Path(warehouse.getWhRoot(), dbName + ".db");
    warehouseFs.mkdirs(dbDir);
    Path tableDir = new Path(dbDir, tableName);
    warehouseFs.mkdirs(tableDir);
    Path dataFile = new Path(tableDir, "part1");
    createFile(dataFile, "testClearer11");

    // Truncate must succeed and remove the data file.
    boolean exceptionThrown = false;
    try {
        client.truncateTable(dbName, tableName, null);
    } catch (Exception e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(dataFile));

    // The table must still exist after truncation, so no
    // NoSuchObjectException is expected here.
    try {
        client.getTable(dbName, tableName);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
}
Also used : Path(org.apache.hadoop.fs.Path) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Type(org.apache.hadoop.hive.metastore.api.Type) RecycleType(org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) TException(org.apache.thrift.TException) IOException(java.io.IOException) RemoteException(org.apache.hadoop.ipc.RemoteException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) Test(org.junit.Test)

Example 80 with TableBuilder

Usage of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

From the class TestMetaStoreMultipleEncryptionZones, method dropTableWithSameEncryptionZones.

/**
 * Drops two managed tables that share a single HDFS encryption zone and
 * verifies both drops succeed without an "encryption zone" MetaException,
 * both data files are removed, and both tables are gone from the metastore.
 */
@Test
public void dropTableWithSameEncryptionZones() throws Throwable {
    final String dbName = "encrdb3";
    final String firstTable = "encrtbl1";
    final String secondTable = "encrtbl2";
    final String typeName = "Person";

    // Fresh database marked as a replication source.
    silentDropDatabase(dbName);
    new DatabaseBuilder()
            .setName(dbName)
            .addParam("repl.source.for", "1, 2, 3")
            .create(client, hiveConf);

    // Register the column type shared by both tables.
    client.dropType(typeName);
    Type personType = new Type();
    personType.setName(typeName);
    personType.setFields(new ArrayList<>(2));
    personType.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    personType.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(personType);

    // Create both tables in the metastore.
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(firstTable)
            .setCols(personType.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl = client.getTable(dbName, firstTable);
    Assert.assertNotNull(tbl);
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(secondTable)
            .setCols(personType.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);

    // Recreate the database directory inside one encryption zone and put a
    // data file under each table directory.
    Path dbDir = new Path(warehouse.getWhRoot(), dbName + ".db");
    warehouseFs.delete(dbDir, true);
    warehouseFs.mkdirs(dbDir);
    EncryptionZoneUtils.createEncryptionZone(dbDir, "test_key_db", conf);
    Path firstTableDir = new Path(dbDir, firstTable);
    warehouseFs.mkdirs(firstTableDir);
    Path firstDataFile = new Path(firstTableDir, "part1");
    createFile(firstDataFile, "testClearer11");
    Path secondTableDir = new Path(dbDir, secondTable);
    warehouseFs.mkdirs(secondTableDir);
    Path secondDataFile = new Path(secondTableDir, "part1");
    createFile(secondDataFile, "testClearer12");

    // Drop the first table: no MetaException expected, data file gone,
    // table gone from the metastore.
    boolean exceptionThrown = false;
    try {
        client.dropTable(dbName, firstTable);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(firstDataFile));
    try {
        client.getTable(dbName, firstTable);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);

    // Same expectations for the second table in the same encryption zone.
    exceptionThrown = false;
    try {
        client.dropTable(dbName, secondTable);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(secondDataFile));
    try {
        client.getTable(dbName, secondTable);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
}
Also used : Path(org.apache.hadoop.fs.Path) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Type(org.apache.hadoop.hive.metastore.api.Type) RecycleType(org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)136 Table (org.apache.hadoop.hive.metastore.api.Table)111 Test (org.junit.Test)92 DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder)81 Database (org.apache.hadoop.hive.metastore.api.Database)40 Partition (org.apache.hadoop.hive.metastore.api.Partition)36 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)35 PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder)33 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)31 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)30 ArrayList (java.util.ArrayList)28 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)27 SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable)25 CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder)23 Path (org.apache.hadoop.fs.Path)19 Catalog (org.apache.hadoop.hive.metastore.api.Catalog)19 Type (org.apache.hadoop.hive.metastore.api.Type)19 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)17 TException (org.apache.thrift.TException)16 IOException (java.io.IOException)15