
Example 6 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From the class TestMetaStoreMultipleEncryptionZones, method dropTableWithDifferentEncryptionZonesSameKey.

@Test
public void dropTableWithDifferentEncryptionZonesSameKey() throws Throwable {
    String dbName1 = "encrdbsamekey1";
    String dbName2 = "encrdbsamekey2";
    String tblName1 = "encrtbl1";
    String tblName2 = "encrtbl2";
    String typeName = "Person";
    silentDropDatabase(dbName1);
    silentDropDatabase(dbName2);
    new DatabaseBuilder().setName(dbName1).addParam("repl.source.for", "1, 2, 3").create(client, hiveConf);
    new DatabaseBuilder().setName(dbName2).addParam("repl.source.for", "1, 2, 3").create(client, hiveConf);
    client.dropType(typeName);
    Type typ1 = new Type();
    typ1.setName(typeName);
    typ1.setFields(new ArrayList<>(2));
    typ1.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    typ1.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(typ1);
    Path dirDb1 = new Path(warehouse.getWhRoot(), dbName1 + ".db");
    warehouseFs.mkdirs(dirDb1);
    EncryptionZoneUtils.createEncryptionZone(dirDb1, "test_key_db", conf);
    Path dirTbl1 = new Path(dirDb1, tblName1);
    warehouseFs.mkdirs(dirTbl1);
    Path part11 = new Path(dirTbl1, "part1");
    createFile(part11, "testClearer11");
    Path dirDb2 = new Path(warehouse.getWhRoot(), dbName2 + ".db");
    warehouseFs.mkdirs(dirDb2);
    EncryptionZoneUtils.createEncryptionZone(dirDb2, "test_key_db", conf);
    Path dirTbl2 = new Path(dirDb2, tblName2);
    warehouseFs.mkdirs(dirTbl2);
    Path part12 = new Path(dirTbl2, "part1");
    createFile(part12, "testClearer12");
    new TableBuilder()
            .setDbName(dbName1)
            .setTableName(tblName1)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl = client.getTable(dbName1, tblName1);
    Assert.assertNotNull(tbl);
    new TableBuilder()
            .setDbName(dbName2)
            .setTableName(tblName2)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    boolean exceptionThrown = false;
    try {
        client.dropTable(dbName1, tblName1);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part11));
    try {
        client.getTable(dbName1, tblName1);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
    exceptionThrown = false;
    try {
        client.dropTable(dbName2, tblName2);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part12));
    try {
        client.getTable(dbName2, tblName2);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
}
Also used: Path (org.apache.hadoop.fs.Path), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Type (org.apache.hadoop.hive.metastore.api.Type), RecycleType (org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType), Table (org.apache.hadoop.hive.metastore.api.Table), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), Test (org.junit.Test)
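
All of these examples create tables through the same fluent TableBuilder chain. A minimal sketch of that pattern, reusing the Person column list (typ1) from the test above; the database and table names here are illustrative, not taken from the test:

    // Minimal TableBuilder usage as exercised by these tests; names and
    // parameter values are illustrative, not required by the API.
    new TableBuilder()
            .setDbName("somedb")               // database must already exist
            .setTableName("sometbl")
            .setCols(typ1.getFields())         // columns from the Person type above
            .setNumBuckets(1)                  // bucketed into a single bucket
            .addBucketCol("name")              // bucketed on the name column
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);         // registers the table in the metastore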

Example 7 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From the class TestMetaStoreMultipleEncryptionZones, method truncateTableWithDifferentEncryptionZones.

@Test
public void truncateTableWithDifferentEncryptionZones() throws Throwable {
    String dbName1 = "encrdbtrunc1";
    String dbName2 = "encrdbtrunc2";
    String tblName1 = "encrtbl1";
    String tblName2 = "encrtbl2";
    String typeName = "Person";
    silentDropDatabase(dbName1);
    silentDropDatabase(dbName2);
    new DatabaseBuilder().setName(dbName1).addParam("repl.source.for", "1, 2, 3").create(client, hiveConf);
    new DatabaseBuilder().setName(dbName2).addParam("repl.source.for", "1, 2, 3").create(client, hiveConf);
    client.dropType(typeName);
    Type typ1 = new Type();
    typ1.setName(typeName);
    typ1.setFields(new ArrayList<>(2));
    typ1.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    typ1.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(typ1);
    Path dirDb1 = new Path(warehouse.getWhRoot(), dbName1 + ".db");
    warehouseFs.delete(dirDb1, true);
    warehouseFs.mkdirs(dirDb1);
    EncryptionZoneUtils.createEncryptionZone(dirDb1, "test_key_db", conf);
    Path dirTbl1 = new Path(dirDb1, tblName1);
    warehouseFs.mkdirs(dirTbl1);
    Path part11 = new Path(dirTbl1, "part1");
    createFile(part11, "testClearer11");
    Path dirDb2 = new Path(warehouse.getWhRoot(), dbName2 + ".db");
    warehouseFs.delete(dirDb2, true);
    warehouseFs.mkdirs(dirDb2);
    EncryptionZoneUtils.createEncryptionZone(dirDb2, "test_key_db", conf);
    Path dirTbl2 = new Path(dirDb2, tblName2);
    warehouseFs.mkdirs(dirTbl2);
    Path part12 = new Path(dirTbl2, "part1");
    createFile(part12, "testClearer12");
    new TableBuilder()
            .setDbName(dbName1)
            .setTableName(tblName1)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl = client.getTable(dbName1, tblName1);
    Assert.assertNotNull(tbl);
    new TableBuilder()
            .setDbName(dbName2)
            .setTableName(tblName2)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    boolean exceptionThrown = false;
    try {
        client.truncateTable(dbName1, tblName1, null);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part11));
    assertNotNull(client.getTable(dbName1, tblName1));
    exceptionThrown = false;
    try {
        client.truncateTable(dbName2, tblName2, null);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part12));
    assertNotNull(client.getTable(dbName2, tblName2));
}
Also used: Path (org.apache.hadoop.fs.Path), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Type (org.apache.hadoop.hive.metastore.api.Type), RecycleType (org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType), Table (org.apache.hadoop.hive.metastore.api.Table), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), Test (org.junit.Test)
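
Every test in this class repeats the same Person type setup verbatim. A hedged refactoring sketch that factors it out; the helper name createPersonType is ours, not part of TestMetaStoreMultipleEncryptionZones, and it assumes the client field is an IMetaStoreClient:

    // Hypothetical helper for the repeated type setup; the name and
    // signature are illustrative only.
    private static Type createPersonType(IMetaStoreClient client, String typeName) throws Exception {
        client.dropType(typeName);             // remove any stale definition first
        Type typ = new Type();
        typ.setName(typeName);
        typ.setFields(new ArrayList<>(2));
        typ.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
        typ.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
        client.createType(typ);
        return typ;
    }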

Example 8 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From the class TestMetaStoreMultipleEncryptionZones, method dropTableWithTableAtEncryptionZoneRoot.

@Test
public void dropTableWithTableAtEncryptionZoneRoot() throws Throwable {
    String dbName = "encrdbroot";
    String tblName1 = "encrtbl1";
    String tblName2 = "encrtbl2";
    String typeName = "Person";
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).addParam("repl.source.for", "1, 2, 3").create(client, hiveConf);
    client.dropType(typeName);
    Type typ1 = new Type();
    typ1.setName(typeName);
    typ1.setFields(new ArrayList<>(2));
    typ1.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    typ1.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(typ1);
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName1)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl = client.getTable(dbName, tblName1);
    Assert.assertNotNull(tbl);
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName2)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Path dirDb = new Path(warehouse.getWhRoot(), dbName + ".db");
    warehouseFs.mkdirs(dirDb);
    Path dirTbl1 = new Path(dirDb, tblName1);
    warehouseFs.mkdirs(dirTbl1);
    EncryptionZoneUtils.createEncryptionZone(dirTbl1, "test_key_db", conf);
    Path part11 = new Path(dirTbl1, "part1");
    createFile(part11, "testClearer11");
    Path dirTbl2 = new Path(dirDb, tblName2);
    warehouseFs.mkdirs(dirTbl2);
    EncryptionZoneUtils.createEncryptionZone(dirTbl2, "test_key_cm", conf);
    Path part12 = new Path(dirTbl2, "part1");
    createFile(part12, "testClearer12");
    boolean exceptionThrown = false;
    try {
        client.dropTable(dbName, tblName1);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part11));
    try {
        client.getTable(dbName, tblName1);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
    exceptionThrown = false;
    try {
        client.dropTable(dbName, tblName2);
    } catch (MetaException e) {
        exceptionThrown = true;
        assertTrue(e.getMessage().contains("can't be moved from encryption zone"));
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part12));
    try {
        client.getTable(dbName, tblName2);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
    assertTrue(warehouseFs.exists(new Path(dirTbl1, cmrootEncrypted)));
    assertTrue(warehouseFs.exists(new Path(dirTbl2, cmrootEncrypted)));
}
Also used: Path (org.apache.hadoop.fs.Path), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Type (org.apache.hadoop.hive.metastore.api.Type), RecycleType (org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType), Table (org.apache.hadoop.hive.metastore.api.Table), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), Test (org.junit.Test)
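
The exceptionThrown flag with a try/catch works on any JUnit 4 release. On JUnit 4.13 or later the same negative check can be written more compactly with Assert.assertThrows; a sketch, assuming that version is on the classpath:

    // Compact equivalent of the flag-based NoSuchObjectException check above;
    // requires JUnit 4.13+.
    Assert.assertThrows(NoSuchObjectException.class,
            () -> client.getTable(dbName, tblName1));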

Example 9 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From the class TestMetaStoreMultipleEncryptionZones, method dropExternalTableWithDifferentEncryptionZones.

@Test
public void dropExternalTableWithDifferentEncryptionZones() throws Throwable {
    String dbName = "encrdb5";
    String tblName1 = "encrtbl1";
    String tblName2 = "encrtbl2";
    String typeName = "Person";
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).addParam("repl.source.for", "1, 2, 3").create(client, hiveConf);
    client.dropType(typeName);
    Type typ1 = new Type();
    typ1.setName(typeName);
    typ1.setFields(new ArrayList<>(2));
    typ1.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    typ1.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(typ1);
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName1)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addTableParam("EXTERNAL", "true")
            .addTableParam("external.table.purge", "true")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl = client.getTable(dbName, tblName1);
    Assert.assertNotNull(tbl);
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName2)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addTableParam("EXTERNAL", "true")
            .addTableParam("external.table.purge", "true")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Path dirDb = new Path(warehouse.getWhRoot(), dbName + ".db");
    warehouseFs.mkdirs(dirDb);
    Path dirTbl1 = new Path(dirDb, tblName1);
    warehouseFs.mkdirs(dirTbl1);
    EncryptionZoneUtils.createEncryptionZone(dirTbl1, "test_key_db", conf);
    Path part11 = new Path(dirTbl1, "part1");
    createFile(part11, "testClearer11");
    Path dirTbl2 = new Path(dirDb, tblName2);
    warehouseFs.mkdirs(dirTbl2);
    EncryptionZoneUtils.createEncryptionZone(dirTbl2, "test_key_db", conf);
    Path part12 = new Path(dirTbl2, "part1");
    createFile(part12, "testClearer12");
    boolean exceptionThrown = false;
    try {
        client.dropTable(dbName, tblName1);
    } catch (MetaException e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part11));
    try {
        client.getTable(dbName, tblName1);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
    exceptionThrown = false;
    try {
        client.dropTable(dbName, tblName2);
    } catch (MetaException e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part12));
    try {
        client.getTable(dbName, tblName2);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
}
Also used: Path (org.apache.hadoop.fs.Path), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Type (org.apache.hadoop.hive.metastore.api.Type), RecycleType (org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType), Table (org.apache.hadoop.hive.metastore.api.Table), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), Test (org.junit.Test)
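
The only difference from the managed-table variants above is the pair of table parameters that make the table external and purgeable on drop. A distilled sketch isolating those two calls, with the remaining builder calls as in the test:

    // Without "external.table.purge" set to "true", dropping an external
    // table would leave its data files in place.
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName1)
            .setCols(typ1.getFields())
            .addTableParam("EXTERNAL", "true")               // external table
            .addTableParam("external.table.purge", "true")   // delete data on drop
            .create(client, hiveConf);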

Example 10 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From the class TestMetaStoreMultipleEncryptionZones, method dropExternalTableWithDifferentEncryptionZonesDifferentKey.

@Test
public void dropExternalTableWithDifferentEncryptionZonesDifferentKey() throws Throwable {
    String dbName = "encrdb6";
    String tblName1 = "encrtbl1";
    String tblName2 = "encrtbl2";
    String typeName = "Person";
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).addParam("repl.source.for", "1, 2, 3").create(client, hiveConf);
    client.dropType(typeName);
    Type typ1 = new Type();
    typ1.setName(typeName);
    typ1.setFields(new ArrayList<>(2));
    typ1.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    typ1.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(typ1);
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName1)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addTableParam("EXTERNAL", "true")
            .addTableParam("external.table.purge", "true")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Table tbl = client.getTable(dbName, tblName1);
    Assert.assertNotNull(tbl);
    new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName2)
            .setCols(typ1.getFields())
            .setNumBuckets(1)
            .addBucketCol("name")
            .addTableParam("EXTERNAL", "true")
            .addTableParam("external.table.purge", "true")
            .addStorageDescriptorParam("test_param_1", "Use this for comments etc")
            .create(client, hiveConf);
    Path dirDb = new Path(warehouse.getWhRoot(), dbName + ".db");
    warehouseFs.mkdirs(dirDb);
    Path dirTbl1 = new Path(dirDb, tblName1);
    warehouseFs.mkdirs(dirTbl1);
    EncryptionZoneUtils.createEncryptionZone(dirTbl1, "test_key_db", conf);
    Path part11 = new Path(dirTbl1, "part1");
    createFile(part11, "testClearer11");
    Path dirTbl2 = new Path(dirDb, tblName2);
    warehouseFs.mkdirs(dirTbl2);
    EncryptionZoneUtils.createEncryptionZone(dirTbl2, "test_key_cm", conf);
    Path part12 = new Path(dirTbl2, "part1");
    createFile(part12, "testClearer12");
    boolean exceptionThrown = false;
    try {
        client.dropTable(dbName, tblName1);
    } catch (MetaException e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part11));
    try {
        client.getTable(dbName, tblName1);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
    exceptionThrown = false;
    try {
        client.dropTable(dbName, tblName2);
    } catch (MetaException e) {
        exceptionThrown = true;
    }
    assertFalse(exceptionThrown);
    assertFalse(warehouseFs.exists(part12));
    try {
        client.getTable(dbName, tblName2);
    } catch (NoSuchObjectException e) {
        exceptionThrown = true;
    }
    assertTrue(exceptionThrown);
}
Also used: Path (org.apache.hadoop.fs.Path), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Type (org.apache.hadoop.hive.metastore.api.Type), RecycleType (org.apache.hadoop.hive.metastore.ReplChangeManager.RecycleType), Table (org.apache.hadoop.hive.metastore.api.Table), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), Test (org.junit.Test)
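
All five examples attach the repl.source.for parameter to the database, which marks it as a replication source; files removed by drop or truncate are then recycled through the ReplChangeManager rather than deleted outright, which is what the cmrootEncrypted assertions in Example 8 observe. A minimal sketch of that setup; the policy id list is illustrative:

    // Marking a database as a replication source so the change manager
    // recycles removed files; "1, 2, 3" is an illustrative policy list.
    new DatabaseBuilder()
            .setName("somedb")
            .addParam("repl.source.for", "1, 2, 3")
            .create(client, hiveConf);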

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 136
Table (org.apache.hadoop.hive.metastore.api.Table): 111
Test (org.junit.Test): 92
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 81
Database (org.apache.hadoop.hive.metastore.api.Database): 40
Partition (org.apache.hadoop.hive.metastore.api.Partition): 36
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 35
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 33
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 31
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 30
ArrayList (java.util.ArrayList): 28
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 27
SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable): 25
CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder): 23
Path (org.apache.hadoop.fs.Path): 19
Catalog (org.apache.hadoop.hive.metastore.api.Catalog): 19
Type (org.apache.hadoop.hive.metastore.api.Type): 19
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 17
TException (org.apache.thrift.TException): 16
IOException (java.io.IOException): 15