Example 46 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Class TestUniqueConstraint, method createTableWithConstraintsPk.

@Test
public void createTableWithConstraintsPk() throws TException {
    String constraintName = "ctwcuc";
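    // Define a two-column table and a unique constraint on col1 (nothing is created in the metastore yet)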
    Table table = new TableBuilder()
        .setTableName("table_with_constraints")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .build(metaStore.getConf());
    List<SQLUniqueConstraint> uc = new SQLUniqueConstraintBuilder()
        .onTable(table)
        .addColumn("col1")
        .setConstraintName(constraintName)
        .build(metaStore.getConf());
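    // Create the table together with its unique constraint, then read the constraint back and compare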
    client.createTableWithConstraints(table, null, null, uc, null, null, null);
    UniqueConstraintsRequest rqst = new UniqueConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLUniqueConstraint> fetched = client.getUniqueConstraints(rqst);
    Assert.assertEquals(uc, fetched);
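    // Drop the constraint by name and confirm the table no longer has unique constraints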
    client.dropConstraint(table.getCatName(), table.getDbName(), table.getTableName(), constraintName);
    rqst = new UniqueConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    fetched = client.getUniqueConstraints(rqst);
    Assert.assertTrue(fetched.isEmpty());
}
Also used: SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), UniqueConstraintsRequest (org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest), Table (org.apache.hadoop.hive.metastore.api.Table), SQLUniqueConstraintBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLUniqueConstraintBuilder), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Test (org.junit.Test), MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 47 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Class TestPrimaryKey, method createTableWithConstraintsPkInOtherCatalog.

@Test
public void createTableWithConstraintsPkInOtherCatalog() throws TException {
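    // Build a table in the non-default catalog and define a primary key on col1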
    Table table = new TableBuilder()
        .setTableName("table_in_other_catalog_with_constraints")
        .inDb(inOtherCatalog)
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .build(metaStore.getConf());
    List<SQLPrimaryKey> pk = new SQLPrimaryKeyBuilder()
        .onTable(table)
        .addColumn("col1")
        .build(metaStore.getConf());
    client.createTableWithConstraints(table, pk, null, null, null, null, null);
    PrimaryKeysRequest rqst = new PrimaryKeysRequest(table.getDbName(), table.getTableName());
    rqst.setCatName(table.getCatName());
    List<SQLPrimaryKey> fetched = client.getPrimaryKeys(rqst);
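    // The builder did not set a constraint name, so the metastore generated one; copy it over before comparing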
    pk.get(0).setPk_name(fetched.get(0).getPk_name());
    Assert.assertEquals(pk, fetched);
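    // Drop the constraint by its generated name and verify no primary keys remain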
    client.dropConstraint(table.getCatName(), table.getDbName(), table.getTableName(), pk.get(0).getPk_name());
    rqst = new PrimaryKeysRequest(table.getDbName(), table.getTableName());
    rqst.setCatName(table.getCatName());
    fetched = client.getPrimaryKeys(rqst);
    Assert.assertTrue(fetched.isEmpty());
}
Also used: SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder), SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), Table (org.apache.hadoop.hive.metastore.api.Table), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), PrimaryKeysRequest (org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest), Test (org.junit.Test), MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 48 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Class TestPrimaryKey, method setUp.

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE_NAME)) {
        client.dropTable(DEFAULT_DATABASE_NAME, tableName, true, true, true);
    }
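    // Also drop the database and catalog used for cross-catalog tests; the catalog may not exist yet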
    client.dropDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG, true, true, true);
    try {
        client.dropCatalog(OTHER_CATALOG);
    } catch (NoSuchObjectException e) {
    // NOP
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
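    // Recreate the second database, the extra catalog, and a database inside that catalog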
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    Catalog cat = new CatalogBuilder().setName(OTHER_CATALOG).setLocation(MetaStoreTestUtils.getTestWarehouseDir(OTHER_CATALOG)).build();
    client.createCatalog(cat);
    // For this one don't specify a location to make sure it gets put in the catalog directory
    inOtherCatalog = new DatabaseBuilder().setName(DATABASE_IN_OTHER_CATALOG).setCatalogName(OTHER_CATALOG).create(client, metaStore.getConf());
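    // Create one test table in each location: the default database, the other database, and the database in the other catalog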
    testTables[0] = new TableBuilder()
        .setTableName("test_table_1")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[1] = new TableBuilder()
        .setDbName(OTHER_DATABASE)
        .setTableName("test_table_2")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[2] = new TableBuilder()
        .inDb(inOtherCatalog)
        .setTableName("test_table_3")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    // Reload tables from the MetaStore
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(), testTables[i].getTableName());
    }
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Catalog (org.apache.hadoop.hive.metastore.api.Catalog), Before (org.junit.Before)

Example 49 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Class TestGetTableMeta, method tablesInDifferentCatalog.

@Test
public void tablesInDifferentCatalog() throws TException {
    String catName = "get_table_meta_catalog";
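    // Create a dedicated catalog with its own warehouse directory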
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
    client.createCatalog(cat);
    String dbName = "db9";
    // For this one don't specify a location to make sure it gets put in the catalog directory
    Database db = new DatabaseBuilder().setName(dbName).setCatalogName(catName).create(client, metaStore.getConf());
    String[] tableNames = { "table_in_other_catalog_1", "table_in_other_catalog_2", "random_name" };
    List<TableMeta> expected = new ArrayList<>(tableNames.length);
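    // Create each table in the new catalog and record the TableMeta we expect to get back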
    for (int i = 0; i < tableNames.length; i++) {
        client.createTable(new TableBuilder()
            .inDb(db)
            .setTableName(tableNames[i])
            .addCol("id", "int")
            .addCol("name", "string")
            .build(metaStore.getConf()));
        Table table = client.getTable(catName, dbName, tableNames[i]);
        TableMeta tableMeta = new TableMeta(dbName, tableNames[i], table.getTableType());
        tableMeta.setCatName(catName);
        expected.add(tableMeta);
    }
    List<String> typesList = Lists.newArrayList(TableType.MANAGED_TABLE.name(), TableType.EXTERNAL_TABLE.name());
    List<String> types = Collections.unmodifiableList(typesList);
    List<TableMeta> actual = client.getTableMeta(catName, dbName, "*", types);
    assertTableMetas(expected, actual, 0, 1, 2);
    actual = client.getTableMeta(catName, "*", "table_*", types);
    assertTableMetas(expected, actual, 0, 1);
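    // Without an explicit catalog argument this overload searches the default catalog, so no matches are expected here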
    actual = client.getTableMeta(dbName, "table_in_other_catalog_*", types);
    assertTableMetas(expected, actual);
}
Also used: Table (org.apache.hadoop.hive.metastore.api.Table), ArrayList (java.util.ArrayList), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Catalog (org.apache.hadoop.hive.metastore.api.Catalog), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), Database (org.apache.hadoop.hive.metastore.api.Database), TableMeta (org.apache.hadoop.hive.metastore.api.TableMeta), MetastoreUnitTest (org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest), Test (org.junit.Test), MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 50 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Class TestDefaultConstraint, method setUp.

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE_NAME)) {
        client.dropTable(DEFAULT_DATABASE_NAME, tableName, true, true, true);
    }
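    // Also drop the database and catalog used for cross-catalog tests; the catalog may not exist yet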
    client.dropDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG, true, true, true);
    try {
        client.dropCatalog(OTHER_CATALOG);
    } catch (NoSuchObjectException e) {
    // NOP
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
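    // Recreate the second database, the extra catalog, and a database inside that catalog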
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    Catalog cat = new CatalogBuilder().setName(OTHER_CATALOG).setLocation(MetaStoreTestUtils.getTestWarehouseDir(OTHER_CATALOG)).build();
    client.createCatalog(cat);
    // For this one don't specify a location to make sure it gets put in the catalog directory
    inOtherCatalog = new DatabaseBuilder().setName(DATABASE_IN_OTHER_CATALOG).setCatalogName(OTHER_CATALOG).create(client, metaStore.getConf());
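    // Create one test table in each location: the default database, the other database, and the database in the other catalog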
    testTables[0] = new TableBuilder()
        .setTableName("test_table_1")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[1] = new TableBuilder()
        .setDbName(OTHER_DATABASE)
        .setTableName("test_table_2")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[2] = new TableBuilder()
        .inDb(inOtherCatalog)
        .setTableName("test_table_3")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    // Reload tables from the MetaStore
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(), testTables[i].getTableName());
    }
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Catalog (org.apache.hadoop.hive.metastore.api.Catalog), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), Before (org.junit.Before)

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 136
Table (org.apache.hadoop.hive.metastore.api.Table): 111
Test (org.junit.Test): 92
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 81
Database (org.apache.hadoop.hive.metastore.api.Database): 40
Partition (org.apache.hadoop.hive.metastore.api.Partition): 36
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 35
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 33
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 31
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 30
ArrayList (java.util.ArrayList): 28
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 27
SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable): 25
CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder): 23
Path (org.apache.hadoop.fs.Path): 19
Catalog (org.apache.hadoop.hive.metastore.api.Catalog): 19
Type (org.apache.hadoop.hive.metastore.api.Type): 19
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 17
TException (org.apache.thrift.TException): 16
IOException (java.io.IOException): 15
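
Distilled from the examples above, here is a minimal sketch of the common TableBuilder pattern. The table name and columns are illustrative, and client and conf are assumed to come from the surrounding test fixture (for example metaStore.getClient() and metaStore.getConf()):

// Build a Table definition with TableBuilder, then register it through the metastore client.
Table t = new TableBuilder()
    .setTableName("example_table")
    .addCol("id", "int")
    .addCol("name", "string")
    .build(conf);
client.createTable(t);
// Alternatively, create(client, conf) builds the Table and registers it in one call,
// as the setUp methods above do.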