Search in sources :

Example 36 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Source: method listPartitions of class NonCatCallsWithCatalog.

/**
 * Verifies that every partition-listing API works against a database/table created
 * without an explicit catalog name, and that results are tagged with the expected catalog.
 */
@Test
public void listPartitions() throws TException {
    final String dbName = "list_partition_database_in_other_catalog";
    final String tableName = "table_in_other_catalog";

    // Database built without a catalog so the server resolves the default catalog itself.
    Database db = new DatabaseBuilder()
            .setName(dbName)
            .build(conf);
    db.unsetCatalogName();
    client.createDatabase(db);

    // Partitioned table with a single string partition column, also catalog-less.
    Table table = new TableBuilder()
            .inDb(db)
            .setTableName(tableName)
            .addCol("id", "int")
            .addCol("name", "string")
            .addPartCol("partcol", "string")
            .build(conf);
    table.unsetCatName();
    client.createTable(table);

    // Five partitions with values a0..a4, again with no explicit catalog.
    Partition[] partitions = new Partition[5];
    for (int idx = 0; idx < partitions.length; idx++) {
        partitions[idx] = new PartitionBuilder()
                .inTable(table)
                .addValue("a" + idx)
                .build(conf);
        partitions[idx].unsetCatName();
    }
    client.add_partitions(Arrays.asList(partitions));

    // Unfiltered listing returns everything, carrying the expected catalog name.
    List<Partition> result = client.listPartitions(dbName, tableName, (short) -1);
    Assert.assertEquals(partitions.length, result.size());
    Assert.assertEquals(expectedCatalog(), result.get(0).getCatName());

    // Listing filtered by an exact partition value.
    result = client.listPartitions(dbName, tableName, Collections.singletonList("a0"), (short) -1);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(expectedCatalog(), result.get(0).getCatName());

    // Partition specs expose the catalog as well.
    PartitionSpecProxy specProxy = client.listPartitionSpecs(dbName, tableName, -1);
    Assert.assertEquals(partitions.length, specProxy.size());
    Assert.assertEquals(expectedCatalog(), specProxy.getCatName());

    // Filter-expression based listings (partitions, specs, and the count API).
    result = client.listPartitionsByFilter(dbName, tableName, "partcol=\"a0\"", (short) -1);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(expectedCatalog(), result.get(0).getCatName());
    specProxy = client.listPartitionSpecsByFilter(dbName, tableName, "partcol=\"a0\"", -1);
    Assert.assertEquals(1, specProxy.size());
    Assert.assertEquals(expectedCatalog(), specProxy.getCatName());
    Assert.assertEquals(1, client.getNumPartitionsByFilter(dbName, tableName, "partcol=\"a0\""));

    // Name listings: a cap larger than the partition count still returns everything.
    List<String> partNames = client.listPartitionNames(dbName, tableName, (short) 57);
    Assert.assertEquals(partitions.length, partNames.size());
    partNames = client.listPartitionNames(dbName, tableName, Collections.singletonList("a0"), Short.MAX_VALUE);
    Assert.assertEquals(1, partNames.size());

    // Value-centric listing via PartitionValuesRequest.
    PartitionValuesRequest valuesRequest = new PartitionValuesRequest(dbName, tableName,
            Lists.newArrayList(new FieldSchema("partcol", "string", "")));
    PartitionValuesResponse valuesResponse = client.listPartitionValues(valuesRequest);
    Assert.assertEquals(5, valuesResponse.getPartitionValuesSize());
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) PartitionValuesRequest(org.apache.hadoop.hive.metastore.api.PartitionValuesRequest) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) PartitionValuesResponse(org.apache.hadoop.hive.metastore.api.PartitionValuesResponse) PartitionSpecProxy(org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy) Test(org.junit.Test)

Example 37 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Source: method otherCatalog of class TestDropPartitions.

/**
 * Verifies dropPartition in a non-default catalog, both by value list and by
 * partition name, and that dropped partitions are no longer retrievable.
 */
@Test
@ConditionalIgnoreOnSessionHiveMetastoreClient
public void otherCatalog() throws TException {
    // Dedicated catalog for this test.
    String catalogName = "drop_partition_catalog";
    Catalog catalog = new CatalogBuilder()
            .setName(catalogName)
            .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catalogName))
            .build();
    client.createCatalog(catalog);

    // Database and partitioned table inside that catalog.
    String databaseName = "drop_partition_database_in_other_catalog";
    Database database = new DatabaseBuilder()
            .setName(databaseName)
            .setCatalogName(catalogName)
            .create(client, metaStore.getConf());
    String tblName = "table_in_other_catalog";
    Table tbl = new TableBuilder()
            .inDb(database)
            .setTableName(tblName)
            .addCol("id", "int")
            .addCol("name", "string")
            .addPartCol("partcol", "string")
            .create(client, metaStore.getConf());

    // Two partitions: values a0 and a1.
    Partition[] partitions = new Partition[2];
    for (int idx = 0; idx < partitions.length; idx++) {
        partitions[idx] = new PartitionBuilder()
                .inTable(tbl)
                .addValue("a" + idx)
                .build(metaStore.getConf());
    }
    client.add_partitions(Arrays.asList(partitions));
    List<Partition> listed = client.listPartitions(catalogName, databaseName, tblName, (short) -1);
    Assert.assertEquals(partitions.length, listed.size());

    // Drop by value list; the lookup afterwards must fail.
    Assert.assertTrue(client.dropPartition(catalogName, databaseName, tblName,
            Collections.singletonList("a0"), PartitionDropOptions.instance().ifExists(false)));
    try {
        client.getPartition(catalogName, databaseName, tblName, Collections.singletonList("a0"));
        Assert.fail();
    } catch (NoSuchObjectException ignored) {
        // expected: the partition was just dropped
    }

    // Drop by partition name; again expect NoSuchObjectException on lookup.
    Assert.assertTrue(client.dropPartition(catalogName, databaseName, tblName, "partcol=a1", true));
    try {
        client.getPartition(catalogName, databaseName, tblName, Collections.singletonList("a1"));
        Assert.fail();
    } catch (NoSuchObjectException ignored) {
        // expected: the partition was just dropped
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 38 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Source: method createTable of class TestDropPartitions.

/**
 * Creates a partitioned test table in DB_NAME.
 *
 * <p>If {@code tableParams} marks the table as EXTERNAL (param "EXTERNAL" equal to
 * "TRUE", case-insensitive), the table is created as EXTERNAL_TABLE under the external
 * warehouse root; otherwise as MANAGED_TABLE under the managed warehouse root.
 *
 * @param tableName   name of the table to create
 * @param partCols    partition columns for the table
 * @param tableParams table parameters; may be null, which implies a managed table
 * @return the created Table
 */
protected Table createTable(String tableName, List<FieldSchema> partCols, Map<String, String> tableParams) throws Exception {
    boolean isExternal = tableParams != null
            && tableParams.getOrDefault("EXTERNAL", "FALSE").equalsIgnoreCase("TRUE");
    String tableType;
    String tableLocation;
    if (isExternal) {
        tableType = "EXTERNAL_TABLE";
        tableLocation = metaStore.getExternalWarehouseRoot() + "/" + tableName;
    } else {
        tableType = "MANAGED_TABLE";
        tableLocation = metaStore.getWarehouseRoot() + "/" + tableName;
    }
    return new TableBuilder()
            .setDbName(DB_NAME)
            .setTableName(tableName)
            .setType(tableType)
            .addCol("test_id", "int", "test col id")
            .addCol("test_value", "string", "test col value")
            .setPartCols(partCols)
            .setLocation(tableLocation)
            .setTableParams(tableParams)
            .create(client, metaStore.getConf());
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder)

Example 39 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Source: method createTableWithConstraints of class TestForeignKey.

/**
 * Verifies that createTableWithConstraints stores a foreign key together with the
 * table, and that getForeignKeys returns it unchanged.
 */
@Test
public void createTableWithConstraints() throws TException {
    final String fkName = "ctwckk";
    Table pkTable = testTables[0];

    // Child table that will carry the foreign key; built here, created below.
    Table fkTable = new TableBuilder()
            .setTableName("table_with_constraints")
            .setDbName(pkTable.getDbName())
            .addCol("col1", "int")
            .addCol("col2", "varchar(32)")
            .build(metaStore.getConf());

    // Primary key on the parent table, then a foreign key referencing it.
    List<SQLPrimaryKey> primaryKey = new SQLPrimaryKeyBuilder()
            .onTable(pkTable)
            .addColumn("col1")
            .build(metaStore.getConf());
    client.addPrimaryKey(primaryKey);
    List<SQLForeignKey> foreignKey = new SQLForeignKeyBuilder()
            .fromPrimaryKey(primaryKey)
            .onTable(fkTable)
            .addColumn("col1")
            .setConstraintName(fkName)
            .build(metaStore.getConf());

    // Create the child table together with its foreign key in a single call.
    client.createTableWithConstraints(fkTable, null, foreignKey, null, null, null, null);

    // The stored foreign key must round-trip unchanged.
    ForeignKeysRequest request = new ForeignKeysRequest(pkTable.getDbName(), pkTable.getTableName(),
            fkTable.getDbName(), fkTable.getTableName());
    request.setCatName(fkTable.getCatName());
    List<SQLForeignKey> fetched = client.getForeignKeys(request);
    Assert.assertEquals(foreignKey, fetched);
}
Also used : SQLPrimaryKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder) SQLForeignKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder) ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 40 with TableBuilder

use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

Source: method setUp of class TestForeignKey.

/**
 * Per-test fixture: wipes any leftover state from previous runs, then recreates the
 * databases, the extra catalog, and the four test tables used by the foreign-key tests.
 */
@Before
public void setUp() throws Exception {
    client = metaStore.getClient();

    // Start from a clean metastore: drop leftover databases, default-db tables,
    // and the extra catalog from any previous run.
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    for (String existing : client.getAllTables(DEFAULT_DATABASE_NAME)) {
        client.dropTable(DEFAULT_DATABASE_NAME, existing, true, true, true);
    }
    client.dropDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG, true, true, true);
    try {
        client.dropCatalog(OTHER_CATALOG);
    } catch (NoSuchObjectException ignored) {
        // catalog did not exist yet — nothing to clean up
    }
    metaStore.cleanWarehouseDirs();

    // Recreate the fixture databases and the secondary catalog.
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    Catalog catalog = new CatalogBuilder()
            .setName(OTHER_CATALOG)
            .setLocation(MetaStoreTestUtils.getTestWarehouseDir(OTHER_CATALOG))
            .build();
    client.createCatalog(catalog);
    // No explicit location here: the database must land inside the catalog directory.
    inOtherCatalog = new DatabaseBuilder()
            .setName(DATABASE_IN_OTHER_CATALOG)
            .setCatalogName(OTHER_CATALOG)
            .create(client, metaStore.getConf());

    // Four test tables: default db, the other db, and two in the other catalog.
    testTables[0] = new TableBuilder()
            .setTableName("test_table_1")
            .addCol("col1", "int")
            .addCol("col2", "varchar(32)")
            .create(client, metaStore.getConf());
    testTables[1] = new TableBuilder()
            .setDbName(OTHER_DATABASE)
            .setTableName("test_table_2")
            .addCol("col1", "int")
            .addCol("col2", "varchar(32)")
            .create(client, metaStore.getConf());
    testTables[2] = new TableBuilder()
            .inDb(inOtherCatalog)
            .setTableName("test_table_3")
            .addCol("col1", "int")
            .addCol("col2", "varchar(32)")
            .create(client, metaStore.getConf());
    testTables[3] = new TableBuilder()
            .inDb(inOtherCatalog)
            .setTableName("test_table_4")
            .addCol("col1", "int")
            .addCol("col2", "varchar(32)")
            .create(client, metaStore.getConf());

    // Re-fetch each table so later assertions compare against server-side state.
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(),
                testTables[i].getTableName());
    }
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Before(org.junit.Before)

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)136 Table (org.apache.hadoop.hive.metastore.api.Table)111 Test (org.junit.Test)92 DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder)81 Database (org.apache.hadoop.hive.metastore.api.Database)40 Partition (org.apache.hadoop.hive.metastore.api.Partition)36 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)35 PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder)33 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)31 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)30 ArrayList (java.util.ArrayList)28 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)27 SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable)25 CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder)23 Path (org.apache.hadoop.fs.Path)19 Catalog (org.apache.hadoop.hive.metastore.api.Catalog)19 Type (org.apache.hadoop.hive.metastore.api.Type)19 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)17 TException (org.apache.thrift.TException)16 IOException (java.io.IOException)15