Example 96 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class TestHiveMetaStore, method testFilterSinglePartition.

/**
 * Test filtering on a table with a single partition column.
 */
@Test
public void testFilterSinglePartition() throws Exception {
    String dbName = "filterdb";
    String tblName = "filtertbl";
    List<String> vals = new ArrayList<>(1);
    vals.add("p11");
    List<String> vals2 = new ArrayList<>(1);
    vals2.add("p12");
    List<String> vals3 = new ArrayList<>(1);
    vals3.add("p13");
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).create(client, conf);
    Table tbl = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName)
        .addCol("c1", ColumnType.STRING_TYPE_NAME)
        .addCol("c2", ColumnType.INT_TYPE_NAME)
        .addPartCol("p1", ColumnType.STRING_TYPE_NAME)
        .create(client, conf);
    // refresh from the metastore so the table carries server-assigned
    // metadata (e.g. the location) that add_partition relies on below
    tbl = client.getTable(dbName, tblName);
    add_partition(client, tbl, vals, "part1");
    add_partition(client, tbl, vals2, "part2");
    add_partition(client, tbl, vals3, "part3");
    checkFilter(client, dbName, tblName, "p1 = \"p12\"", 1);
    checkFilter(client, dbName, tblName, "p1 < \"p12\"", 1);
    checkFilter(client, dbName, tblName, "p1 > \"p12\"", 1);
    checkFilter(client, dbName, tblName, "p1 >= \"p12\"", 2);
    checkFilter(client, dbName, tblName, "p1 <= \"p12\"", 2);
    checkFilter(client, dbName, tblName, "p1 <> \"p12\"", 2);
    checkFilter(client, dbName, tblName, "p1 like \"p1%\"", 3);
    checkFilter(client, dbName, tblName, "p1 like \"p%2\"", 1);
    client.dropTable(dbName, tblName);
    client.dropDatabase(dbName);
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable), Table (org.apache.hadoop.hive.metastore.api.Table), ArrayList (java.util.ArrayList), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Test (org.junit.Test)
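
The checkFilter helper is not shown on this page. A minimal sketch of what it plausibly does, assuming it delegates to IMetaStoreClient.listPartitionsByFilter; the method signature and assertion message here are illustrative, not taken from the Hive source:

// Hypothetical reconstruction of the checkFilter helper used above:
// list the partitions matching the filter and assert on the count.
private static void checkFilter(IMetaStoreClient client, String dbName,
        String tblName, String filter, int expectedCount) throws Exception {
    List<Partition> partitions =
            client.listPartitionsByFilter(dbName, tblName, filter, (short) -1);
    assertEquals("partition count for filter: " + filter,
            expectedCount, partitions.size());
}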

Example 97 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class TestHiveMetaStore, method testAlterPartition.

@Test
public void testAlterPartition() throws Throwable {
    try {
        String dbName = "compdb";
        String tblName = "comptbl";
        List<String> vals = new ArrayList<>(2);
        vals.add("2008-07-01");
        vals.add("14");
        client.dropTable(dbName, tblName);
        silentDropDatabase(dbName);
        new DatabaseBuilder().setName(dbName).setDescription("Alter Partition Test database").create(client, conf);
        Table tbl = new TableBuilder()
            .setDbName(dbName)
            .setTableName(tblName)
            .addCol("name", ColumnType.STRING_TYPE_NAME)
            .addCol("income", ColumnType.INT_TYPE_NAME)
            .addTableParam("test_param_1", "Use this for comments etc")
            .addBucketCol("name")
            .addSerdeParam(ColumnType.SERIALIZATION_FORMAT, "1")
            .addPartCol("ds", ColumnType.STRING_TYPE_NAME)
            .addPartCol("hr", ColumnType.INT_TYPE_NAME)
            .create(client, conf);
        if (isThriftClient) {
            // the createTable() above does not update the location in the 'tbl'
            // object when the client is a thrift client and the code below relies
            // on the location being present in the 'tbl' object - so get the table
            // from the metastore
            tbl = client.getTable(dbName, tblName);
        }
        Partition part = new Partition();
        part.setDbName(dbName);
        part.setTableName(tblName);
        part.setValues(vals);
        part.setParameters(new HashMap<>());
        part.setSd(tbl.getSd());
        part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
        part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
        client.add_partition(part);
        Partition part2 = client.getPartition(dbName, tblName, part.getValues());
        part2.getParameters().put("retention", "10");
        part2.getSd().setNumBuckets(12);
        part2.getSd().getSerdeInfo().getParameters().put("abc", "1");
        client.alter_partition(dbName, tblName, part2, null);
        Partition part3 = client.getPartition(dbName, tblName, part.getValues());
        assertEquals("couldn't alter partition", part3.getParameters().get("retention"), "10");
        assertEquals("couldn't alter partition", part3.getSd().getSerdeInfo().getParameters().get("abc"), "1");
        assertEquals("couldn't alter partition", part3.getSd().getNumBuckets(), 12);
        client.dropTable(dbName, tblName);
        client.dropDatabase(dbName);
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testPartition() failed.");
        throw e;
    }
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Partition (org.apache.hadoop.hive.metastore.api.Partition), SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable), Table (org.apache.hadoop.hive.metastore.api.Table), ArrayList (java.util.ArrayList), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException), ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException), SQLException (java.sql.SQLException), UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException), TException (org.apache.thrift.TException), IOException (java.io.IOException), InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), Test (org.junit.Test)
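
The hand-built Partition above (setDbName, setSd, and so on) can also be written with PartitionBuilder, the approach the later examples on this page take. A minimal sketch under that assumption, reusing the test's tbl, conf, and client; inTable fills in the database name, table name, and column layout from the table, and the metastore assigns a default location when none is set:

// Sketch: the same partition via PartitionBuilder instead of setters.
Partition part = new PartitionBuilder()
        .inTable(tbl)
        .addValue("2008-07-01")
        .addValue("14")
        .build(conf);
client.add_partition(part);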

Example 98 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class NonCatCallsWithCatalog, method tablesList.

@Test
public void tablesList() throws TException {
    String dbName = "db_in_other_catalog";
    // For this one don't specify a location to make sure it gets put in the catalog directory
    Database db = new DatabaseBuilder().setName(dbName).build(conf);
    db.unsetCatalogName();
    client.createDatabase(db);
    String[] tableNames = new String[4];
    for (int i = 0; i < tableNames.length; i++) {
        tableNames[i] = "table_in_other_catalog_" + i;
        TableBuilder builder = new TableBuilder()
            .inDb(db)
            .setTableName(tableNames[i])
            .addCol("col1_" + i, ColumnType.STRING_TYPE_NAME)
            .addCol("col2_" + i, ColumnType.INT_TYPE_NAME);
        if (i == 0) {
            builder.addTableParam("the_key", "the_value");
        }
        Table table = builder.build(conf);
        table.unsetCatName();
        client.createTable(table);
    }
    String filter = hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS + "the_key=\"the_value\"";
    List<String> fetchedNames = client.listTableNamesByFilter(dbName, filter, (short) -1);
    Assert.assertEquals(1, fetchedNames.size());
    Assert.assertEquals(tableNames[0], fetchedNames.get(0));
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Table (org.apache.hadoop.hive.metastore.api.Table), Database (org.apache.hadoop.hive.metastore.api.Database), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), Test (org.junit.Test)
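
HIVE_FILTER_FIELD_PARAMS is one of a small family of server-side filter prefixes in hive_metastoreConstants. A short sketch of the sibling constants, reusing the test's client and dbName; the filter values are illustrative, and the exact comparison syntax should be checked against the metastore's filter grammar:

// Sketch: other server-side table filters (values are illustrative).
String byOwner = hive_metastoreConstants.HIVE_FILTER_FIELD_OWNER + " = \"hive\"";
String byAccess = hive_metastoreConstants.HIVE_FILTER_FIELD_LAST_ACCESS + " > 0";
List<String> ownedByHive = client.listTableNamesByFilter(dbName, byOwner, (short) -1);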

Example 99 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class NonCatCallsWithCatalog, method alterPartitions.

@Test
public void alterPartitions() throws TException {
    String dbName = "alter_partition_database_in_other_catalog";
    Database db = new DatabaseBuilder().setName(dbName).build(conf);
    db.unsetCatalogName();
    client.createDatabase(db);
    String tableName = "table_in_other_catalog";
    Table table = new TableBuilder()
        .inDb(db)
        .setTableName(tableName)
        .addCol("id", "int")
        .addCol("name", "string")
        .addPartCol("partcol", "string")
        .build(conf);
    table.unsetCatName();
    client.createTable(table);
    Partition[] parts = new Partition[5];
    for (int i = 0; i < 5; i++) {
        parts[i] = new PartitionBuilder()
            .inTable(table)
            .addValue("a" + i)
            .setLocation(MetaStoreTestUtils.getTestWarehouseDir("b" + i))
            .build(conf);
        parts[i].unsetCatName();
    }
    client.add_partitions(Arrays.asList(parts));
    Partition newPart = client.getPartition(dbName, tableName, Collections.singletonList("a0"));
    newPart.getParameters().put("test_key", "test_value");
    client.alter_partition(dbName, tableName, newPart);
    Partition fetched = client.getPartition(dbName, tableName, Collections.singletonList("a0"));
    Assert.assertEquals("test_value", fetched.getParameters().get("test_key"));
    newPart = client.getPartition(dbName, tableName, Collections.singletonList("a1"));
    newPart.setLastAccessTime(3);
    Partition newPart1 = client.getPartition(dbName, tableName, Collections.singletonList("a2"));
    newPart1.getSd().setLocation(MetaStoreTestUtils.getTestWarehouseDir("somewhere"));
    client.alter_partitions(dbName, tableName, Arrays.asList(newPart, newPart1));
    fetched = client.getPartition(dbName, tableName, Collections.singletonList("a1"));
    Assert.assertEquals(3L, fetched.getLastAccessTime());
    fetched = client.getPartition(dbName, tableName, Collections.singletonList("a2"));
    Assert.assertTrue(fetched.getSd().getLocation().contains("somewhere"));
    newPart = client.getPartition(dbName, tableName, Collections.singletonList("a4"));
    newPart.getParameters().put("test_key", "test_value");
    EnvironmentContext ec = new EnvironmentContext();
    ec.setProperties(Collections.singletonMap("a", "b"));
    client.alter_partition(dbName, tableName, newPart, ec);
    fetched = client.getPartition(dbName, tableName, Collections.singletonList("a4"));
    Assert.assertEquals("test_value", fetched.getParameters().get("test_key"));
    client.dropDatabase(dbName, true, true, true);
}
Also used: EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext), DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Partition (org.apache.hadoop.hive.metastore.api.Partition), Table (org.apache.hadoop.hive.metastore.api.Table), PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder), Database (org.apache.hadoop.hive.metastore.api.Database), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), Test (org.junit.Test)
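
The EnvironmentContext passed to alter_partition above just carries an arbitrary property map through to the server. A common use in the metastore, sketched here under the assumption that StatsSetupConst from hive-common is on the classpath, is to tell the server not to touch column statistics during the alter:

// Sketch: suppress stats updates for an alter via EnvironmentContext.
EnvironmentContext ctx = new EnvironmentContext();
ctx.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, "true");
client.alter_partition(dbName, tableName, newPart, ctx);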

Example 100 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

The class NonCatCallsWithCatalog, method addPartitions.

@Test
public void addPartitions() throws TException {
    String dbName = "add_partition_database_in_other_catalog";
    Database db = new DatabaseBuilder().setName(dbName).build(conf);
    db.unsetCatalogName();
    client.createDatabase(db);
    String tableName = "table_in_other_catalog";
    Table table = new TableBuilder()
        .inDb(db)
        .setTableName(tableName)
        .addCol("id", "int")
        .addCol("name", "string")
        .addPartCol("partcol", "string")
        .build(conf);
    table.unsetCatName();
    client.createTable(table);
    Partition[] parts = new Partition[5];
    for (int i = 0; i < parts.length; i++) {
        parts[i] = new PartitionBuilder()
            .inTable(table)
            .addValue("a" + i)
            .build(conf);
        parts[i].unsetCatName();
    }
    client.add_partition(parts[0]);
    Assert.assertEquals(2, client.add_partitions(Arrays.asList(parts[1], parts[2])));
    client.add_partitions(Arrays.asList(parts[3], parts[4]), true, false);
    for (int i = 0; i < parts.length; i++) {
        Partition fetched = client.getPartition(dbName, tableName, Collections.singletonList("a" + i));
        Assert.assertEquals(dbName, fetched.getDbName());
        Assert.assertEquals(tableName, fetched.getTableName());
        Assert.assertEquals(expectedCatalog(), fetched.getCatName());
    }
    client.dropDatabase(dbName, true, true, true);
}
Also used: DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Partition (org.apache.hadoop.hive.metastore.api.Partition), Table (org.apache.hadoop.hive.metastore.api.Table), PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder), Database (org.apache.hadoop.hive.metastore.api.Database), TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), Test (org.junit.Test)
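
The three-argument add_partitions call above takes (partitions, ifNotExists, needResults). As I read the API, so treat the return-value details as an assumption: ifNotExists skips partitions that already exist rather than throwing AlreadyExistsException, and needResults controls whether the metastore sends the stored partitions back:

// Sketch: request the stored partitions back (needResults = true),
// tolerating ones that already exist (ifNotExists = true).
List<Partition> stored =
        client.add_partitions(Arrays.asList(parts[3], parts[4]), true, true);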

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 136
Table (org.apache.hadoop.hive.metastore.api.Table): 111
Test (org.junit.Test): 92
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 81
Database (org.apache.hadoop.hive.metastore.api.Database): 40
Partition (org.apache.hadoop.hive.metastore.api.Partition): 36
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 35
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 33
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 31
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 30
ArrayList (java.util.ArrayList): 28
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 27
SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable): 25
CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder): 23
Path (org.apache.hadoop.fs.Path): 19
Catalog (org.apache.hadoop.hive.metastore.api.Catalog): 19
Type (org.apache.hadoop.hive.metastore.api.Type): 19
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 17
TException (org.apache.thrift.TException): 16
IOException (java.io.IOException): 15