Search in sources :

Example 26 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

Source: class TestHiveMetaStore, method testAlterViewParititon.

/**
 * Tests altering a partition of a partitioned virtual view: builds a base
 * table, creates a view partitioned on {@code name}, adds a partition to the
 * view, mutates the partition's parameters via {@code alter_partition}, and
 * verifies the mutation round-trips through the metastore.
 */
@Test
public void testAlterViewParititon() throws Throwable {
    String dbName = "compdb";
    String tblName = "comptbl";
    String viewName = "compView";
    client.dropTable(dbName, tblName);
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).setDescription("Alter Partition Test database").create(client, conf);
    // Base table the view selects from. The returned Table handle is never
    // used afterwards, so there is no need to keep it or re-read it from the
    // metastore (the thrift-client refetch here was dead code).
    new TableBuilder().setDbName(dbName).setTableName(tblName).addCol("name", ColumnType.STRING_TYPE_NAME).addCol("income", ColumnType.INT_TYPE_NAME).create(client, conf);
    ArrayList<FieldSchema> viewCols = new ArrayList<>(1);
    viewCols.add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    ArrayList<FieldSchema> viewPartitionCols = new ArrayList<>(1);
    viewPartitionCols.add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    // Build the virtual view by hand: one data column (income), partitioned
    // on name. A view still needs a StorageDescriptor and SerDeInfo with
    // non-null parameter maps for the metastore to accept it.
    Table view = new Table();
    view.setDbName(dbName);
    view.setTableName(viewName);
    view.setTableType(TableType.VIRTUAL_VIEW.name());
    view.setPartitionKeys(viewPartitionCols);
    view.setViewOriginalText("SELECT income, name FROM " + tblName);
    view.setViewExpandedText("SELECT `" + tblName + "`.`income`, `" + tblName + "`.`name` FROM `" + dbName + "`.`" + tblName + "`");
    view.setRewriteEnabled(false);
    StorageDescriptor viewSd = new StorageDescriptor();
    view.setSd(viewSd);
    viewSd.setCols(viewCols);
    viewSd.setCompressed(false);
    viewSd.setParameters(new HashMap<>());
    viewSd.setSerdeInfo(new SerDeInfo());
    viewSd.getSerdeInfo().setParameters(new HashMap<>());
    client.createTable(view);
    // Add a partition to the view, mutate its parameters, and verify the
    // change is persisted. (The previous refetch of 'view' after createTable
    // was unused and has been removed.)
    List<String> vals = new ArrayList<>(1);
    vals.add("abc");
    Partition part = new Partition();
    part.setDbName(dbName);
    part.setTableName(viewName);
    part.setValues(vals);
    part.setParameters(new HashMap<>());
    client.add_partition(part);
    Partition part2 = client.getPartition(dbName, viewName, part.getValues());
    part2.getParameters().put("a", "b");
    client.alter_partition(dbName, viewName, part2, null);
    Partition part3 = client.getPartition(dbName, viewName, part.getValues());
    // JUnit convention: message, expected, actual — the original had the
    // expected value ("b") and the actual value swapped.
    assertEquals("couldn't view alter partition", "b", part3.getParameters().get("a"));
    client.dropTable(dbName, viewName);
    client.dropTable(dbName, tblName);
    client.dropDatabase(dbName);
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Partition(org.apache.hadoop.hive.metastore.api.Partition) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Test(org.junit.Test)

Example 27 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

Source: class TestHiveMetaStore, method testPartitionFilter.

/**
 * Tests for list partition by filter functionality.
 *
 * Creates a table partitioned on string keys p1/p2 and int key p3, adds six
 * partitions, then exercises listPartitionsByFilter with equality,
 * inequality, comparison, LIKE, BETWEEN, operator-precedence and row-limit
 * cases. The integer after each filter is the expected match count for the
 * six partitions created above. Finishes with negative cases that must fail
 * with MetaException / NoSuchObjectException.
 */
@Test
public void testPartitionFilter() throws Exception {
    String dbName = "filterdb";
    String tblName = "filtertbl";
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).create(client, conf);
    Table tbl = new TableBuilder().setDbName(dbName).setTableName(tblName).addCol("c1", ColumnType.STRING_TYPE_NAME).addCol("c2", ColumnType.INT_TYPE_NAME).addPartCol("p1", ColumnType.STRING_TYPE_NAME).addPartCol("p2", ColumnType.STRING_TYPE_NAME).addPartCol("p3", ColumnType.INT_TYPE_NAME).create(client, conf);
    // Re-read so 'tbl' carries server-populated fields for the add_partition
    // helper — presumably the storage location; TODO confirm against helper.
    tbl = client.getTable(dbName, tblName);
    // Six partitions; p3 values are strings here but the column type is int,
    // so numeric filters below compare them as integers (note "-33").
    add_partition(client, tbl, Lists.newArrayList("p11", "p21", "31"), "part1");
    add_partition(client, tbl, Lists.newArrayList("p11", "p22", "32"), "part2");
    add_partition(client, tbl, Lists.newArrayList("p12", "p21", "31"), "part3");
    add_partition(client, tbl, Lists.newArrayList("p12", "p23", "32"), "part4");
    add_partition(client, tbl, Lists.newArrayList("p13", "p24", "31"), "part5");
    add_partition(client, tbl, Lists.newArrayList("p13", "p25", "-33"), "part6");
    // Test equals operator for strings and integers.
    checkFilter(client, dbName, tblName, "p1 = \"p11\"", 2);
    checkFilter(client, dbName, tblName, "p1 = \"p12\"", 2);
    checkFilter(client, dbName, tblName, "p2 = \"p21\"", 2);
    checkFilter(client, dbName, tblName, "p2 = \"p23\"", 1);
    checkFilter(client, dbName, tblName, "p3 = 31", 3);
    checkFilter(client, dbName, tblName, "p3 = 33", 0);
    checkFilter(client, dbName, tblName, "p3 = -33", 1);
    checkFilter(client, dbName, tblName, "p1 = \"p11\" and p2=\"p22\"", 1);
    checkFilter(client, dbName, tblName, "p1 = \"p11\" or p2=\"p23\"", 3);
    checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
    checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
    checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
    checkFilter(client, dbName, tblName, "p1 = \"p11\" and p3 = 31", 1);
    checkFilter(client, dbName, tblName, "p3 = -33 or p1 = \"p12\"", 3);
    // Test not-equals operator for strings and integers.
    checkFilter(client, dbName, tblName, "p1 != \"p11\"", 4);
    checkFilter(client, dbName, tblName, "p2 != \"p23\"", 5);
    checkFilter(client, dbName, tblName, "p2 != \"p33\"", 6);
    checkFilter(client, dbName, tblName, "p3 != 32", 4);
    // Value larger than 32-bit int range still parses and matches nothing.
    checkFilter(client, dbName, tblName, "p3 != 8589934592", 6);
    checkFilter(client, dbName, tblName, "p1 != \"p11\" and p1 != \"p12\"", 2);
    checkFilter(client, dbName, tblName, "p1 != \"p11\" and p2 != \"p22\"", 4);
    checkFilter(client, dbName, tblName, "p1 != \"p11\" or p2 != \"p22\"", 5);
    checkFilter(client, dbName, tblName, "p1 != \"p12\" and p2 != \"p25\"", 3);
    checkFilter(client, dbName, tblName, "p1 != \"p12\" or p2 != \"p25\"", 6);
    checkFilter(client, dbName, tblName, "p3 != -33 or p1 != \"p13\"", 5);
    checkFilter(client, dbName, tblName, "p1 != \"p11\" and p3 = 31", 2);
    checkFilter(client, dbName, tblName, "p3 != 31 and p1 = \"p12\"", 1);
    // Test reverse order.
    checkFilter(client, dbName, tblName, "31 != p3 and p1 = \"p12\"", 1);
    checkFilter(client, dbName, tblName, "\"p23\" = p2", 1);
    // Test and/or more...
    checkFilter(client, dbName, tblName, "p1 = \"p11\" or (p1=\"p12\" and p2=\"p21\")", 3);
    // Mixed-case keywords ("Or", "aNd") must be accepted.
    checkFilter(client, dbName, tblName, "p1 = \"p11\" or (p1=\"p12\" and p2=\"p21\") Or " + "(p1=\"p13\" aNd p2=\"p24\")", 4);
    // test for and or precedence
    checkFilter(client, dbName, tblName, "p1=\"p12\" and (p2=\"p27\" Or p2=\"p21\")", 1);
    checkFilter(client, dbName, tblName, "p1=\"p12\" and p2=\"p27\" Or p2=\"p21\"", 2);
    // Test gt/lt/lte/gte/like for strings.
    checkFilter(client, dbName, tblName, "p1 > \"p12\"", 2);
    checkFilter(client, dbName, tblName, "p1 >= \"p12\"", 4);
    checkFilter(client, dbName, tblName, "p1 < \"p12\"", 2);
    checkFilter(client, dbName, tblName, "p1 <= \"p12\"", 4);
    checkFilter(client, dbName, tblName, "p1 like \"p1%\"", 6);
    checkFilter(client, dbName, tblName, "p2 like \"p%3\"", 1);
    // Test gt/lt/lte/gte for numbers.
    checkFilter(client, dbName, tblName, "p3 < 0", 1);
    checkFilter(client, dbName, tblName, "p3 >= -33", 6);
    checkFilter(client, dbName, tblName, "p3 > -33", 5);
    checkFilter(client, dbName, tblName, "p3 > 31 and p3 < 32", 0);
    checkFilter(client, dbName, tblName, "p3 > 31 or p3 < 31", 3);
    checkFilter(client, dbName, tblName, "p3 > 30 or p3 < 30", 6);
    checkFilter(client, dbName, tblName, "p3 >= 31 or p3 < -32", 6);
    checkFilter(client, dbName, tblName, "p3 >= 32", 2);
    checkFilter(client, dbName, tblName, "p3 > 32", 0);
    // Test between
    checkFilter(client, dbName, tblName, "p1 between \"p11\" and \"p12\"", 4);
    checkFilter(client, dbName, tblName, "p1 not between \"p11\" and \"p12\"", 2);
    checkFilter(client, dbName, tblName, "p3 not between 0 and 2", 6);
    checkFilter(client, dbName, tblName, "p3 between 31 and 32", 5);
    // Reversed bounds: BETWEEN 32 AND 31 matches nothing.
    checkFilter(client, dbName, tblName, "p3 between 32 and 31", 0);
    checkFilter(client, dbName, tblName, "p3 between -32 and 34 and p3 not between 31 and 32", 0);
    checkFilter(client, dbName, tblName, "p3 between 1 and 3 or p3 not between 1 and 3", 6);
    checkFilter(client, dbName, tblName, "p3 between 31 and 32 and p1 between \"p12\" and \"p14\"", 3);
    // Test for setting the maximum partition count
    List<Partition> partitions = client.listPartitionsByFilter(dbName, tblName, "p1 >= \"p12\"", (short) 2);
    assertEquals("User specified row limit for partitions", 2, partitions.size());
    // Negative tests
    Exception me = null;
    // Ordering comparison on a non-string partition key must be rejected.
    try {
        client.listPartitionsByFilter(dbName, tblName, "p3 >= \"p12\"", (short) -1);
    } catch (MetaException e) {
        me = e;
    }
    assertNotNull(me);
    assertTrue("Filter on int partition key", me.getMessage().contains("Filtering is supported only on partition keys of type string"));
    me = null;
    // c1 is a data column, not a partition key.
    try {
        client.listPartitionsByFilter(dbName, tblName, "c1 >= \"p12\"", (short) -1);
    } catch (MetaException e) {
        me = e;
    }
    assertNotNull(me);
    assertTrue("Filter on invalid key", me.getMessage().contains("<c1> is not a partitioning key for the table"));
    me = null;
    // Syntactically incomplete filter must fail to parse.
    try {
        client.listPartitionsByFilter(dbName, tblName, "c1 >= ", (short) -1);
    } catch (MetaException e) {
        me = e;
    }
    assertNotNull(me);
    assertTrue("Invalid filter string", me.getMessage().contains("Error parsing partition filter"));
    me = null;
    // Nonexistent database/table must raise NoSuchObjectException.
    try {
        client.listPartitionsByFilter("invDBName", "invTableName", "p1 = \"p11\"", (short) -1);
    } catch (NoSuchObjectException e) {
        me = e;
    }
    assertNotNull(me);
    assertTrue("NoSuchObject exception", me.getMessage().contains("Specified catalog.database.table does not exist : hive.invdbname.invtablename"));
    client.dropTable(dbName, tblName);
    client.dropDatabase(dbName);
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Partition(org.apache.hadoop.hive.metastore.api.Partition) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.metastore.api.Table) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test)

Example 28 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

Source: class TestHiveMetaStore, method testCreateTableSettingId.

/**
 * Verifies that createTable() succeeds even when the client pre-populates
 * the table id before the call (ids are normally assigned server-side;
 * a client-supplied one must not break creation).
 */
@Test
public void testCreateTableSettingId() throws Exception {
    String dbName = "createDb";
    String tblName = "createTbl";
    client.dropTable(dbName, tblName);
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).create(client, conf);
    Table table = new TableBuilder().setDbName(dbName).setTableName(tblName).addCol("foo", "string").addCol("bar", "string").build(conf);
    // Deliberately set an id on the client side before createTable().
    table.setId(1);
    client.createTable(table);
    // Clean up, consistent with the sibling tests, so leftover objects do
    // not leak into later tests.
    client.dropTable(dbName, tblName);
    client.dropDatabase(dbName);
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.metastore.api.Table) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Test(org.junit.Test)

Example 29 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

Source: class TestHiveMetaStore, method testGetSchemaWithNoClassDefFoundError.

/**
 * getSchema() on a table whose serde class cannot be loaded must fail with
 * a MetaException (per the method name, rather than an unchecked
 * NoClassDefFoundError escaping — TODO confirm against getSchema impl).
 */
@Test(expected = MetaException.class)
public void testGetSchemaWithNoClassDefFoundError() throws TException {
    String dbName = "testDb";
    String tblName = "testTable";
    client.dropTable(dbName, tblName);
    silentDropDatabase(dbName);
    new DatabaseBuilder().setName(dbName).create(client, conf);
    // Create a table whose serde library does not exist on the classpath.
    // The returned Table object is unused, so the local variable was dropped.
    new TableBuilder().setDbName(dbName).setTableName(tblName).addCol("name", ColumnType.STRING_TYPE_NAME, "").setSerdeLib("no.such.class").create(client, conf);
    // Expected to throw MetaException while resolving the serde.
    client.getSchema(dbName, tblName);
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.metastore.api.Table) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Test(org.junit.Test)

Example 30 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in the Apache Hive project.

Source: class TestHiveMetaStore, method testConcurrentMetastores.

/**
 * Verify that if another client, either a metastore Thrift server or a Hive
 * CLI instance, renames a table recently created by this instance, and hence
 * potentially in its cache, the current instance still sees the change.
 */
@Test
public void testConcurrentMetastores() throws Exception {
    String dbName = "concurrentdb";
    String tblName = "concurrenttbl";
    String renameTblName = "rename_concurrenttbl";
    try {
        cleanUp(dbName, tblName, null);
        createDb(dbName);
        new TableBuilder().setDbName(dbName).setTableName(tblName).addCol("c1", ColumnType.STRING_TYPE_NAME).addCol("c2", ColumnType.INT_TYPE_NAME).create(client, conf);
        // get the table from the client, verify the name is correct
        // (JUnit convention: message, expected, actual — the original had
        // expected and actual swapped in both assertions below).
        Table tbl2 = client.getTable(dbName, tblName);
        assertEquals("Client returned table with different name.", tblName, tbl2.getTableName());
        // Simulate renaming via another metastore Thrift server or another Hive CLI instance
        updateTableNameInDB(tblName, renameTblName);
        // get the table from the client again, verify the name has been updated
        Table tbl3 = client.getTable(dbName, renameTblName);
        assertEquals("Client returned table with different name after rename.", renameTblName, tbl3.getTableName());
    } catch (Exception e) {
        // Log the failure for diagnosis, then rethrow so the test still fails.
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testConcurrentMetastores() failed.");
        throw e;
    } finally {
        silentDropDatabase(dbName);
    }
}
Also used : SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.metastore.api.Table) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Test(org.junit.Test)

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)136 Table (org.apache.hadoop.hive.metastore.api.Table)111 Test (org.junit.Test)92 DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder)81 Database (org.apache.hadoop.hive.metastore.api.Database)40 Partition (org.apache.hadoop.hive.metastore.api.Partition)36 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)35 PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder)33 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)31 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)30 ArrayList (java.util.ArrayList)28 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)27 SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable)25 CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder)23 Path (org.apache.hadoop.fs.Path)19 Catalog (org.apache.hadoop.hive.metastore.api.Catalog)19 Type (org.apache.hadoop.hive.metastore.api.Type)19 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)17 TException (org.apache.thrift.TException)16 IOException (java.io.IOException)15