Example 1 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From class TestRetryingHMSHandler, method testRetryingHMSHandler.

// Create a database and a table in that database. Because the
// AlternateFailurePreListener is in use, each attempt to create something
// should require two calls through the RetryingHMSHandler.
@Test
public void testRetryingHMSHandler() throws Exception {
    String dbName = "hive4159";
    String tblName = "tmptbl";
    Database db = new Database();
    db.setName(dbName);
    msc.createDatabase(db);
    Assert.assertEquals(2, AlternateFailurePreListener.getCallCount());
    Table tbl = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tblName)
        .addCol("c1", ColumnType.STRING_TYPE_NAME)
        .build();
    msc.createTable(tbl);
    Assert.assertEquals(4, AlternateFailurePreListener.getCallCount());
}
Also used: Table(org.apache.hadoop.hive.metastore.api.Table), Database(org.apache.hadoop.hive.metastore.api.Database), TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Test(org.junit.Test), MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
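
The test above relies on AlternateFailurePreListener failing every other metastore call, which is why each successful create costs exactly two calls. A minimal sketch of such a listener, assuming Hive's MetaStorePreEventListener API (an illustration, not the actual Hive implementation):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.PreEventContext;

public class AlternateFailurePreListener extends MetaStorePreEventListener {

    private static int callCount = 0;

    public AlternateFailurePreListener(Configuration config) {
        super(config);
    }

    @Override
    public void onEvent(PreEventContext context) throws MetaException {
        callCount++;
        // Fail on every odd-numbered call so the RetryingHMSHandler must
        // retry once, making each successful operation cost two calls.
        if (callCount % 2 == 1) {
            throw new MetaException("Planned failure to exercise retry logic");
        }
    }

    public static int getCallCount() {
        return callCount;
    }
}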

Example 2 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From class TestAddPartitions, method createTable.

private Table createTable(String dbName, String tableName, List<FieldSchema> partCols, String location) throws Exception {
    Table table = new TableBuilder()
        .setDbName(dbName)
        .setTableName(tableName)
        .addCol("test_id", "int", "test col id")
        .addCol("test_value", "string", "test col value")
        .addTableParam("partTestTableParamKey", "partTestTableParamValue")
        .setPartCols(partCols)
        .addStorageDescriptorParam("partTestSDParamKey", "partTestSDParamValue")
        .setSerdeName(tableName)
        .setStoredAsSubDirectories(false)
        .addSerdeParam("partTestSerdeParamKey", "partTestSerdeParamValue")
        .setLocation(location)
        .build();
    client.createTable(table);
    return client.getTable(dbName, tableName);
}
Also used: Table(org.apache.hadoop.hive.metastore.api.Table), TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder)
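
The helper above receives its partition columns from the caller. A hypothetical invocation, using the same FieldSchema and ArrayList types seen elsewhere in these examples (database, table, and location names are illustrative, not from the Hive tests):

    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("year", "string", "partition column: year"));
    partCols.add(new FieldSchema("month", "string", "partition column: month"));
    Table partedTable = createTable("part_test_db", "part_test_table", partCols, "/tmp/part_test_table");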

Example 3 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From class TestHiveMetastoreTransformer, method createTableWithCapabilities.

private Table createTableWithCapabilities(Map<String, Object> props) throws Exception {
    String catalog = (String) props.getOrDefault("CATALOG", MetaStoreUtils.getDefaultCatalog(conf));
    String dbName = (String) props.getOrDefault("DBNAME", "simpdb");
    String tblName = (String) props.getOrDefault("TBLNAME", "test_table");
    TableType type = (TableType) props.getOrDefault("TBLTYPE", TableType.MANAGED_TABLE);
    int buckets = (Integer) props.getOrDefault("BUCKETS", -1);
    String properties = (String) props.getOrDefault("PROPERTIES", "");
    String location = (String) props.get("LOCATION");
    boolean dropDb = (Boolean) props.getOrDefault("DROPDB", Boolean.TRUE);
    int partitionCount = (Integer) props.getOrDefault("PARTITIONS", 0);
    final String typeName = "Person";
    if (type == TableType.EXTERNAL_TABLE) {
        if (!properties.contains("EXTERNAL=TRUE")) {
            // String.concat returns a new string; the result must be assigned back
            properties = properties.concat(";EXTERNAL=TRUE;");
        }
    }
    Map<String, String> table_params = new HashMap<>();
    if (properties.length() > 0) {
        String[] propArray = properties.split(";");
        for (String prop : propArray) {
            String[] keyValue = prop.split("=");
            table_params.put(keyValue[0], keyValue[1]);
        }
    }
    Catalog cat = null;
    try {
        cat = client.getCatalog(catalog);
    } catch (NoSuchObjectException e) {
        LOG.info("Catalog does not exist, creating a new one");
        // We only reach this catch when getCatalog threw before assigning
        // 'cat', so no null check is needed here.
        try {
            cat = new Catalog();
            cat.setName(catalog.toLowerCase());
            Warehouse wh = new Warehouse(conf);
            cat.setLocationUri(wh.getWhRootExternal().toString() + File.separator + catalog);
            cat.setDescription("Non-hive catalog");
            client.createCatalog(cat);
            LOG.info("Catalog " + catalog + " created");
        } catch (Exception ce) {
            LOG.warn("Catalog " + catalog + " could not be created", ce);
        }
    } catch (Exception e) {
        LOG.error("Creation of a new catalog failed, aborting test");
        throw e;
    }
    try {
        client.dropTable(dbName, tblName);
    } catch (Exception e) {
        LOG.info("Drop table failed for " + dbName + "." + tblName);
    }
    try {
        if (dropDb)
            silentDropDatabase(dbName);
    } catch (Exception e) {
        LOG.info("Drop database failed for " + dbName);
    }
    if (dropDb)
        new DatabaseBuilder().setName(dbName).setCatalogName(catalog).create(client, conf);
    try {
        client.dropType(typeName);
    } catch (Exception e) {
        LOG.info("Drop type failed for " + typeName);
    }
    Type typ1 = new Type();
    typ1.setName(typeName);
    typ1.setFields(new ArrayList<>(2));
    typ1.getFields().add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
    typ1.getFields().add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
    client.createType(typ1);
    TableBuilder builder = new TableBuilder()
        .setCatName(catalog)
        .setDbName(dbName)
        .setTableName(tblName)
        .setCols(typ1.getFields())
        .setType(type.name())
        .setTableParams(table_params)
        .addStorageDescriptorParam("test_param_1", "Use this for comments etc");
    // Location and bucketing are applied only when requested; setting them
    // unconditionally in the chain above would duplicate the calls below.
    if (location != null)
        builder.setLocation(location);
    if (buckets > 0)
        builder.setNumBuckets(buckets).addBucketCol("name");
    if (partitionCount > 0) {
        builder.addPartCol("partcol", "string");
    }
    if (type == TableType.MANAGED_TABLE) {
        if (properties.contains("transactional=true") && !properties.contains("transactional_properties=insert_only")) {
            builder.setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat");
            builder.setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
            builder.setSerdeLib("org.apache.hadoop.hive.ql.io.orc.OrcSerde");
            builder.addStorageDescriptorParam("inputFormat", "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat");
            builder.addStorageDescriptorParam("outputFormat", "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
        }
    }
    Table tbl = builder.create(client, conf);
    LOG.info("Table " + tbl.getTableName() + " created:type=" + tbl.getTableType());
    if (partitionCount > 0) {
        List<Partition> partitions = new ArrayList<>();
        List<List<String>> partValues = new ArrayList<>();
        for (int i = 1; i <= partitionCount; i++) {
            partValues.add(Lists.newArrayList("" + i));
        }
        for (List<String> vals : partValues) {
            addPartition(client, tbl, vals);
        }
    }
    if (isThriftClient) {
        // the createTable() above does not update the location in the 'tbl'
        // object when the client is a thrift client and the code below relies
        // on the location being present in the 'tbl' object - so get the table
        // from the metastore
        tbl = client.getTable(catalog, dbName, tblName);
        LOG.info("Fetched Table " + tbl.getTableName() + " created:type=" + tbl.getTableType());
    }
    return tbl;
}
Also used: HashMap(java.util.HashMap), FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList(java.util.ArrayList), TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder), List(java.util.List), Partition(org.apache.hadoop.hive.metastore.api.Partition), Table(org.apache.hadoop.hive.metastore.api.Table), Catalog(org.apache.hadoop.hive.metastore.api.Catalog), MetaException(org.apache.hadoop.hive.metastore.api.MetaException), TException(org.apache.thrift.TException), IOException(java.io.IOException), InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException), NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException), DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Type(org.apache.hadoop.hive.metastore.api.Type)
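
Because the helper is driven entirely by the props map, a hypothetical call might look like this (the keys mirror the getOrDefault lookups at the top of the method; the values are illustrative):

    Map<String, Object> props = new HashMap<>();
    props.put("DBNAME", "transformer_test_db");
    props.put("TBLNAME", "ext_table");
    props.put("TBLTYPE", TableType.EXTERNAL_TABLE);
    // Parsed by splitting on ";" and "=" into table parameters
    props.put("PROPERTIES", "EXTERNAL=TRUE;custom_key=custom_value");
    props.put("PARTITIONS", 2);
    Table tbl = createTableWithCapabilities(props);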

Example 4 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From class TestSchemaToolCatalogOps, method moveTableWithinCatalog.

@Test
public void moveTableWithinCatalog() throws TException, HiveMetaException {
    String toDbName = "moveTableWithinCatalogDb";
    String tableName = "moveTableWithinCatalogTable";
    String partVal = "moveTableWithinCatalogKey";
    new DatabaseBuilder().setName(toDbName).create(client, conf);
    Table table = new TableBuilder()
        .setTableName(tableName)
        .addCol("a", "int")
        .addPartCol("p", "string")
        .create(client, conf);
    new PartitionBuilder().inTable(table).addValue(partVal).addToTable(client, conf);
    String argsMoveTable = String.format("-moveTable %s -fromCatalog %s -toCatalog %s -fromDatabase %s -toDatabase %s", tableName, DEFAULT_CATALOG_NAME, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME, toDbName);
    execute(new SchemaToolTaskMoveTable(), argsMoveTable);
    Table fetchedTable = client.getTable(DEFAULT_CATALOG_NAME, toDbName, tableName);
    Assert.assertNotNull(fetchedTable);
    Assert.assertEquals(DEFAULT_CATALOG_NAME, fetchedTable.getCatName());
    Assert.assertEquals(toDbName.toLowerCase(), fetchedTable.getDbName());
    Partition fetchedPart = client.getPartition(DEFAULT_CATALOG_NAME, toDbName, tableName, Collections.singletonList(partVal));
    Assert.assertNotNull(fetchedPart);
    Assert.assertEquals(DEFAULT_CATALOG_NAME, fetchedPart.getCatName());
    Assert.assertEquals(toDbName.toLowerCase(), fetchedPart.getDbName());
    Assert.assertEquals(tableName.toLowerCase(), fetchedPart.getTableName());
}
Also used: DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Partition(org.apache.hadoop.hive.metastore.api.Partition), Table(org.apache.hadoop.hive.metastore.api.Table), PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder), TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Test(org.junit.Test)
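
For reference, assuming the usual metastore defaults of "hive" for DEFAULT_CATALOG_NAME and "default" for DEFAULT_DATABASE_NAME (an assumption; the constants' values are not shown in this test), the String.format call above expands to:

    // Expanded argument string passed to SchemaToolTaskMoveTable:
    String expandedArgs = "-moveTable moveTableWithinCatalogTable"
        + " -fromCatalog hive -toCatalog hive"
        + " -fromDatabase default -toDatabase moveTableWithinCatalogDb";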

Example 5 with TableBuilder

Use of org.apache.hadoop.hive.metastore.client.builder.TableBuilder in project hive by apache.

From class TestSchemaToolCatalogOps, method moveDatabase.

@Test
public void moveDatabase() throws HiveMetaException, TException {
    String toCatName = "moveDbCat";
    String dbName = "moveDbDb";
    String tableName = "moveDbTable";
    String funcName = "movedbfunc";
    String partVal = "moveDbKey";
    new CatalogBuilder().setName(toCatName).setLocation("file:///tmp").create(client);
    Database db = new DatabaseBuilder().setCatalogName(DEFAULT_CATALOG_NAME).setName(dbName).create(client, conf);
    new FunctionBuilder().inDb(db).setName(funcName).setClass("org.apache.hive.myudf").create(client, conf);
    Table table = new TableBuilder()
        .inDb(db)
        .setTableName(tableName)
        .addCol("a", "int")
        .addPartCol("p", "string")
        .create(client, conf);
    new PartitionBuilder().inTable(table).addValue(partVal).addToTable(client, conf);
    String argsMoveDB = String.format("-moveDatabase %s -fromCatalog %s -toCatalog %s", dbName, DEFAULT_CATALOG_NAME, toCatName);
    execute(new SchemaToolTaskMoveDatabase(), argsMoveDB);
    Database fetchedDb = client.getDatabase(toCatName, dbName);
    Assert.assertNotNull(fetchedDb);
    Assert.assertEquals(toCatName.toLowerCase(), fetchedDb.getCatalogName());
    Function fetchedFunction = client.getFunction(toCatName, dbName, funcName);
    Assert.assertNotNull(fetchedFunction);
    Assert.assertEquals(toCatName.toLowerCase(), fetchedFunction.getCatName());
    Assert.assertEquals(dbName.toLowerCase(), fetchedFunction.getDbName());
    Table fetchedTable = client.getTable(toCatName, dbName, tableName);
    Assert.assertNotNull(fetchedTable);
    Assert.assertEquals(toCatName.toLowerCase(), fetchedTable.getCatName());
    Assert.assertEquals(dbName.toLowerCase(), fetchedTable.getDbName());
    Partition fetchedPart = client.getPartition(toCatName, dbName, tableName, Collections.singletonList(partVal));
    Assert.assertNotNull(fetchedPart);
    Assert.assertEquals(toCatName.toLowerCase(), fetchedPart.getCatName());
    Assert.assertEquals(dbName.toLowerCase(), fetchedPart.getDbName());
    Assert.assertEquals(tableName.toLowerCase(), fetchedPart.getTableName());
}
Also used: DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder), Function(org.apache.hadoop.hive.metastore.api.Function), Partition(org.apache.hadoop.hive.metastore.api.Partition), Table(org.apache.hadoop.hive.metastore.api.Table), PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder), FunctionBuilder(org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder), CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder), Database(org.apache.hadoop.hive.metastore.api.Database), TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder), Test(org.junit.Test)
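
Taken together, the five examples use one consistent builder hierarchy: catalog, then database, then table, then partition. A condensed sketch of the whole chain, using only calls that appear in the examples above (the names are illustrative):

    // Catalog -> Database -> Table -> Partition, via the metastore builders
    new CatalogBuilder()
        .setName("my_cat")
        .setLocation("file:///tmp/my_cat")
        .create(client);
    Database db = new DatabaseBuilder()
        .setCatalogName("my_cat")
        .setName("my_db")
        .create(client, conf);
    Table table = new TableBuilder()
        .inDb(db)
        .setTableName("my_table")
        .addCol("a", "int")
        .addPartCol("p", "string")
        .create(client, conf);
    new PartitionBuilder()
        .inTable(table)
        .addValue("p1")
        .addToTable(client, conf);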

Aggregations

TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 136
Table (org.apache.hadoop.hive.metastore.api.Table): 111
Test (org.junit.Test): 92
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 81
Database (org.apache.hadoop.hive.metastore.api.Database): 40
Partition (org.apache.hadoop.hive.metastore.api.Partition): 36
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 35
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 33
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 31
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 30
ArrayList (java.util.ArrayList): 28
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 27
SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable): 25
CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder): 23
Path (org.apache.hadoop.fs.Path): 19
Catalog (org.apache.hadoop.hive.metastore.api.Catalog): 19
Type (org.apache.hadoop.hive.metastore.api.Type): 19
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 17
TException (org.apache.thrift.TException): 16
IOException (java.io.IOException): 15