Example 21 with Catalog

use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

the class TestPartitionManagement method createMetadata.

private List<String> createMetadata(String catName, String dbName, String tableName, List<String> partKeys, List<String> partKeyTypes, List<List<String>> partVals, Map<String, Column> colMap, boolean isOrc) throws TException {
    if (!DEFAULT_CATALOG_NAME.equals(catName)) {
        Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
        client.createCatalog(cat);
    }
    Database db;
    if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
        DatabaseBuilder dbBuilder = new DatabaseBuilder().setName(dbName);
        dbBuilder.setCatalogName(catName);
        db = dbBuilder.create(client, conf);
    } else {
        db = client.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
    }
    TableBuilder tb = new TableBuilder().inDb(db).setTableName(tableName);
    if (isOrc) {
        tb.setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat").setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
    }
    for (Column col : colMap.values()) {
        tb.addCol(col.colName, col.colType);
    }
    if (partKeys != null) {
        if (partKeyTypes == null) {
            throw new IllegalArgumentException("partKeyTypes cannot be null when partKeys is non-null");
        }
        if (partKeys.size() != partKeyTypes.size()) {
            throw new IllegalArgumentException("partKeys and partKeyTypes size should be same");
        }
        if (partVals.isEmpty()) {
            throw new IllegalArgumentException("partVals cannot be empty for patitioned table");
        }
        for (int i = 0; i < partKeys.size(); i++) {
            tb.addPartCol(partKeys.get(i), partKeyTypes.get(i));
        }
    }
    Table table = tb.create(client, conf);
    if (partKeys != null) {
        for (List<String> partVal : partVals) {
            new PartitionBuilder().inTable(table).setValues(partVal).addToTable(client, conf);
        }
    }
    List<String> partNames = new ArrayList<>();
    if (partKeys != null) {
        for (int i = 0; i < partKeys.size(); i++) {
            String partKey = partKeys.get(i);
            for (String partVal : partVals.get(i)) {
                String partName = partKey + "=" + partVal;
                partNames.add(partName);
            }
        }
    }
    client.flushCache();
    return partNames;
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) ArrayList(java.util.ArrayList) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database)
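
For orientation, the builder chain the helper above relies on can be condensed into a single flow. The sketch below is not from the Hive test; it assumes an already-connected IMetaStoreClient named client and a Configuration named conf, and the catalog, database, table, and column names are purely illustrative. All builder and client calls appear in the example above or in the other examples on this page.

// Minimal sketch of the catalog -> database -> table -> partition chain (illustrative names)
Catalog cat = new CatalogBuilder().setName("demo_cat").setLocation(MetaStoreTestUtils.getTestWarehouseDir("demo_cat")).build();
client.createCatalog(cat);
// Database placed inside the new catalog
Database db = new DatabaseBuilder().setName("demo_db").setCatalogName("demo_cat").create(client, conf);
// Partitioned table with one regular column and one partition column
Table table = new TableBuilder().inDb(db).setTableName("demo_tbl").addCol("id", "int").addPartCol("dt", "string").create(client, conf);
// Register a single partition for the table
new PartitionBuilder().inTable(table).addValue("2021-01-01").addToTable(client, conf);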

Example 22 with Catalog

use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

the class TestStats method createMetadata.

private List<String> createMetadata(String catName, String dbName, String tableName, String partKey, List<String> partVals, Map<String, Column> colMap) throws TException {
    if (!DEFAULT_CATALOG_NAME.equals(catName) && !NO_CAT.equals(catName)) {
        Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
        client.createCatalog(cat);
    }
    Database db;
    if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
        DatabaseBuilder dbBuilder = new DatabaseBuilder().setName(dbName);
        if (!NO_CAT.equals(catName))
            dbBuilder.setCatalogName(catName);
        db = dbBuilder.create(client, conf);
    } else {
        db = client.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
    }
    TableBuilder tb = new TableBuilder().inDb(db).setTableName(tableName);
    for (Column col : colMap.values()) tb.addCol(col.colName, col.colType);
    if (partKey != null) {
        assert partVals != null && !partVals.isEmpty() : "Must provide partition values for partitioned table";
        tb.addPartCol(partKey, ColumnType.STRING_TYPE_NAME);
    }
    Table table = tb.create(client, conf);
    if (partKey != null) {
        for (String partVal : partVals) {
            new PartitionBuilder().inTable(table).addValue(partVal).addToTable(client, conf);
        }
    }
    SetPartitionsStatsRequest rqst = new SetPartitionsStatsRequest();
    List<String> partNames = new ArrayList<>();
    if (partKey == null) {
        rqst.addToColStats(buildStatsForOneTableOrPartition(catName, dbName, tableName, null, colMap.values()));
    } else {
        for (String partVal : partVals) {
            String partName = partKey + "=" + partVal;
            rqst.addToColStats(buildStatsForOneTableOrPartition(catName, dbName, tableName, partName, colMap.values()));
            partNames.add(partName);
        }
    }
    rqst.setEngine(ENGINE);
    client.setPartitionColumnStatistics(rqst);
    return partNames;
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Table(org.apache.hadoop.hive.metastore.api.Table) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) ArrayList(java.util.ArrayList) SetPartitionsStatsRequest(org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog)
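
The helper buildStatsForOneTableOrPartition is not shown above. As a rough, hedged sketch of the kind of object it likely assembles, the Thrift-generated statistics classes from org.apache.hadoop.hive.metastore.api can be combined as below; all field values are made up, and the engine setter on ColumnStatistics is assumed to exist in this Hive version (the request-level setEngine call is the only one shown in the example).

// Hypothetical sketch of building one partition-level ColumnStatistics entry (values are illustrative)
ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, "demo_db", "demo_tbl");  // isTblLevel, dbName, tableName
desc.setCatName("demo_cat");
desc.setPartName("dt=2021-01-01");
LongColumnStatsData longStats = new LongColumnStatsData(0, 10);  // numNulls, numDVs
longStats.setLowValue(1);
longStats.setHighValue(100);
ColumnStatisticsObj obj = new ColumnStatisticsObj("id", "bigint", ColumnStatisticsData.longStats(longStats));
ColumnStatistics stats = new ColumnStatistics(desc, Collections.singletonList(obj));
stats.setEngine("hive");  // assumed per-ColumnStatistics engine field, mirroring rqst.setEngine(ENGINE) above
rqst.addToColStats(stats);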

Example 23 with Catalog

use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

the class TestCatalogs method dropNonEmptyCatalog.

@Test(expected = InvalidOperationException.class)
public void dropNonEmptyCatalog() throws TException {
    String catName = "toBeDropped";
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
    client.createCatalog(cat);
    String dbName = "dontDropMe";
    new DatabaseBuilder().setName(dbName).setCatalogName(catName).create(client, metaStore.getConf());
    client.dropCatalog(catName);
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
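
The test above asserts the failure path: dropping a catalog that still contains a database raises InvalidOperationException. For contrast, a hedged sketch of the happy path is to empty the catalog first and then drop it, using the same client calls exercised elsewhere on this page (the boolean flag names in the comment are my reading of the dropDatabase overload used in Example 25).

// Drop the contained database first, then the now-empty catalog (names match the test above)
client.dropDatabase("toBeDropped", "dontDropMe", true, false, false);  // deleteData, ignoreUnknownDb, cascade
client.dropCatalog("toBeDropped");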

Example 24 with Catalog

use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

the class TestCatalogs method alterNonExistentCatalog.

@Test(expected = NoSuchObjectException.class)
public void alterNonExistentCatalog() throws TException {
    String catName = "alter_no_such_catalog";
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
    client.alterCatalog(catName, cat);
}
Also used : CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
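
Here the alter call is expected to fail because the catalog was never created. A hedged sketch of the successful path is to create the catalog first and then alter it, for example by updating its description; the setDescription setter is assumed from the Thrift-generated Catalog class and is not used in the test above.

// Hypothetical successful alter: create the catalog, then modify and push back the same object
String catName = "alter_me";
Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
client.createCatalog(cat);
cat.setDescription("updated description");  // setter assumed from the Thrift-generated Catalog struct
client.alterCatalog(catName, cat);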

Example 25 with Catalog

use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

the class TestDatabases method databasesInCatalogs.

@Test
public void databasesInCatalogs() throws TException, URISyntaxException {
    String catName = "mycatalog";
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
    client.createCatalog(cat);
    String[] dbNames = { "db1", "db9" };
    Database[] dbs = new Database[2];
    // For this one don't specify a location to make sure it gets put in the catalog directory
    dbs[0] = new DatabaseBuilder().setName(dbNames[0]).setCatalogName(catName).create(client, metaStore.getConf());
    // For the second one, explicitly set a location to make sure it ends up in the specified place.
    String db1Location = MetaStoreTestUtils.getTestWarehouseDir(dbNames[1]);
    dbs[1] = new DatabaseBuilder().setName(dbNames[1]).setCatalogName(catName).setLocation(db1Location).create(client, metaStore.getConf());
    Database fetched = client.getDatabase(catName, dbNames[0]);
    String expectedLocation = new File(cat.getLocationUri(), dbNames[0] + ".db").toURI().toString();
    Assert.assertEquals(expectedLocation, fetched.getLocationUri() + "/");
    String db0Location = new URI(fetched.getLocationUri()).getPath();
    File dir = new File(db0Location);
    Assert.assertTrue(dir.exists() && dir.isDirectory());
    fetched = client.getDatabase(catName, dbNames[1]);
    Assert.assertEquals(new File(db1Location).toURI().toString(), fetched.getLocationUri() + "/");
    dir = new File(new URI(fetched.getLocationUri()).getPath());
    Assert.assertTrue(dir.exists() && dir.isDirectory());
    Set<String> fetchedDbs = new HashSet<>(client.getAllDatabases(catName));
    Assert.assertEquals(3, fetchedDbs.size());
    for (String dbName : dbNames) Assert.assertTrue(fetchedDbs.contains(dbName));
    fetchedDbs = new HashSet<>(client.getAllDatabases());
    Assert.assertEquals(5, fetchedDbs.size());
    Assert.assertTrue(fetchedDbs.contains(Warehouse.DEFAULT_DATABASE_NAME));
    // Intentionally using the deprecated method to make sure it returns correct results.
    fetchedDbs = new HashSet<>(client.getAllDatabases());
    Assert.assertEquals(5, fetchedDbs.size());
    Assert.assertTrue(fetchedDbs.contains(Warehouse.DEFAULT_DATABASE_NAME));
    fetchedDbs = new HashSet<>(client.getDatabases(catName, "d*"));
    Assert.assertEquals(3, fetchedDbs.size());
    for (String dbName : dbNames) Assert.assertTrue(fetchedDbs.contains(dbName));
    fetchedDbs = new HashSet<>(client.getDatabases("d*"));
    Assert.assertEquals(1, fetchedDbs.size());
    Assert.assertTrue(fetchedDbs.contains(Warehouse.DEFAULT_DATABASE_NAME));
    // Intentionally using the deprecated method to make sure it returns correct results.
    fetchedDbs = new HashSet<>(client.getDatabases("d*"));
    Assert.assertEquals(1, fetchedDbs.size());
    Assert.assertTrue(fetchedDbs.contains(Warehouse.DEFAULT_DATABASE_NAME));
    fetchedDbs = new HashSet<>(client.getDatabases(catName, "*1"));
    Assert.assertEquals(1, fetchedDbs.size());
    Assert.assertTrue(fetchedDbs.contains(dbNames[0]));
    fetchedDbs = new HashSet<>(client.getDatabases("*9"));
    Assert.assertEquals(0, fetchedDbs.size());
    // Intentionally using the deprecated method to make sure it returns correct results.
    fetchedDbs = new HashSet<>(client.getDatabases("*9"));
    Assert.assertEquals(0, fetchedDbs.size());
    fetchedDbs = new HashSet<>(client.getDatabases(catName, "*x"));
    Assert.assertEquals(0, fetchedDbs.size());
    // Check that dropping database from wrong catalog fails
    try {
        client.dropDatabase(dbNames[0], true, false, false);
        Assert.fail();
    } catch (NoSuchObjectException e) {
    // NOP
    }
    // Check that dropping database from wrong catalog fails
    try {
        // Intentionally using deprecated method
        client.dropDatabase(dbNames[0], true, false, false);
        Assert.fail();
    } catch (NoSuchObjectException e) {
    // NOP
    }
    // Drop them from the proper catalog
    client.dropDatabase(catName, dbNames[0], true, false, false);
    dir = new File(db0Location);
    Assert.assertFalse(dir.exists());
    client.dropDatabase(catName, dbNames[1], true, false, false);
    dir = new File(db1Location);
    Assert.assertFalse(dir.exists());
    fetchedDbs = new HashSet<>(client.getAllDatabases(catName));
    Assert.assertEquals(1, fetchedDbs.size());
}
Also used : URI(java.net.URI) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) File(java.io.File) HashSet(java.util.HashSet) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
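
The test reads databases back but never re-fetches the catalog itself. As a small follow-up sketch, the catalog metadata can be read back through the client's catalog lookup methods; getCatalog and getCatalogs are assumed here, since they are part of the catalog API but are not exercised in the example above.

// Hedged sketch: fetch the catalog created above and confirm it is listed
Catalog fetchedCat = client.getCatalog("mycatalog");  // single-argument lookup, assumed
Assert.assertEquals("mycatalog", fetchedCat.getName());
Assert.assertTrue(client.getCatalogs().contains("mycatalog"));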

Aggregations

Catalog (org.apache.hadoop.hive.metastore.api.Catalog) 43
CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) 34
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) 26
Test (org.junit.Test) 23
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest) 19
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder) 19
Database (org.apache.hadoop.hive.metastore.api.Database) 18
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 15
Table (org.apache.hadoop.hive.metastore.api.Table) 11
ArrayList (java.util.ArrayList) 8
Before (org.junit.Before) 8
Partition (org.apache.hadoop.hive.metastore.api.Partition) 6
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) 6
File (java.io.File) 4
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 4
ISchema (org.apache.hadoop.hive.metastore.api.ISchema) 4
ISchemaBuilder (org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder) 4
HashMap (java.util.HashMap) 3
HashSet (java.util.HashSet) 3
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException) 3