Example 36 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

From the class TestGetAllTableConstraints, method setUp:

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE_NAME)) {
        client.dropTable(DEFAULT_DATABASE_NAME, tableName, true, true, true);
    }
    client.dropDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG, true, true, true);
    try {
        client.dropCatalog(OTHER_CATALOG);
    } catch (NoSuchObjectException e) {
    // NOP
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    Catalog cat = new CatalogBuilder()
        .setName(OTHER_CATALOG)
        .setLocation(MetaStoreTestUtils.getTestWarehouseDir(OTHER_CATALOG))
        .build();
    client.createCatalog(cat);
    testTables[0] = new TableBuilder()
        .setTableName("test_table_1")
        .addCol("col1", "int")
        .addCol("col2", "int")
        .addCol("col3", "boolean")
        .addCol("col4", "int")
        .addCol("col5", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[1] = new TableBuilder()
        .setDbName(OTHER_DATABASE)
        .setTableName("test_table_2")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    // Reload tables from the MetaStore
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(), testTables[i].getTableName());
    }
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) Before(org.junit.Before)
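
The setUp above only prepares state; the matching @After hook is not shown on this page. As a minimal sketch (assuming the same client field and an org.junit.After import, not copied from the Hive sources), the teardown usually just releases the Metastore client, since the next setUp run re-drops the databases, tables and the extra catalog anyway:

@After
public void tearDown() throws Exception {
    // Release the client obtained in setUp(); leftover objects are cleaned up
    // again at the start of the next setUp() run.
    try {
        if (client != null) {
            client.close();
        }
    } finally {
        client = null;
    }
}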

Example 37 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

From the class TestCatalogs, method catalogOperations:

@Test
public void catalogOperations() throws TException {
    String[] catNames = { "cat1", "cat2", "ADifferentName" };
    String[] description = { "a description", "super descriptive", null };
    String[] location = {
        MetaStoreTestUtils.getTestWarehouseDir("cat1"),
        MetaStoreTestUtils.getTestWarehouseDir("cat2"),
        MetaStoreTestUtils.getTestWarehouseDir("different")
    };
    for (int i = 0; i < catNames.length; i++) {
        Catalog cat = new CatalogBuilder()
            .setName(catNames[i])
            .setLocation(location[i])
            .setDescription(description[i])
            .build();
        client.createCatalog(cat);
        File dir = new File(cat.getLocationUri());
        Assert.assertTrue(dir.exists() && dir.isDirectory());
    }
    for (int i = 0; i < catNames.length; i++) {
        Catalog cat = client.getCatalog(catNames[i]);
        Assert.assertTrue(catNames[i].equalsIgnoreCase(cat.getName()));
        Assert.assertEquals(description[i], cat.getDescription());
        Assert.assertEquals(location[i], cat.getLocationUri());
        Assert.assertTrue("Create time of catalog should be set", cat.isSetCreateTime());
        Assert.assertTrue("Create time of catalog should be non-zero", cat.getCreateTime() > 0);
        File dir = new File(cat.getLocationUri());
        Assert.assertTrue(dir.exists() && dir.isDirectory());
        // Make sure there's a default database associated with each catalog
        Database db = client.getDatabase(catNames[i], DEFAULT_DATABASE_NAME);
        Assert.assertEquals("file:" + cat.getLocationUri(), db.getLocationUri());
    }
    List<String> catalogs = client.getCatalogs();
    Assert.assertEquals(4, catalogs.size());
    catalogs.sort(Comparator.naturalOrder());
    List<String> expected = new ArrayList<>(catNames.length + 1);
    expected.add(Warehouse.DEFAULT_CATALOG_NAME);
    expected.addAll(Arrays.asList(catNames));
    expected.sort(Comparator.naturalOrder());
    for (int i = 0; i < catalogs.size(); i++) {
        Assert.assertTrue("Expected " + expected.get(i) + " actual " + catalogs.get(i), catalogs.get(i).equalsIgnoreCase(expected.get(i)));
    }
    // Update catalogs
    // Update location
    Catalog newCat = new Catalog(client.getCatalog(catNames[0]));
    String newLocation = MetaStoreTestUtils.getTestWarehouseDir("a_different_location");
    newCat.setLocationUri(newLocation);
    client.alterCatalog(catNames[0], newCat);
    Catalog fetchedNewCat = client.getCatalog(catNames[0]);
    Assert.assertEquals(newLocation, fetchedNewCat.getLocationUri());
    Assert.assertEquals(description[0], fetchedNewCat.getDescription());
    // Update description
    newCat = new Catalog(client.getCatalog(catNames[1]));
    String newDescription = "an even more descriptive description";
    newCat.setDescription(newDescription);
    client.alterCatalog(catNames[1], newCat);
    fetchedNewCat = client.getCatalog(catNames[1]);
    Assert.assertEquals(location[1], fetchedNewCat.getLocationUri());
    Assert.assertEquals(newDescription, fetchedNewCat.getDescription());
    for (int i = 0; i < catNames.length; i++) {
        client.dropCatalog(catNames[i]);
        File dir = new File(location[i]);
        Assert.assertFalse(dir.exists());
    }
    catalogs = client.getCatalogs();
    Assert.assertEquals(1, catalogs.size());
    Assert.assertTrue(catalogs.get(0).equalsIgnoreCase(Warehouse.DEFAULT_CATALOG_NAME));
}
Also used : CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) ArrayList(java.util.ArrayList) File(java.io.File) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
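
Condensed from catalogOperations above, the basic lifecycle is create, read back, alter, drop. The sketch below assumes the same client fixture and a writable test warehouse directory; the catalog name demo_cat is made up and not part of the Hive test:

// Minimal catalog lifecycle, distilled from the assertions above.
Catalog cat = new CatalogBuilder()
    .setName("demo_cat")
    .setLocation(MetaStoreTestUtils.getTestWarehouseDir("demo_cat"))
    .setDescription("scratch catalog")
    .build();
client.createCatalog(cat);                        // also creates the warehouse directory

Catalog fetched = client.getCatalog("demo_cat");  // read it back
fetched.setDescription("updated description");
client.alterCatalog("demo_cat", fetched);         // location and description can be altered

client.dropCatalog("demo_cat");                   // removes the catalog and its directory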

Example 38 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

From the class TestCatalogs, method alterChangeName:

@Test(expected = InvalidOperationException.class)
public void alterChangeName() throws TException {
    String catName = "alter_change_name";
    String location = MetaStoreTestUtils.getTestWarehouseDir(catName);
    String description = "I have a bad feeling about this";
    new CatalogBuilder().setName(catName).setLocation(location).setDescription(description).create(client);
    Catalog newCat = client.getCatalog(catName);
    newCat.setName("you_may_call_me_tim");
    client.alterCatalog(catName, newCat);
}
Also used : CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
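
Because alterCatalog rejects a name change with InvalidOperationException, a rename effectively means creating a catalog under the new name and dropping the old one. A rough sketch, assuming the old catalog is empty apart from its default database (names reused from the test above purely for illustration):

Catalog old = client.getCatalog("alter_change_name");
Catalog renamed = new CatalogBuilder()
    .setName("you_may_call_me_tim")
    .setLocation(MetaStoreTestUtils.getTestWarehouseDir("you_may_call_me_tim"))
    .setDescription(old.getDescription())
    .build();
client.createCatalog(renamed);
// Any databases or tables in the old catalog would have to be migrated by hand here.
client.dropCatalog("alter_change_name");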

Example 39 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

From the class TestNotNullConstraint, method setUp:

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE_NAME)) {
        client.dropTable(DEFAULT_DATABASE_NAME, tableName, true, true, true);
    }
    client.dropDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG, true, true, true);
    try {
        client.dropCatalog(OTHER_CATALOG);
    } catch (NoSuchObjectException e) {
    // NOP
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    Catalog cat = new CatalogBuilder()
        .setName(OTHER_CATALOG)
        .setLocation(MetaStoreTestUtils.getTestWarehouseDir(OTHER_CATALOG))
        .build();
    client.createCatalog(cat);
    // For this one don't specify a location to make sure it gets put in the catalog directory
    inOtherCatalog = new DatabaseBuilder()
        .setName(DATABASE_IN_OTHER_CATALOG)
        .setCatalogName(OTHER_CATALOG)
        .create(client, metaStore.getConf());
    testTables[0] = new TableBuilder()
        .setTableName("test_table_1")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[1] = new TableBuilder()
        .setDbName(OTHER_DATABASE)
        .setTableName("test_table_2")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[2] = new TableBuilder()
        .inDb(inOtherCatalog)
        .setTableName("test_table_3")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    // Reload tables from the MetaStore
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(), testTables[i].getTableName());
    }
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) Before(org.junit.Before)
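
The comment in setUp about not setting a location for DATABASE_IN_OTHER_CATALOG can be verified directly: a database created without an explicit location should end up under its catalog's warehouse directory. A sketch of such a check, reusing the fixture fields from setUp (the exact directory naming scheme is deliberately left unchecked):

Database db = client.getDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG);
Catalog otherCat = client.getCatalog(OTHER_CATALOG);
// The database location is expected to sit somewhere below the catalog location.
Assert.assertTrue(db.getLocationUri().contains(otherCat.getLocationUri()));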

Example 40 with Catalog

Use of org.apache.hadoop.hive.metastore.api.Catalog in project hive by apache.

From the class TestUniqueConstraint, method setUp:

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE_NAME)) {
        client.dropTable(DEFAULT_DATABASE_NAME, tableName, true, true, true);
    }
    client.dropDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG, true, true, true);
    try {
        client.dropCatalog(OTHER_CATALOG);
    } catch (NoSuchObjectException e) {
    // NOP
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    Catalog cat = new CatalogBuilder()
        .setName(OTHER_CATALOG)
        .setLocation(MetaStoreTestUtils.getTestWarehouseDir(OTHER_CATALOG))
        .build();
    client.createCatalog(cat);
    // For this one don't specify a location to make sure it gets put in the catalog directory
    inOtherCatalog = new DatabaseBuilder()
        .setName(DATABASE_IN_OTHER_CATALOG)
        .setCatalogName(OTHER_CATALOG)
        .create(client, metaStore.getConf());
    testTables[0] = new TableBuilder()
        .setTableName("test_table_1")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[1] = new TableBuilder()
        .setDbName(OTHER_DATABASE)
        .setTableName("test_table_2")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    testTables[2] = new TableBuilder()
        .inDb(inOtherCatalog)
        .setTableName("test_table_3")
        .addCol("col1", "int")
        .addCol("col2", "varchar(32)")
        .create(client, metaStore.getConf());
    // Reload tables from the MetaStore
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(), testTables[i].getTableName());
    }
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) Before(org.junit.Before)

Aggregations

Catalog (org.apache.hadoop.hive.metastore.api.Catalog): 43 uses
CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder): 34 uses
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 26 uses
Test (org.junit.Test): 23 uses
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 19 uses
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 19 uses
Database (org.apache.hadoop.hive.metastore.api.Database): 18 uses
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 15 uses
Table (org.apache.hadoop.hive.metastore.api.Table): 11 uses
ArrayList (java.util.ArrayList): 8 uses
Before (org.junit.Before): 8 uses
Partition (org.apache.hadoop.hive.metastore.api.Partition): 6 uses
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 6 uses
File (java.io.File): 4 uses
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 4 uses
ISchema (org.apache.hadoop.hive.metastore.api.ISchema): 4 uses
ISchemaBuilder (org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder): 4 uses
HashMap (java.util.HashMap): 3 uses
HashSet (java.util.HashSet): 3 uses
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 3 uses