
Example 31 with CatalogBuilder

use of org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder in project hive by apache.

the class TestCatalogCaching method createObjectStore.

@Before
public void createObjectStore() throws MetaException, InvalidOperationException {
    conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
    MetaStoreTestUtils.setConfForStandloneMode(conf);
    objectStore = new ObjectStore();
    objectStore.setConf(conf);
    // Create three catalogs: the default catalog plus cat1 and cat2
    HMSHandler.createDefaultCatalog(objectStore, new Warehouse(conf));
    Catalog cat1 = new CatalogBuilder().setName(CAT1_NAME).setLocation("/tmp/cat1").build();
    objectStore.createCatalog(cat1);
    Catalog cat2 = new CatalogBuilder().setName(CAT2_NAME).setLocation("/tmp/cat2").build();
    objectStore.createCatalog(cat2);
}
Also used : ObjectStore(org.apache.hadoop.hive.metastore.ObjectStore) Warehouse(org.apache.hadoop.hive.metastore.Warehouse) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Before(org.junit.Before)
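A matching cleanup is not shown on this page. A minimal @After sketch, assuming ObjectStore exposes dropCatalog(String) as the counterpart of createCatalog and that CAT1_NAME/CAT2_NAME are the constants used above, might look like this:

@After
public void dropCatalogs() throws MetaException, NoSuchObjectException {
    // Assumption: drop only the two extra catalogs created in @Before; the default catalog stays.
    objectStore.dropCatalog(CAT1_NAME);
    objectStore.dropCatalog(CAT2_NAME);
    objectStore.shutdown();
}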

Example 32 with CatalogBuilder

use of org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder in project hive by apache.

the class TestAppendPartitions method otherCatalog.

@Test
@ConditionalIgnoreOnSessionHiveMetastoreClient
public void otherCatalog() throws TException {
    String catName = "append_partition_catalog";
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
    client.createCatalog(cat);
    String dbName = "append_partition_database_in_other_catalog";
    Database db = new DatabaseBuilder().setName(dbName).setCatalogName(catName).create(client, metaStore.getConf());
    String tableName = "table_in_other_catalog";
    new TableBuilder().inDb(db).setTableName(tableName).addCol("id", "int").addCol("name", "string").addPartCol("partcol", "string").create(client, metaStore.getConf());
    Partition created = client.appendPartition(catName, dbName, tableName, Collections.singletonList("a1"));
    Assert.assertEquals(1, created.getValuesSize());
    Assert.assertEquals("a1", created.getValues().get(0));
    Partition fetched = client.getPartition(catName, dbName, tableName, Collections.singletonList("a1"));
    created.setWriteId(fetched.getWriteId());
    Assert.assertEquals(created, fetched);
    created = client.appendPartition(catName, dbName, tableName, "partcol=a2");
    Assert.assertEquals(1, created.getValuesSize());
    Assert.assertEquals("a2", created.getValues().get(0));
    fetched = client.getPartition(catName, dbName, tableName, Collections.singletonList("a2"));
    created.setWriteId(fetched.getWriteId());
    Assert.assertEquals(created, fetched);
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) Partition(org.apache.hadoop.hive.metastore.api.Partition) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
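The excerpt above creates a catalog, database, and table but does not tear them down. A hedged cleanup sketch, reusing only client calls that appear in the other examples on this page, could be:

// Illustrative cleanup for the objects created in otherCatalog(); not part of the original test excerpt.
client.dropTable(catName, dbName, tableName);
client.dropDatabase(catName, dbName, true, true, true);
client.dropCatalog(catName);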

Example 33 with CatalogBuilder

use of org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder in project hive by apache.

the class TestCheckConstraint method setUp.

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE_NAME)) {
        client.dropTable(DEFAULT_DATABASE_NAME, tableName, true, true, true);
    }
    client.dropDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG, true, true, true);
    try {
        client.dropCatalog(OTHER_CATALOG);
    } catch (NoSuchObjectException e) {
        // NOP
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    Catalog cat = new CatalogBuilder().setName(OTHER_CATALOG).setLocation(MetaStoreTestUtils.getTestWarehouseDir(OTHER_CATALOG)).build();
    client.createCatalog(cat);
    // For this one don't specify a location to make sure it gets put in the catalog directory
    inOtherCatalog = new DatabaseBuilder().setName(DATABASE_IN_OTHER_CATALOG).setCatalogName(OTHER_CATALOG).create(client, metaStore.getConf());
    testTables[0] = new TableBuilder().setTableName("test_table_1").addCol("col1", "int").addCol("col2", "varchar(32)").create(client, metaStore.getConf());
    testTables[1] = new TableBuilder().setDbName(OTHER_DATABASE).setTableName("test_table_2").addCol("col1", "int").addCol("col2", "varchar(32)").create(client, metaStore.getConf());
    testTables[2] = new TableBuilder().inDb(inOtherCatalog).setTableName("test_table_3").addCol("col1", "int").addCol("col2", "varchar(32)").create(client, metaStore.getConf());
    // Reload tables from the MetaStore
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(), testTables[i].getTableName());
    }
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) Before(org.junit.Before)
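Only setUp is shown here. The matching tearDown is an assumption rather than a copy from TestCheckConstraint, but it typically just releases the client, since the next setUp run re-creates the catalog and databases:

@After
public void tearDown() throws Exception {
    // Close the metastore client obtained in setUp; object cleanup happens at the top of setUp.
    if (client != null) {
        client.close();
    }
    client = null;
}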

Example 34 with CatalogBuilder

use of org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder in project hive by apache.

the class TestTablesCreateDropAlterTruncate method tablesInOtherCatalogs.

@Test
public void tablesInOtherCatalogs() throws TException, URISyntaxException {
    String catName = "create_etc_tables_in_other_catalogs";
    Catalog cat = new CatalogBuilder().setName(catName).setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName)).build();
    client.createCatalog(cat);
    String dbName = "db_in_other_catalog";
    // For this one don't specify a location to make sure it gets put in the catalog directory
    Database db = new DatabaseBuilder().setName(dbName).setCatalogName(catName).create(client, metaStore.getConf());
    Table table = new TableBuilder().inDb(db).setTableName("mvSource").addCol("col1_1", ColumnType.STRING_TYPE_NAME).addCol("col2_2", ColumnType.INT_TYPE_NAME).build(metaStore.getConf());
    client.createTable(table);
    SourceTable sourceTable = createSourceTable(table);
    String[] tableNames = new String[4];
    for (int i = 0; i < tableNames.length; i++) {
        tableNames[i] = "table_in_other_catalog_" + i;
        TableBuilder builder = new TableBuilder().inDb(db).setTableName(tableNames[i]).addCol("col1_" + i, ColumnType.STRING_TYPE_NAME).addCol("col2_" + i, ColumnType.INT_TYPE_NAME);
        // Make one have a non-standard location
        if (i == 0) {
            builder.setLocation(MetaStoreTestUtils.getTestWarehouseDir(tableNames[i]));
        }
        // Make one partitioned
        if (i == 2) {
            builder.addPartCol("pcol1", ColumnType.STRING_TYPE_NAME);
        }
        // Make one a materialized view
        if (i == 3) {
            builder.setType(TableType.MATERIALIZED_VIEW.name()).setRewriteEnabled(true).addMaterializedViewReferencedTable(sourceTable);
        }
        client.createTable(builder.build(metaStore.getConf()));
    }
    // Add partitions for the partitioned table
    String[] partVals = new String[3];
    Table partitionedTable = client.getTable(catName, dbName, tableNames[2]);
    for (int i = 0; i < partVals.length; i++) {
        partVals[i] = "part" + i;
        new PartitionBuilder().inTable(partitionedTable).addValue(partVals[i]).addToTable(client, metaStore.getConf());
    }
    // Get tables, make sure the locations are correct
    for (int i = 0; i < tableNames.length; i++) {
        Table t = client.getTable(catName, dbName, tableNames[i]);
        Assert.assertEquals(catName, t.getCatName());
        String expectedLocation = (i < 1) ? new File(MetaStoreTestUtils.getTestWarehouseDir(tableNames[i])).toURI().toString() : new File(cat.getLocationUri() + File.separatorChar + dbName + ".db", tableNames[i]).toURI().toString();
        Assert.assertEquals(expectedLocation, t.getSd().getLocation() + "/");
        File dir = new File(new URI(t.getSd().getLocation()).getPath());
        Assert.assertTrue(dir.exists() && dir.isDirectory());
    }
    // Make sure getting table in the wrong catalog does not work
    try {
        Table t = client.getTable(DEFAULT_DATABASE_NAME, tableNames[0]);
        Assert.fail();
    } catch (NoSuchObjectException e) {
        // NOP
    }
    // test getAllTables
    Set<String> fetchedNames = new HashSet<>(client.getAllTables(catName, dbName));
    Assert.assertEquals(tableNames.length + 1, fetchedNames.size());
    for (String tableName : tableNames) {
        Assert.assertTrue(fetchedNames.contains(tableName));
    }
    fetchedNames = new HashSet<>(client.getAllTables(DEFAULT_DATABASE_NAME));
    for (String tableName : tableNames) {
        Assert.assertFalse(fetchedNames.contains(tableName));
    }
    // test getMaterializedViewsForRewriting
    List<String> materializedViews = client.getMaterializedViewsForRewriting(catName, dbName);
    Assert.assertEquals(1, materializedViews.size());
    Assert.assertEquals(tableNames[3], materializedViews.get(0));
    fetchedNames = new HashSet<>(client.getMaterializedViewsForRewriting(DEFAULT_DATABASE_NAME));
    Assert.assertFalse(fetchedNames.contains(tableNames[3]));
    // test getTableObjectsByName
    List<Table> fetchedTables = client.getTableObjectsByName(catName, dbName, Arrays.asList(tableNames[0], tableNames[1]));
    Assert.assertEquals(2, fetchedTables.size());
    Collections.sort(fetchedTables);
    Assert.assertEquals(tableNames[0], fetchedTables.get(0).getTableName());
    Assert.assertEquals(tableNames[1], fetchedTables.get(1).getTableName());
    fetchedTables = client.getTableObjectsByName(DEFAULT_DATABASE_NAME, Arrays.asList(tableNames[0], tableNames[1]));
    Assert.assertEquals(0, fetchedTables.size());
    // Test altering the table
    Table t = client.getTable(catName, dbName, tableNames[0]).deepCopy();
    t.getParameters().put("test", "test");
    client.alter_table(catName, dbName, tableNames[0], t);
    t = client.getTable(catName, dbName, tableNames[0]).deepCopy();
    Assert.assertEquals("test", t.getParameters().get("test"));
    // Alter a table in the wrong catalog
    try {
        client.alter_table(DEFAULT_DATABASE_NAME, tableNames[0], t);
        Assert.fail();
    } catch (InvalidOperationException e) {
        // NOP
    }
    // Update the metadata for the materialized view
    CreationMetadata cm = client.getTable(catName, dbName, tableNames[3]).getCreationMetadata();
    Table table1 = new TableBuilder().inDb(db).setTableName("mvSource2").addCol("col1_1", ColumnType.STRING_TYPE_NAME).addCol("col2_2", ColumnType.INT_TYPE_NAME).build(metaStore.getConf());
    client.createTable(table1);
    sourceTable = createSourceTable(table1);
    cm.addToTablesUsed(TableName.getDbTable(sourceTable.getTable().getDbName(), sourceTable.getTable().getTableName()));
    cm.addToSourceTables(sourceTable);
    cm.unsetMaterializationTime();
    client.updateCreationMetadata(catName, dbName, tableNames[3], cm);
    List<String> partNames = new ArrayList<>();
    for (String partVal : partVals) {
        partNames.add("pcol1=" + partVal);
    }
    // Truncate a table
    client.truncateTable(catName, dbName, tableNames[0], partNames);
    // Truncate a table in the wrong catalog
    try {
        client.truncateTable(DEFAULT_DATABASE_NAME, tableNames[0], partNames);
        Assert.fail();
    } catch (NoSuchObjectException | TApplicationException e) {
        // NOP
    }
    // Drop a table from the wrong catalog
    try {
        client.dropTable(DEFAULT_DATABASE_NAME, tableNames[0], true, false);
        Assert.fail();
    } catch (NoSuchObjectException | TApplicationException e) {
        // NOP
    }
    // Should ignore the failure
    client.dropTable(DEFAULT_DATABASE_NAME, tableNames[0], false, true);
    // Have to do this in reverse order so that we drop the materialized view first.
    for (int i = tableNames.length - 1; i >= 0; i--) {
        t = client.getTable(catName, dbName, tableNames[i]);
        File tableDir = new File(new URI(t.getSd().getLocation()).getPath());
        Assert.assertTrue(tableDir.exists() && tableDir.isDirectory());
        if (tableNames[i].equalsIgnoreCase(tableNames[0])) {
            client.dropTable(catName, dbName, tableNames[i], false, false);
            Assert.assertTrue(tableDir.exists() && tableDir.isDirectory());
        } else {
            client.dropTable(catName, dbName, tableNames[i]);
            Assert.assertFalse(tableDir.exists());
        }
    }
    client.dropTable(table.getCatName(), table.getDbName(), table.getTableName());
    client.dropTable(table1.getCatName(), table1.getDbName(), table1.getTableName());
    Assert.assertEquals(0, client.getAllTables(catName, dbName).size());
}
Also used : SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.metastore.api.Table) TestHiveMetaStore.createSourceTable(org.apache.hadoop.hive.metastore.TestHiveMetaStore.createSourceTable) ArrayList(java.util.ArrayList) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) TestHiveMetaStore.createSourceTable(org.apache.hadoop.hive.metastore.TestHiveMetaStore.createSourceTable) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) URI(java.net.URI) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) TApplicationException(org.apache.thrift.TApplicationException) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CreationMetadata(org.apache.hadoop.hive.metastore.api.CreationMetadata) PartitionBuilder(org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Database(org.apache.hadoop.hive.metastore.api.Database) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) File(java.io.File) HashSet(java.util.HashSet) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
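One convention worth noting from the location assertions in this test: a table created without an explicit location is expected to land under <catalog location>/<database name>.db/<table name>. A small illustrative snippet (the concrete path values here are placeholders, not taken from the test):

// Illustrative only: deriving the expected default location of a table in a non-default catalog.
String catalogLocation = "/tmp/warehouse/create_etc_tables_in_other_catalogs"; // assumed example path
String dbName = "db_in_other_catalog";
String tableName = "table_in_other_catalog_1";
String expected = new File(catalogLocation + File.separatorChar + dbName + ".db", tableName)
        .toURI().toString();
// e.g. file:/tmp/warehouse/create_etc_tables_in_other_catalogs/db_in_other_catalog.db/table_in_other_catalog_1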

Example 35 with CatalogBuilder

use of org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder in project hive by apache.

the class TestGetAllTableConstraints method setUp.

@Before
public void setUp() throws Exception {
    // Get new client
    client = metaStore.getClient();
    // Clean up the database
    client.dropDatabase(OTHER_DATABASE, true, true, true);
    // Drop every table in the default database
    for (String tableName : client.getAllTables(DEFAULT_DATABASE_NAME)) {
        client.dropTable(DEFAULT_DATABASE_NAME, tableName, true, true, true);
    }
    client.dropDatabase(OTHER_CATALOG, DATABASE_IN_OTHER_CATALOG, true, true, true);
    try {
        client.dropCatalog(OTHER_CATALOG);
    } catch (NoSuchObjectException e) {
        // NOP
    }
    // Clean up trash
    metaStore.cleanWarehouseDirs();
    new DatabaseBuilder().setName(OTHER_DATABASE).create(client, metaStore.getConf());
    Catalog cat = new CatalogBuilder().setName(OTHER_CATALOG).setLocation(MetaStoreTestUtils.getTestWarehouseDir(OTHER_CATALOG)).build();
    client.createCatalog(cat);
    testTables[0] = new TableBuilder().setTableName("test_table_1").addCol("col1", "int").addCol("col2", "int").addCol("col3", "boolean").addCol("col4", "int").addCol("col5", "varchar(32)").create(client, metaStore.getConf());
    testTables[1] = new TableBuilder().setDbName(OTHER_DATABASE).setTableName("test_table_2").addCol("col1", "int").addCol("col2", "varchar(32)").create(client, metaStore.getConf());
    // Reload tables from the MetaStore
    for (int i = 0; i < testTables.length; i++) {
        testTables[i] = client.getTable(testTables[i].getCatName(), testTables[i].getDbName(), testTables[i].getTableName());
    }
}
Also used : DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) Before(org.junit.Before)
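The setUp above relies on fields and constants declared elsewhere in the test class. The declarations below are hypothetical stand-ins, written only to make the snippet self-explanatory; the real names and sizes in TestGetAllTableConstraints may differ (DEFAULT_DATABASE_NAME is typically the constant from org.apache.hadoop.hive.metastore.Warehouse):

// Hypothetical field declarations; names and values are assumptions, not copied from the test.
private static final String OTHER_DATABASE = "test_constraints_other_database";
private static final String OTHER_CATALOG = "test_constraints_other_catalog";
private static final String DATABASE_IN_OTHER_CATALOG = "test_constraints_database_in_other_catalog";
private final Table[] testTables = new Table[2];
private IMetaStoreClient client;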

Aggregations

CatalogBuilder (org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder): 43
Catalog (org.apache.hadoop.hive.metastore.api.Catalog): 34
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 30
Test (org.junit.Test): 28
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 23
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 23
Database (org.apache.hadoop.hive.metastore.api.Database): 19
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 13
Table (org.apache.hadoop.hive.metastore.api.Table): 10
PartitionBuilder (org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder): 10
Before (org.junit.Before): 8
Partition (org.apache.hadoop.hive.metastore.api.Partition): 6
ArrayList (java.util.ArrayList): 5
HashSet (java.util.HashSet): 5
File (java.io.File): 4
ISchema (org.apache.hadoop.hive.metastore.api.ISchema): 4
ISchemaBuilder (org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder): 4
SchemaVersion (org.apache.hadoop.hive.metastore.api.SchemaVersion): 3
SchemaVersionBuilder (org.apache.hadoop.hive.metastore.client.builder.SchemaVersionBuilder): 3
URI (java.net.URI): 2