Search in sources :

Example 36 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project brisk by riptano.

Defined in class MetaStorePersisterTest, method testFindMetaStoreEntities.

@Test
public void testFindMetaStoreEntities() throws Exception {
    setupClient();
    // Persist a database record to own the test tables.
    Database database = new Database();
    database.setName("dbname");
    database.setDescription("description");
    database.setLocationUri("uri");
    database.setParameters(new HashMap<String, String>());
    metaStorePersister.save(database.metaDataMap, database, database.getName());
    // Persist four tables under the same database; three share the "table" prefix.
    Table table = new Table();
    table.setDbName("dbname");
    for (String tableName : new String[] { "table_one", "table_two", "table_three", "other_table" }) {
        table.setTableName(tableName);
        metaStorePersister.save(table.metaDataMap, table, table.getDbName());
    }
    // An unfiltered find should return every table in the database.
    List found = metaStorePersister.find(table, "dbname");
    assertEquals(4, found.size());
    // A prefix-filtered find should return only the three "table*" entries.
    found = metaStorePersister.find(table, "dbname", "table", 100);
    assertEquals(3, found.size());
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) Database(org.apache.hadoop.hive.metastore.api.Database)

Example 37 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project brisk by riptano.

Defined in class MetaStorePersisterTest, method testEntityDeletion.

@Test
public void testEntityDeletion() throws Exception {
    setupClient();
    // Persist a database and one table so there is something to delete.
    Database database = new Database();
    database.setName("dbname");
    database.setDescription("description");
    database.setLocationUri("uri");
    database.setParameters(new HashMap<String, String>());
    metaStorePersister.save(database.metaDataMap, database, database.getName());
    Table table = new Table();
    table.setDbName("dbname");
    table.setTableName("table_one");
    metaStorePersister.save(table.metaDataMap, table, table.getDbName());
    // Verify both entities round-trip before deletion.
    Database foundDb = new Database();
    foundDb.setName("dbname");
    metaStorePersister.load(foundDb, "dbname");
    assertEquals(database, foundDb);
    Table foundTable = new Table();
    foundTable.setDbName(table.getDbName());
    foundTable.setTableName(table.getTableName());
    metaStorePersister.load(foundTable, "dbname");
    assertEquals(table, foundTable);
    metaStorePersister.remove(foundTable, "dbname");
    metaStorePersister.remove(foundDb, "dbname");
    // FIX: the original wrapped both load/fail pairs in a single try block.
    // Once the first load threw NotFoundException, the second load and its
    // fail() were unreachable, so database removal was never verified (and
    // if the first load did NOT throw, fail() aborted before the db check).
    // Verify each removal independently.
    try {
        metaStorePersister.load(foundTable, "dbname");
        fail("table should have been removed");
    } catch (NotFoundException e) {
    // win! \o/
    }
    try {
        metaStorePersister.load(foundDb, "dbname");
        fail("database should have been removed");
    } catch (NotFoundException e) {
    // win! \o/
    }
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) Database(org.apache.hadoop.hive.metastore.api.Database) NotFoundException(org.apache.cassandra.thrift.NotFoundException)

Example 38 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project metacat by Netflix.

Defined in class HiveConvertersImpl, method metacatToHiveDatabase.

/**
 * {@inheritDoc}
 */
@Override
public Database metacatToHiveDatabase(final DatabaseDto dto) {
    final Database database = new Database();
    String name = "";
    String description = "";
    final QualifiedName databaseName = dto.getName();
    if (databaseName != null) {
        name = databaseName.getDatabaseName();
        // Since this is required setting it to the same as the DB name for now
        description = databaseName.getDatabaseName();
    }
    database.setName(name);
    database.setDescription(description);
    // The Hive Database requires a non-null location URI; fall back to "".
    String dbUri = dto.getUri();
    if (Strings.isNullOrEmpty(dbUri)) {
        dbUri = "";
    }
    database.setLocationUri(dbUri);
    Map<String, String> metadata = dto.getMetadata();
    if (metadata == null) {
        // FIX: use the type-safe Collections.emptyMap() instead of the raw
        // Collections.EMPTY_MAP, which required the method-wide
        // @SuppressWarnings("unchecked") (now removed).
        metadata = Collections.emptyMap();
    }
    database.setParameters(metadata);
    return database;
}
Also used : QualifiedName(com.netflix.metacat.common.QualifiedName) Database(org.apache.hadoop.hive.metastore.api.Database)

Example 39 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

Defined in class DDLSemanticAnalyzer, method analyzeSwitchDatabase.

private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
    // Resolve the target database name; getDatabase(..., true) validates existence.
    final String dbName = unescapeIdentifier(ast.getChild(0).getText());
    final Database database = getDatabase(dbName, true);
    // Record the database as a read input, explicitly marked as not needing a lock.
    final ReadEntity readEntity = new ReadEntity(database);
    readEntity.noLockNeeded();
    inputs.add(readEntity);
    // Queue the DDL task that performs the actual switch.
    final SwitchDatabaseDesc desc = new SwitchDatabaseDesc(dbName);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) Database(org.apache.hadoop.hive.metastore.api.Database) SwitchDatabaseDesc(org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc)

Example 40 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

Defined in class DDLSemanticAnalyzer, method analyzeCreateDatabase.

private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
    final String dbName = unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    // Child 0 is the database name; any remaining children are optional clauses.
    for (int i = 1; i < ast.getChildCount(); i++) {
        final ASTNode child = (ASTNode) ast.getChild(i);
        final int tokenType = child.getToken().getType();
        if (tokenType == HiveParser.TOK_IFNOTEXISTS) {
            ifNotExists = true;
        } else if (tokenType == HiveParser.TOK_DATABASECOMMENT) {
            dbComment = unescapeSQLString(child.getChild(0).getText());
        } else if (tokenType == TOK_DATABASEPROPERTIES) {
            dbProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));
        } else if (tokenType == TOK_DATABASELOCATION) {
            dbLocation = unescapeSQLString(child.getChild(0).getText());
            addLocationToOutputs(dbLocation);
        } else {
            throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
        }
    }
    final CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists);
    if (dbProps != null) {
        createDatabaseDesc.setDatabaseProperties(dbProps);
    }
    // Register the to-be-created database as a write entity (no lock at analysis time)
    // and queue the DDL task that performs the creation.
    final Database database = new Database(dbName, dbComment, dbLocation, dbProps);
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createDatabaseDesc)));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) CreateDatabaseDesc(org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc) ShowCreateDatabaseDesc(org.apache.hadoop.hive.ql.plan.ShowCreateDatabaseDesc) Database(org.apache.hadoop.hive.metastore.api.Database) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)

Aggregations

Database (org.apache.hadoop.hive.metastore.api.Database)236 Test (org.junit.Test)107 Table (org.apache.hadoop.hive.metastore.api.Table)70 ArrayList (java.util.ArrayList)51 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)39 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)39 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)37 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)36 Partition (org.apache.hadoop.hive.metastore.api.Partition)35 Path (org.apache.hadoop.fs.Path)34 IOException (java.io.IOException)29 HashMap (java.util.HashMap)27 DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder)26 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)24 InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)23 SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo)22 TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)22 TException (org.apache.thrift.TException)21 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)20 FileSystem (org.apache.hadoop.fs.FileSystem)17