Example 31 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

the class TestHBaseStore method createDb.

@Test
public void createDb() throws Exception {
    String dbname = "mydb";
    Database db = new Database(dbname, "no description", "file:///tmp", emptyParameters);
    store.createDatabase(db);
    Database d = store.getDatabase(dbname);
    Assert.assertEquals(dbname, d.getName());
    Assert.assertEquals("no description", d.getDescription());
    Assert.assertEquals("file:///tmp", d.getLocationUri());
}
Also used : Database(org.apache.hadoop.hive.metastore.api.Database) Test(org.junit.Test)
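
The same create-then-read flow works against a live metastore through HiveMetaStoreClient instead of the RawStore used by TestHBaseStore. A minimal sketch, assuming a hive-site.xml on the classpath that points at a running metastore; the database name and location are illustrative:

import java.util.Collections;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;

public class CreateDbSketch {
    public static void main(String[] args) throws Exception {
        // Assumes hive-site.xml on the classpath points at a reachable metastore.
        HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
            Database db = new Database("mydb", "no description", "file:///tmp/mydb",
                    Collections.<String, String>emptyMap());
            client.createDatabase(db);
            Database fetched = client.getDatabase("mydb");
            System.out.println(fetched.getName() + " at " + fetched.getLocationUri());
        } finally {
            client.close();
        }
    }
}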

Example 32 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project incubator-atlas by apache.

the class HiveHook method createOrUpdateEntities.

private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity entity, boolean skipTempTables, Table existTable) throws AtlasHookException {
    try {
        Database db = null;
        Table table = null;
        Partition partition = null;
        LinkedHashMap<Type, Referenceable> result = new LinkedHashMap<>();
        List<Referenceable> entities = new ArrayList<>();
        switch(entity.getType()) {
            case DATABASE:
                db = entity.getDatabase();
                break;
            case TABLE:
                table = entity.getTable();
                db = dgiBridge.hiveClient.getDatabase(table.getDbName());
                break;
            case PARTITION:
                partition = entity.getPartition();
                table = partition.getTable();
                db = dgiBridge.hiveClient.getDatabase(table.getDbName());
                break;
            default:
                LOG.info("{}: entity-type not handled by Atlas hook. Ignored", entity.getType());
        }
        if (db != null) {
            db = dgiBridge.hiveClient.getDatabase(db.getName());
        }
        if (db != null) {
            Referenceable dbEntity = dgiBridge.createDBInstance(db);
            entities.add(dbEntity);
            result.put(Type.DATABASE, dbEntity);
            Referenceable tableEntity = null;
            if (table != null) {
                if (existTable != null) {
                    table = existTable;
                } else {
                    table = dgiBridge.hiveClient.getTable(table.getDbName(), table.getTableName());
                }
                // External tables are still registered even when skipTempTables is set, since the HDFS path is needed for temp table lineage.
                if (skipTempTables && table.isTemporary() && !TableType.EXTERNAL_TABLE.equals(table.getTableType())) {
                    LOG.debug("Skipping temporary table registration {} since it is not an external table {} ", table.getTableName(), table.getTableType().name());
                } else {
                    tableEntity = dgiBridge.createTableInstance(dbEntity, table);
                    entities.add(tableEntity);
                    result.put(Type.TABLE, tableEntity);
                }
            }
            event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
        }
        return result;
    } catch (Exception e) {
        throw new AtlasHookException("HiveHook.createOrUpdateEntities() failed.", e);
    }
}
Also used : Partition(org.apache.hadoop.hive.ql.metadata.Partition) Table(org.apache.hadoop.hive.ql.metadata.Table) ArrayList(java.util.ArrayList) AtlasHookException(org.apache.atlas.hook.AtlasHookException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) MalformedURLException(java.net.MalformedURLException) LinkedHashMap(java.util.LinkedHashMap) Type(org.apache.hadoop.hive.ql.hooks.Entity.Type) TableType(org.apache.hadoop.hive.metastore.TableType) HookNotification(org.apache.atlas.notification.hook.HookNotification) Referenceable(org.apache.atlas.typesystem.Referenceable) Database(org.apache.hadoop.hive.metastore.api.Database)
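
The switch above reduces every entity type to its parent database, looked up through the Hive client held by the bridge. A standalone sketch of that lookup, assuming a local HiveConf; the table name used here is purely illustrative:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;

public class ResolveDbSketch {
    public static void main(String[] args) throws Exception {
        // Obtain the thread-local Hive client, the same type as dgiBridge.hiveClient above.
        Hive hiveClient = Hive.get(new HiveConf());
        // "default" and "some_table" are illustrative; any existing table works.
        Table table = hiveClient.getTable("default", "some_table");
        Database db = hiveClient.getDatabase(table.getDbName());
        System.out.println(db.getName() + " -> " + db.getLocationUri());
    }
}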

Example 33 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project incubator-atlas by apache.

the class HiveMetaStoreBridgeTest method testImportThatUpdatesRegisteredDatabase.

@Test
public void testImportThatUpdatesRegisteredDatabase() throws Exception {
    // setup database
    when(hiveClient.getAllDatabases()).thenReturn(Arrays.asList(new String[] { TEST_DB_NAME }));
    String description = "This is a default database";
    when(hiveClient.getDatabase(TEST_DB_NAME)).thenReturn(new Database(TEST_DB_NAME, description, "/user/hive/default", null));
    when(hiveClient.getAllTables(TEST_DB_NAME)).thenReturn(Arrays.asList(new String[] {}));
    returnExistingDatabase(TEST_DB_NAME, atlasClient, CLUSTER_NAME);
    HiveMetaStoreBridge bridge = new HiveMetaStoreBridge(CLUSTER_NAME, hiveClient, atlasClient);
    bridge.importHiveMetadata(true);
    // verify update is called
    verify(atlasClient).updateEntity(eq("72e06b34-9151-4023-aa9d-b82103a50e76"), (Referenceable) argThat(new MatchesReferenceableProperty(HiveMetaStoreBridge.DESCRIPTION_ATTR, description)));
}
Also used : Database(org.apache.hadoop.hive.metastore.api.Database) Test(org.testng.annotations.Test)
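
MatchesReferenceableProperty is a helper matcher defined inside the test class. A rough sketch of a matcher of that shape, assuming the Mockito 1.x-style ArgumentMatcher; the class name and body are assumptions, not the project's actual implementation:

import org.apache.atlas.typesystem.Referenceable;
import org.mockito.ArgumentMatcher;

// Hypothetical sketch only; the project's MatchesReferenceableProperty may differ.
class ReferenceablePropertyMatcherSketch extends ArgumentMatcher<Object> {
    private final String attrName;
    private final Object expectedValue;

    ReferenceablePropertyMatcherSketch(String attrName, Object expectedValue) {
        this.attrName = attrName;
        this.expectedValue = expectedValue;
    }

    @Override
    public boolean matches(Object argument) {
        // A Referenceable exposes its attributes through get(name).
        return argument instanceof Referenceable
                && expectedValue.equals(((Referenceable) argument).get(attrName));
    }
}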

Example 34 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project incubator-atlas by apache.

the class HiveMetaStoreBridge method registerDatabase.

/**
     * Checks if the database is already registered with Atlas; if not, creates and registers a new
     * database entity, otherwise updates the existing one.
     * @param databaseName name of the Hive database to register
     * @return the Referenceable for the registered (or updated) database entity
     * @throws Exception if the metastore lookup or the Atlas registration/update fails
     */
private Referenceable registerDatabase(String databaseName) throws Exception {
    Referenceable dbRef = getDatabaseReference(clusterName, databaseName);
    Database db = hiveClient.getDatabase(databaseName);
    if (db != null) {
        if (dbRef == null) {
            dbRef = createDBInstance(db);
            dbRef = registerInstance(dbRef);
        } else {
            LOG.info("Database {} is already registered with id {}. Updating it.", databaseName, dbRef.getId().id);
            dbRef = createOrUpdateDBInstance(db, dbRef);
            updateInstance(dbRef);
        }
    }
    return dbRef;
}
Also used : Referenceable(org.apache.atlas.typesystem.Referenceable) Database(org.apache.hadoop.hive.metastore.api.Database)
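
The register-or-update idiom above can also be expressed directly against the metastore client, without Atlas in the picture. A minimal sketch, assuming an already-connected IMetaStoreClient; the helper name is illustrative:

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;

public class EnsureDbSketch {
    // Creates the database if it is absent, otherwise pushes the new definition as an update.
    static void ensureDatabase(IMetaStoreClient client, Database db) throws Exception {
        try {
            client.getDatabase(db.getName());
            // Already present: alter it in place with the new definition.
            client.alterDatabase(db.getName(), db);
        } catch (NoSuchObjectException absent) {
            client.createDatabase(db);
        }
    }
}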

Example 35 with Database

use of org.apache.hadoop.hive.metastore.api.Database in project brisk by riptano.

the class MetaStorePersisterTest method testBasicLoadMetaStoreEntity.

@Test
public void testBasicLoadMetaStoreEntity() throws Exception {
    setupClient();
    Database database = new Database();
    database.setName("name");
    database.setDescription("description");
    database.setLocationUri("uri");
    database.setParameters(new HashMap<String, String>());
    metaStorePersister.save(database.metaDataMap, database, database.getName());
    Database foundDb = new Database();
    foundDb.setName("name");
    metaStorePersister.load(foundDb, "name");
    assertEquals(database, foundDb);
}
Also used : Database(org.apache.hadoop.hive.metastore.api.Database)
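
Because Database is a Thrift-generated struct, it can be round-tripped through a byte[] with the standard Thrift serializer, which is roughly the kind of work a metastore persister does underneath. A minimal sketch; the choice of TBinaryProtocol is an assumption:

import java.util.HashMap;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;

public class DatabaseRoundTripSketch {
    public static void main(String[] args) throws Exception {
        Database database = new Database();
        database.setName("name");
        database.setDescription("description");
        database.setLocationUri("uri");
        database.setParameters(new HashMap<String, String>());

        // Serialize to bytes and back; the Thrift-generated equals() verifies the round trip.
        byte[] bytes = new TSerializer(new TBinaryProtocol.Factory()).serialize(database);
        Database copy = new Database();
        new TDeserializer(new TBinaryProtocol.Factory()).deserialize(copy, bytes);
        System.out.println(database.equals(copy));
    }
}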

Aggregations

Database (org.apache.hadoop.hive.metastore.api.Database): 236
Test (org.junit.Test): 107
Table (org.apache.hadoop.hive.metastore.api.Table): 70
ArrayList (java.util.ArrayList): 51
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 39
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 39
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 37
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 36
Partition (org.apache.hadoop.hive.metastore.api.Partition): 35
Path (org.apache.hadoop.fs.Path): 34
IOException (java.io.IOException): 29
HashMap (java.util.HashMap): 27
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 26
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 24
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 23
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 22
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 22
TException (org.apache.thrift.TException): 21
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 20
FileSystem (org.apache.hadoop.fs.FileSystem): 17