Example 41 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeDropDatabase.

private void analyzeDropDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    boolean ifExists = false;
    boolean ifCascade = false;
    if (null != ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS)) {
        ifExists = true;
    }
    if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
        ifCascade = true;
    }
    Database database = getDatabase(dbName, !ifExists);
    if (database == null) {
        return;
    }
    // if cascade=true, then we need to authorize the drop table action as well
    if (ifCascade) {
        // add the tables as well to outputs
        List<String> tableNames;
        // get names of all tables under this dbName
        try {
            tableNames = db.getAllTables(dbName);
        } catch (HiveException e) {
            throw new SemanticException(e);
        }
        // add tables to outputs
        if (tableNames != null) {
            for (String tableName : tableNames) {
                Table table = getTable(dbName, tableName, true);
                // We want no lock here, as the database lock will cover the tables,
                // and putting a lock will actually cause us to deadlock on ourselves.
                outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
            }
        }
    }
    inputs.add(new ReadEntity(database));
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));
    DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade, new ReplicationSpec());
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc)));
}
Also used: ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DropDatabaseDesc(org.apache.hadoop.hive.ql.plan.DropDatabaseDesc) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) Database(org.apache.hadoop.hive.metastore.api.Database) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
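
For context, a minimal, hypothetical sketch of the kind of statement that reaches analyzeDropDatabase, issued through Hive's Driver. The database name and session wiring are illustrative assumptions, not part of the original example, and error handling is elided:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

HiveConf conf = new HiveConf();
SessionState.start(conf);
Driver driver = new Driver(conf);
// IF EXISTS drives the ifExists flag; CASCADE drives ifCascade, which also
// registers every table in the database as a WriteEntity for authorization.
driver.run("DROP DATABASE IF EXISTS sales_db CASCADE");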

Example 42 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class Utils, method resetDbBootstrapDumpState.

public static void resetDbBootstrapDumpState(Hive hiveDb, String dbName, String uniqueKey) throws HiveException {
    Database database = hiveDb.getDatabase(dbName);
    if (database != null) {
        Map<String, String> params = database.getParameters();
        if ((params != null) && params.containsKey(uniqueKey)) {
            params.remove(uniqueKey);
            database.setParameters(params);
            hiveDb.alterDatabase(dbName, database);
            LOG.info("REPL DUMP:: Reset property for Database: {}, Property: {}", dbName, uniqueKey);
        }
    }
}
Also used: Database(org.apache.hadoop.hive.metastore.api.Database)
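
For completeness, a plausible counterpart that writes such a bootstrap-dump marker before it is later reset. This sketch reuses only the Database accessors shown above; the key and value are hypothetical:

Database database = hiveDb.getDatabase(dbName);
Map<String, String> params = database.getParameters();
if (params == null) {
    params = new HashMap<>();
}
// Record a dump-in-progress marker under a unique key (hypothetical value).
params.put(uniqueKey, Long.toString(System.currentTimeMillis()));
database.setParameters(params);
hiveDb.alterDatabase(dbName, database);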

Example 43 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class AlterDatabaseHandler, method handle.

@Override
public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
    if (!context.isTableNameEmpty()) {
        throw new SemanticException("Alter Database are not supported for table-level replication");
    }
    AlterDatabaseMessage msg = deserializer.getAlterDatabaseMessage(context.dmd.getPayload());
    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
    try {
        Database oldDb = msg.getDbObjBefore();
        Database newDb = msg.getDbObjAfter();
        AlterDatabaseDesc alterDbDesc;
        if ((oldDb.getOwnerType() == newDb.getOwnerType()) && oldDb.getOwnerName().equalsIgnoreCase(newDb.getOwnerName())) {
            // If owner information is unchanged, then DB properties would've changed
            Map<String, String> newDbProps = new HashMap<>();
            Map<String, String> dbProps = newDb.getParameters();
            for (Map.Entry<String, String> entry : dbProps.entrySet()) {
                String key = entry.getKey();
                // Ignore the keys which are local to source warehouse
                if (key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX) || key.equals(ReplicationSpec.KEY.CURR_STATE_ID.toString())) {
                    continue;
                }
                newDbProps.put(key, entry.getValue());
            }
            alterDbDesc = new AlterDatabaseDesc(actualDbName, newDbProps, context.eventOnlyReplicationSpec());
        } else {
            alterDbDesc = new AlterDatabaseDesc(actualDbName, new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()), context.eventOnlyReplicationSpec());
        }
        Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, alterDbDesc));
        context.log.debug("Added alter database task : {}:{}", alterDbTask.getId(), actualDbName);
        // Only database object is updated
        updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
        return Collections.singletonList(alterDbTask);
    } catch (Exception e) {
        throw (e instanceof SemanticException) ? (SemanticException) e : new SemanticException("Error reading message members", e);
    }
}
Also used: HashMap(java.util.HashMap) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) PrincipalDesc(org.apache.hadoop.hive.ql.plan.PrincipalDesc) AlterDatabaseMessage(org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) Database(org.apache.hadoop.hive.metastore.api.Database) AlterDatabaseDesc(org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc) Map(java.util.Map)
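
The filtering in the properties branch can be read as a single predicate. The helper below merely restates the two conditions from the loop above; it is not code from the handler:

private static boolean isSourceLocalKey(String key) {
    // Keys written by the source warehouse's bootstrap dump, plus the
    // replication state id, must not be carried over to the target.
    return key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX)
            || key.equals(ReplicationSpec.KEY.CURR_STATE_ID.toString());
}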

Example 44 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class ImportSemanticAnalyzer, method fixLocationInPartSpec.

/**
 * Helper method to set location properly in partSpec
 */
private static void fixLocationInPartSpec(FileSystem fs, ImportTableDesc tblDesc, Table table, Warehouse wh, ReplicationSpec replicationSpec, AddPartitionDesc.OnePartitionDesc partSpec, EximUtil.SemanticAnalyzerWrapperContext x) throws MetaException, HiveException, IOException {
    Path tgtPath = null;
    if (tblDesc.getLocation() == null) {
        if (table.getDataLocation() != null) {
            tgtPath = new Path(table.getDataLocation().toString(), Warehouse.makePartPath(partSpec.getPartSpec()));
        } else {
            Database parentDb = x.getHive().getDatabase(tblDesc.getDatabaseName());
            tgtPath = new Path(wh.getDefaultTablePath(parentDb, tblDesc.getTableName()), Warehouse.makePartPath(partSpec.getPartSpec()));
        }
    } else {
        tgtPath = new Path(tblDesc.getLocation(), Warehouse.makePartPath(partSpec.getPartSpec()));
    }
    FileSystem tgtFs = FileSystem.get(tgtPath.toUri(), x.getConf());
    checkTargetLocationEmpty(tgtFs, tgtPath, replicationSpec, x.getLOG());
    partSpec.setLocation(tgtPath.toString());
}
Also used: Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database)
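
To make the path construction concrete: Warehouse.makePartPath renders a partition spec as a relative directory path, which the method appends to the table, database-default, or explicit target location. A small sketch with hypothetical partition values (makePartPath throws MetaException, handling elided):

Map<String, String> spec = new LinkedHashMap<>();
spec.put("ds", "2020-01-01");
spec.put("hr", "12");
// Yields a relative path of the form "ds=2020-01-01/hr=12".
String relativePart = Warehouse.makePartPath(spec);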

Example 45 with Database

Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.

The class MacroSemanticAnalyzer, method addEntities.

private void addEntities() throws SemanticException {
    Database database = getDatabase(Warehouse.DEFAULT_DATABASE_NAME);
    // This restricts macro creation to privileged users.
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
}
Also used: Database(org.apache.hadoop.hive.metastore.api.Database) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
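
Because macros are session-global rather than owned by any one database, the analyzer anchors authorization on the default database, so only users with DDL privileges there can create macros. A hedged sketch of a statement that takes this path, reusing the Driver setup from the Example 41 note (the macro name and body are hypothetical):

driver.run("CREATE TEMPORARY MACRO sigmoid(x DOUBLE) 1.0 / (1.0 + EXP(-x))");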

Aggregations

Database (org.apache.hadoop.hive.metastore.api.Database): 236
Test (org.junit.Test): 107
Table (org.apache.hadoop.hive.metastore.api.Table): 70
ArrayList (java.util.ArrayList): 51
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 39
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 39
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 37
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 36
Partition (org.apache.hadoop.hive.metastore.api.Partition): 35
Path (org.apache.hadoop.fs.Path): 34
IOException (java.io.IOException): 29
HashMap (java.util.HashMap): 27
DatabaseBuilder (org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder): 26
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 24
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 23
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 22
TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder): 22
TException (org.apache.thrift.TException): 21
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 20
FileSystem (org.apache.hadoop.fs.FileSystem): 17