
Example 1 with CreateDatabaseDesc

Use of org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc in project hive by apache.

From the class CreateDatabaseHook, the method authorizeDDLWork:

@Override
protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) throws HiveException {
    DDLDesc ddlDesc = work.getDDLDesc();
    if (ddlDesc instanceof CreateDatabaseDesc) {
        CreateDatabaseDesc createDb = (CreateDatabaseDesc) ddlDesc;
        // Rebuild a metastore Database object from the descriptor so the
        // authorization layer can check it like any other database.
        Database db = new Database(createDb.getName(), createDb.getComment(), createDb.getLocationUri(), createDb.getDatabaseProperties());
        // Require the CREATE privilege on the database about to be created.
        authorize(db, Privilege.CREATE);
    }
}
Also used: CreateDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc), Database (org.apache.hadoop.hive.metastore.api.Database), DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc)
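Taken together with the constructor used in Examples 2 and 3 below, the descriptor round-trips: the fields passed in can be read back through the getters the hook uses. A minimal sketch, assuming the six-argument constructor from Examples 2 and 3 and that its boolean argument is the ifNotExists flag (the names and properties here are illustrative only):

Map<String, String> props = new HashMap<>();
props.put("purpose", "demo"); // illustrative database parameter
// locationUri and managedLocationUri left null so Hive picks the defaults;
// true is taken to be the ifNotExists flag, as suggested by the comment in Example 2.
CreateDatabaseDesc desc = new CreateDatabaseDesc("demo_db", "demo database", null, null, true, props);
// The hook above reads these same fields back when building the metastore object.
Database db = new Database(desc.getName(), desc.getComment(), desc.getLocationUri(), desc.getDatabaseProperties());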

Example 2 with CreateDatabaseDesc

Use of org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc in project hive by apache.

From the class LoadDatabase, the method createDbTask:

private Task<?> createDbTask(Database dbObj) throws MetaException {
    // Note that we do not set location - for repl load, we want that auto-created.
    CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(dbObj.getName(), dbObj.getDescription(), getDbLocation(dbObj), getDbManagedLocation(dbObj), false, updateDbProps(dbObj, context.dumpDirectory));
    // ifNotExists is false above: if the database already exists, we want an error.
    // Repl Load is not intended to replace a db.
    // TODO: we might revisit this in create-drop-recreate cases; needs some thinking.
    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), createDbDesc, true, (new Path(context.dumpDirectory)).getParent().toString(), this.metricCollector);
    return TaskFactory.get(work, context.hiveConf);
}
Also used: Path (org.apache.hadoop.fs.Path), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), CreateDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc)
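The ifNotExists flag is what separates this replication path from the import path in Example 3: repl load passes false so that a pre-existing database is an error, while import passes true so the create becomes a no-op when the database already exists. A hedged sketch of the contrast, with placeholder arguments (name, desc, loc, mloc, and props stand in for whatever the caller has in scope; the flag's meaning is inferred from the in-code comment above):

// Strict: fails if the database already exists (repl load semantics, this example).
CreateDatabaseDesc strict = new CreateDatabaseDesc(name, desc, loc, mloc, false, props);
// Lenient: no-op if the database already exists (import semantics, Example 3).
CreateDatabaseDesc lenient = new CreateDatabaseDesc(name, desc, null, null, true, props);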

Example 3 with CreateDatabaseDesc

Use of org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc in project hive by apache.

From the class CreateDatabaseHandler, the method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    MetaData metaData;
    try {
        // Read the serialized database metadata written out by the dump.
        FileSystem fs = FileSystem.get(new Path(context.location).toUri(), context.hiveConf);
        metaData = EximUtil.readMetaData(fs, new Path(context.location, EximUtil.METADATA_NAME));
    } catch (IOException e) {
        throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
    }
    Database db = metaData.getDatabase();
    // Load into the database named by the caller, if any; otherwise keep the dumped name.
    String destinationDBName = context.dbName == null ? db.getName() : context.dbName;
    // ifNotExists is true here, so an already-existing database is tolerated.
    CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, null, true, db.getParameters());
    Task<DDLWork> createDBTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    if (!db.getParameters().isEmpty()) {
        // Re-apply the dumped database properties once the database exists.
        AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec());
        Task<DDLWork> alterDbProperties = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
        createDBTask.addDependentTask(alterDbProperties);
    }
    if (StringUtils.isNotEmpty(db.getOwnerName())) {
        // Restore the original owner recorded in the dump.
        AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationDBName, new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec());
        Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
        createDBTask.addDependentTask(alterDbTask);
    }
    updatedMetadata.set(context.dmd.getEventTo().toString(), destinationDBName, null, null);
    // Only the root create task is returned; the alter tasks run as its dependents.
    return Collections.singletonList(createDBTask);
}
Also used: Path (org.apache.hadoop.fs.Path), IOException (java.io.IOException), PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), CreateDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc), MetaData (org.apache.hadoop.hive.ql.parse.repl.load.MetaData), FileSystem (org.apache.hadoop.fs.FileSystem), Database (org.apache.hadoop.hive.metastore.api.Database), AlterDatabaseSetPropertiesDesc (org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc), AlterDatabaseSetOwnerDesc (org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), HashSet (java.util.HashSet)
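Note the shape of the task graph this builds: both alter tasks are attached directly to the create task, so they have no ordering constraint between each other once the database exists, and only the root create task is returned since the dependents are reachable from it. If a strict ordering between the two alters were ever needed, they could be chained serially instead. A sketch of that alternative, reusing the variable names from the method above:

createDBTask.addDependentTask(alterDbProperties); // runs after the create succeeds
alterDbProperties.addDependentTask(alterDbTask);  // owner change now waits for the properties task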

Aggregations

CreateDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc): 3 usages
Path (org.apache.hadoop.fs.Path): 2 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 2 usages
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 2 usages
IOException (java.io.IOException): 1 usage
HashSet (java.util.HashSet): 1 usage
FileSystem (org.apache.hadoop.fs.FileSystem): 1 usage
DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc): 1 usage
AlterDatabaseSetOwnerDesc (org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc): 1 usage
AlterDatabaseSetPropertiesDesc (org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc): 1 usage
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc): 1 usage
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 1 usage
MetaData (org.apache.hadoop.hive.ql.parse.repl.load.MetaData): 1 usage