
Example 1 with DDLDesc

Use of org.apache.hadoop.hive.ql.ddl.DDLDesc in project hive by apache.

The class AbstractWMMappingAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() < 4 || root.getChildCount() > 5) {
        throw new SemanticException("Invalid syntax for create or alter mapping.");
    }
    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
    String entityType = root.getChild(1).getText();
    String entityName = PlanUtils.stripQuotes(root.getChild(2).getText());
    // Null path => unmanaged.
    String poolPath = root.getChild(3).getType() == HiveParser.TOK_UNMANAGED
            ? null : WMUtils.poolPath(root.getChild(3));
    Integer ordering = root.getChildCount() == 5 ? Integer.valueOf(root.getChild(4).getText()) : null;
    DDLDesc desc = getDesc(resourcePlanName, entityType, entityName, poolPath, ordering);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
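
Subclasses of the abstract analyzer supply the concrete DDLDesc through getDesc, turning the parsed mapping into either a create or an alter operation. Below is a minimal sketch of what a "create" subclass could look like, assuming getDesc is the abstract factory method with the signature implied by the call above; the subclass and desc names are illustrative, not confirmed Hive classes, and constructor boilerplate is omitted:

public class CreateMappingAnalyzerSketch extends AbstractWMMappingAnalyzer {
    @Override
    protected DDLDesc getDesc(String resourcePlanName, String entityType, String entityName,
            String poolPath, Integer ordering) {
        // Hypothetical desc class carrying the parsed fields into the DDLTask;
        // the real project defines dedicated create/alter mapping descs.
        return new CreateWMMappingDescSketch(resourcePlanName, entityType, entityName,
                poolPath, ordering);
    }
}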

Example 2 with DDLDesc

Use of org.apache.hadoop.hive.ql.ddl.DDLDesc in project hive by apache.

The class CreateTableHook, method postAnalyze:

@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<?>> rootTasks) throws SemanticException {
    if (rootTasks.size() == 0) {
        // There will be no DDL task created in the case of CREATE TABLE IF NOT EXISTS.
        return;
    }
    Task<?> t = rootTasks.get(rootTasks.size() - 1);
    if (!(t instanceof DDLTask)) {
        return;
    }
    DDLTask task = (DDLTask) t;
    DDLDesc d = task.getWork().getDDLDesc();
    if (!(d instanceof CreateTableDesc)) {
        return;
    }
    CreateTableDesc desc = (CreateTableDesc) d;
    Map<String, String> tblProps = desc.getTblProps();
    if (tblProps == null) {
        // tblProps will be null if the user didn't specify TBLPROPERTIES in the
        // CREATE TABLE command.
        tblProps = new HashMap<String, String>();
    }
    // First, check whether we will allow the user to create the table.
    String storageHandler = desc.getStorageHandler();
    if (StringUtils.isNotEmpty(storageHandler)) {
        try {
            HiveStorageHandler storageHandlerInst = HCatUtil.getStorageHandler(context.getConf(), desc.getStorageHandler(), desc.getSerName(), desc.getInputFormat(), desc.getOutputFormat());
        // Authorization checks are performed by the storageHandler.getAuthorizationProvider(), if
        // StorageDelegationAuthorizationProvider is used.
        } catch (IOException e) {
            throw new SemanticException(e);
        }
    }
    try {
        Table table = context.getHive().newTable(desc.getDbTableName());
        if (desc.getLocation() != null) {
            table.setDataLocation(new Path(desc.getLocation()));
        }
        if (desc.getStorageHandler() != null) {
            table.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE, desc.getStorageHandler());
        }
        for (Map.Entry<String, String> prop : tblProps.entrySet()) {
            table.setProperty(prop.getKey(), prop.getValue());
        }
        for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) {
            table.setSerdeParam(prop.getKey(), prop.getValue());
        }
        if (HCatAuthUtil.isAuthorizationEnabled(context.getConf())) {
            authorize(table, Privilege.CREATE);
        }
    } catch (HiveException ex) {
        throw new SemanticException(ex);
    }
    desc.setTblProps(tblProps);
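    // Note: tableName is a field of the hook class, presumably populated during
    // preAnalyze; it is not defined in this excerpt.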
    context.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, tableName);
}
Also used: Path (org.apache.hadoop.fs.Path), HiveStorageHandler (org.apache.hadoop.hive.ql.metadata.HiveStorageHandler), Table (org.apache.hadoop.hive.ql.metadata.Table), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), IOException (java.io.IOException), CreateTableDesc (org.apache.hadoop.hive.ql.ddl.table.create.CreateTableDesc), DDLTask (org.apache.hadoop.hive.ql.ddl.DDLTask), DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc), HashMap (java.util.HashMap), Map (java.util.Map), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
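
The guard cascade above (take the last root task, check it is a DDLTask, unwrap getWork().getDDLDesc(), check the desc type) is a recurring navigation pattern. A small generic helper makes the intent explicit; this is a hypothetical utility, not something shipped with Hive:

import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.ddl.DDLTask;
import org.apache.hadoop.hive.ql.exec.Task;

final class DDLDescUtil {
    private DDLDescUtil() {
    }

    // Returns the desc of the last root task, if that task is a DDLTask
    // carrying a desc of the requested type.
    static <T extends DDLDesc> Optional<T> lastDescOfType(List<Task<?>> rootTasks, Class<T> type) {
        if (rootTasks.isEmpty()) {
            return Optional.empty();
        }
        Task<?> last = rootTasks.get(rootTasks.size() - 1);
        if (!(last instanceof DDLTask)) {
            return Optional.empty();
        }
        DDLDesc desc = ((DDLTask) last).getWork().getDDLDesc();
        return type.isInstance(desc) ? Optional.of(type.cast(desc)) : Optional.empty();
    }
}

With such a helper, postAnalyze could begin with lastDescOfType(rootTasks, CreateTableDesc.class) and return early on an empty Optional.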

Example 3 with DDLDesc

Use of org.apache.hadoop.hive.ql.ddl.DDLDesc in project hive by apache.

The class HCatSemanticAnalyzer, method authorizeDDLWork:

@Override
protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) throws HiveException {
    DDLDesc ddlDesc = work.getDDLDesc();
    if (ddlDesc instanceof ShowDatabasesDesc) {
        authorize(HiveOperation.SHOWDATABASES.getInputRequiredPrivileges(), HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges());
    } else if (ddlDesc instanceof DropDatabaseDesc) {
        DropDatabaseDesc dropDb = (DropDatabaseDesc) ddlDesc;
        Database db = cntxt.getHive().getDatabase(dropDb.getDatabaseName());
        if (db != null) {
            // A null here means the database does not exist, probably due to a
            // "drop database if exists" clause, so there is nothing to authorize.
            authorize(db, Privilege.DROP);
        }
    } else if (ddlDesc instanceof DescDatabaseDesc) {
        DescDatabaseDesc descDb = (DescDatabaseDesc) ddlDesc;
        Database db = cntxt.getHive().getDatabase(descDb.getDatabaseName());
        authorize(db, Privilege.SELECT);
    } else if (ddlDesc instanceof SwitchDatabaseDesc) {
        SwitchDatabaseDesc switchDb = (SwitchDatabaseDesc) ddlDesc;
        Database db = cntxt.getHive().getDatabase(switchDb.getDatabaseName());
        authorize(db, Privilege.SELECT);
    } else if (ddlDesc instanceof ShowTablesDesc) {
        ShowTablesDesc showTables = (ShowTablesDesc) ddlDesc;
        String dbName = showTables.getDbName() == null ? SessionState.get().getCurrentDatabase() : showTables.getDbName();
        authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
    } else if (ddlDesc instanceof DescTableDesc) {
        // Be careful when authorizing a table based on just its name: if the
        // columns have a separate authorization domain, it must be honored.
        DescTableDesc descTable = (DescTableDesc) ddlDesc;
        String tableName = extractTableName(descTable.getDbTableName());
        authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
    } else if (ddlDesc instanceof ShowTableStatusDesc) {
        ShowTableStatusDesc showTableStatus = (ShowTableStatusDesc) ddlDesc;
        String dbName = showTableStatus.getDbName() == null ? SessionState.get().getCurrentDatabase() : showTableStatus.getDbName();
        authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
    } else if (ddlDesc instanceof AlterTableDropPartitionDesc) {
        AlterTableDropPartitionDesc dropPartition = (AlterTableDropPartitionDesc) ddlDesc;
        // This is actually an ALTER TABLE DROP PARTITION statement.
        for (AlterTableDropPartitionDesc.PartitionDesc partSpec : dropPartition.getPartSpecs()) {
            // Partitions are not added as write entities for drop-partition statements in Hive.
            Table table = hive.getTable(SessionState.get().getCurrentDatabase(), dropPartition.getTableName());
            List<Partition> partitions = null;
            try {
                partitions = hive.getPartitionsByFilter(table, partSpec.getPartSpec().getExprString());
            } catch (Exception e) {
                throw new HiveException(e);
            }
            for (Partition part : partitions) {
                authorize(part, Privilege.DROP);
            }
        }
    } else if (ddlDesc instanceof ShowPartitionsDesc) {
        ShowPartitionsDesc showParts = (ShowPartitionsDesc) ddlDesc;
        String tableName = extractTableName(showParts.getTabName());
        authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
    } else if (ddlDesc instanceof AlterTableSetLocationDesc) {
        AlterTableSetLocationDesc alterTable = (AlterTableSetLocationDesc) ddlDesc;
        Table table = hive.getTable(SessionState.get().getCurrentDatabase(), Utilities.getDbTableName(alterTable.getDbTableName())[1], false);
        Partition part = null;
        if (alterTable.getPartitionSpec() != null) {
            part = hive.getPartition(table, alterTable.getPartitionSpec(), false);
        }
        String newLocation = alterTable.getLocation();
        /* HCat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements,
         * for both the old table/partition location and the new location.
         */
        if (part != null) {
            // Authorize for the old location, then for the new location.
            authorize(part, Privilege.ALTER_DATA);
            part.setLocation(newLocation);
            authorize(part, Privilege.ALTER_DATA);
        } else {
            // Authorize for the old location, then for the new location.
            authorize(table, Privilege.ALTER_DATA);
            table.getTTable().getSd().setLocation(newLocation);
            authorize(table, Privilege.ALTER_DATA);
        }
    }
}
Also used: DropDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseDesc), AlterTableDropPartitionDesc (org.apache.hadoop.hive.ql.ddl.table.partition.drop.AlterTableDropPartitionDesc), Partition (org.apache.hadoop.hive.ql.metadata.Partition), Table (org.apache.hadoop.hive.ql.metadata.Table), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), DescTableDesc (org.apache.hadoop.hive.ql.ddl.table.info.desc.DescTableDesc), SwitchDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.use.SwitchDatabaseDesc), ShowDatabasesDesc (org.apache.hadoop.hive.ql.ddl.database.show.ShowDatabasesDesc), HCatException (org.apache.hive.hcatalog.common.HCatException), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), ShowPartitionsDesc (org.apache.hadoop.hive.ql.ddl.table.partition.show.ShowPartitionsDesc), ShowTablesDesc (org.apache.hadoop.hive.ql.ddl.table.info.show.tables.ShowTablesDesc), ShowTableStatusDesc (org.apache.hadoop.hive.ql.ddl.table.info.show.status.ShowTableStatusDesc), Database (org.apache.hadoop.hive.metastore.api.Database), List (java.util.List), AlterTableSetLocationDesc (org.apache.hadoop.hive.ql.ddl.table.storage.set.location.AlterTableSetLocationDesc), DescDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.desc.DescDatabaseDesc), DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc)
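
The instanceof chain keeps every authorization rule in one method, at the cost of length. The same dispatch can be expressed as a lookup table; the sketch below is an illustrative refactoring, not HCatalog's code, showing the shape such a registry would take:

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.metadata.HiveException;

interface DescAuthorizer<T extends DDLDesc> {
    void authorize(T desc) throws HiveException;
}

final class DescAuthorizerRegistry {
    private final Map<Class<? extends DDLDesc>, DescAuthorizer<? extends DDLDesc>> handlers =
            new LinkedHashMap<>();

    <T extends DDLDesc> void register(Class<T> type, DescAuthorizer<T> handler) {
        handlers.put(type, handler);
    }

    @SuppressWarnings("unchecked")
    void dispatch(DDLDesc desc) throws HiveException {
        // Exact-class lookup: unlike instanceof, this does not match subclasses.
        DescAuthorizer<DDLDesc> handler = (DescAuthorizer<DDLDesc>) handlers.get(desc.getClass());
        if (handler != null) {
            handler.authorize(desc);
        }
        // Unregistered desc types fall through silently, like the chain above.
    }
}

Each branch of authorizeDDLWork would then collapse into one register call per desc type.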

Example 4 with DDLDesc

Use of org.apache.hadoop.hive.ql.ddl.DDLDesc in project hive by apache.

The class CreateDatabaseHook, method authorizeDDLWork:

@Override
protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work) throws HiveException {
    DDLDesc ddlDesc = work.getDDLDesc();
    if (ddlDesc instanceof CreateDatabaseDesc) {
        CreateDatabaseDesc createDb = (CreateDatabaseDesc) ddlDesc;
        Database db = new Database(createDb.getName(), createDb.getComment(), createDb.getLocationUri(), createDb.getDatabaseProperties());
        authorize(db, Privilege.CREATE);
    }
}
Also used: CreateDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc), Database (org.apache.hadoop.hive.metastore.api.Database), DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc)
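
For either hook to fire, the HCatalog semantic analyzer must be registered with the session. A minimal sketch of the wiring, assuming Hive's standard hive.semantic.analyzer.hook property and HCatalog's analyzer class name (verify both against your Hive/HCatalog version):

import org.apache.hadoop.hive.conf.HiveConf;

public class RegisterHCatAnalyzerSketch {
    public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // hive.semantic.analyzer.hook names the semantic analyzer hook class;
        // HCatSemanticAnalyzer then delegates statements such as CREATE DATABASE
        // to hooks like CreateDatabaseHook.
        conf.set("hive.semantic.analyzer.hook",
                "org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer");
    }
}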

Example 5 with DDLDesc

Use of org.apache.hadoop.hive.ql.ddl.DDLDesc in project hive by apache.

The class TaskCompiler, method patchUpAfterCTASorMaterializedView:

private void patchUpAfterCTASorMaterializedView(List<Task<?>> rootTasks, Set<ReadEntity> inputs, Set<WriteEntity> outputs, Task<?> createTask, boolean createTaskAfterMoveTask) {
    // clear the mapredWork output file from outputs for CTAS
    // DDLWork at the tail of the chain will have the output
    Iterator<WriteEntity> outIter = outputs.iterator();
    while (outIter.hasNext()) {
        switch(outIter.next().getType()) {
            case DFS_DIR:
            case LOCAL_DIR:
                outIter.remove();
                break;
            default:
                break;
        }
    }
    // find all leaf tasks and make the DDLTask as a dependent task on all of them
    Set<Task<?>> leaves = new LinkedHashSet<>();
    getLeafTasks(rootTasks, leaves);
    assert (leaves.size() > 0);
    // Target task is supposed to be the last task
    Task<?> targetTask = createTask;
    for (Task<?> task : leaves) {
        if (task instanceof StatsTask) {
            // StatsTask requires the table to already exist.
            for (Task<?> parentOfStatsTask : task.getParentTasks()) {
                if (parentOfStatsTask instanceof MoveTask && !createTaskAfterMoveTask) {
                    // For partitioned CTAS, we need to create the table before the move task
                    // as we need to create the partitions in metastore and for that we should
                    // have already registered the table
                    interleaveTask(parentOfStatsTask, createTask);
                } else {
                    parentOfStatsTask.addDependentTask(createTask);
                }
            }
            for (Task<?> parentOfCrtTblTask : createTask.getParentTasks()) {
                parentOfCrtTblTask.removeDependentTask(task);
            }
            createTask.addDependentTask(task);
            targetTask = task;
        } else if (task instanceof MoveTask && !createTaskAfterMoveTask) {
            // For partitioned CTAS, we need to create the table before the move task
            // as we need to create the partitions in metastore and for that we should
            // have already registered the table
            interleaveTask(task, createTask);
            targetTask = task;
        } else {
            task.addDependentTask(createTask);
        }
    }
    // Add task to insert / delete materialized view from registry if needed
    if (createTask instanceof DDLTask) {
        DDLTask ddlTask = (DDLTask) createTask;
        DDLWork work = ddlTask.getWork();
        DDLDesc desc = work.getDDLDesc();
        if (desc instanceof CreateMaterializedViewDesc) {
            CreateMaterializedViewDesc createViewDesc = (CreateMaterializedViewDesc) desc;
            String tableName = createViewDesc.getViewName();
            boolean retrieveAndInclude = createViewDesc.isRewriteEnabled();
            MaterializedViewUpdateDesc materializedViewUpdateDesc = new MaterializedViewUpdateDesc(tableName, retrieveAndInclude, false, false);
            DDLWork ddlWork = new DDLWork(inputs, outputs, materializedViewUpdateDesc);
            targetTask.addDependentTask(TaskFactory.get(ddlWork, conf));
        } else if (desc instanceof AlterMaterializedViewRewriteDesc) {
            AlterMaterializedViewRewriteDesc alterMVRewriteDesc = (AlterMaterializedViewRewriteDesc) desc;
            String tableName = alterMVRewriteDesc.getMaterializedViewName();
            boolean retrieveAndInclude = alterMVRewriteDesc.isRewriteEnable();
            boolean disableRewrite = !alterMVRewriteDesc.isRewriteEnable();
            MaterializedViewUpdateDesc materializedViewUpdateDesc = new MaterializedViewUpdateDesc(tableName, retrieveAndInclude, disableRewrite, false);
            DDLWork ddlWork = new DDLWork(inputs, outputs, materializedViewUpdateDesc);
            targetTask.addDependentTask(TaskFactory.get(ddlWork, conf));
        }
    }
}
Also used: LinkedHashSet (java.util.LinkedHashSet), DDLTask (org.apache.hadoop.hive.ql.ddl.DDLTask), BasicStatsNoJobTask (org.apache.hadoop.hive.ql.stats.BasicStatsNoJobTask), FetchTask (org.apache.hadoop.hive.ql.exec.FetchTask), Task (org.apache.hadoop.hive.ql.exec.Task), StatsTask (org.apache.hadoop.hive.ql.exec.StatsTask), MoveTask (org.apache.hadoop.hive.ql.exec.MoveTask), AlterMaterializedViewRewriteDesc (org.apache.hadoop.hive.ql.ddl.view.materialized.alter.rewrite.AlterMaterializedViewRewriteDesc), CreateMaterializedViewDesc (org.apache.hadoop.hive.ql.ddl.view.create.CreateMaterializedViewDesc), MaterializedViewUpdateDesc (org.apache.hadoop.hive.ql.ddl.view.materialized.update.MaterializedViewUpdateDesc), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc)
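
interleaveTask is called above but not shown. Based on the surrounding comments (for partitioned CTAS the create task must run before the move task), a plausible sketch of its behavior is to splice the new task between a task and its former parents; see TaskCompiler in the Hive source for the actual implementation:

// Hedged sketch: re-parent 'task' so that 'newTask' runs after task's former
// parents and immediately before 'task' itself.
private void interleaveTaskSketch(Task<?> task, Task<?> newTask) {
    // Copy the parent list first, since add/removeDependentTask mutate it.
    for (Task<?> parent : new java.util.ArrayList<>(task.getParentTasks())) {
        parent.removeDependentTask(task);
        parent.addDependentTask(newTask);
    }
    // Resulting chain: parent -> newTask -> task.
    newTask.addDependentTask(task);
}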

Aggregations

DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc): 6 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 4 usages
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 3 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3 usages
Table (org.apache.hadoop.hive.ql.metadata.Table): 3 usages
Map (java.util.Map): 2 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 2 usages
DDLTask (org.apache.hadoop.hive.ql.ddl.DDLTask): 2 usages
IOException (java.io.IOException): 1 usage
HashMap (java.util.HashMap): 1 usage
LinkedHashSet (java.util.LinkedHashSet): 1 usage
List (java.util.List): 1 usage
Path (org.apache.hadoop.fs.Path): 1 usage
CreateDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc): 1 usage
DescDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.desc.DescDatabaseDesc): 1 usage
DropDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseDesc): 1 usage
ShowDatabasesDesc (org.apache.hadoop.hive.ql.ddl.database.show.ShowDatabasesDesc): 1 usage
SwitchDatabaseDesc (org.apache.hadoop.hive.ql.ddl.database.use.SwitchDatabaseDesc): 1 usage
CreateTableDesc (org.apache.hadoop.hive.ql.ddl.table.create.CreateTableDesc): 1 usage
DescTableDesc (org.apache.hadoop.hive.ql.ddl.table.info.desc.DescTableDesc): 1 usage