
Example 26 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From class DDLSemanticAnalyzer, method analyzeShowCreateTable:

private void analyzeShowCreateTable(ASTNode ast) throws SemanticException {
    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
    ShowCreateTableDesc showCreateTblDesc = new ShowCreateTableDesc(tableName, ctx.getResFile().toString());
    Table tab = getTable(tableName);
    // SHOW CREATE TABLE is not supported on index tables
    if (tab.getTableType() == org.apache.hadoop.hive.metastore.TableType.INDEX_TABLE) {
        throw new SemanticException(ErrorMsg.SHOW_CREATETABLE_INDEX.getMsg(tableName + " has table type INDEX_TABLE"));
    }
    inputs.add(new ReadEntity(tab));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showCreateTblDesc), conf));
    // the DDL text lands in the result file; a fetch task streams it back to the client
    setFetchTask(createFetchTask(showCreateTblDesc.getSchema()));
}
Also used: ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) ShowCreateTableDesc(org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
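
Every example here follows the same pattern: build a *Desc describing the operation, wrap it in a DDLWork together with the input/output entity sets, and register a task through TaskFactory. As a minimal, untested sketch of driving this particular path from client code — the table name src is hypothetical, and an initialized metastore and active session are assumed:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ShowCreateTableSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // the semantic analyzer requires an active session
        SessionState.start(conf);
        Driver driver = new Driver(conf);
        // routes through DDLSemanticAnalyzer.analyzeShowCreateTable; the
        // DDLTask writes the CREATE TABLE text to the result file and the
        // fetch task set up above streams it back as rows
        driver.run("SHOW CREATE TABLE src");
        List<String> results = new ArrayList<String>();
        driver.getResults(results);
        for (String line : results) {
            System.out.println(line);
        }
    }
}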

Example 27 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From class DDLSemanticAnalyzer, method analyzeLockDatabase:

private void analyzeLockDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
    inputs.add(new ReadEntity(getDatabase(dbName)));
    // Lock database operation is to acquire the lock explicitly, the operation
    // itself doesn't need to be locked. Set the WriteEntity as WriteType:
    // DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction.
    outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK));
    LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
    lockDatabaseDesc.setQueryStr(ctx.getCmd());
    DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc);
    rootTasks.add(TaskFactory.get(work, conf));
    ctx.setNeedLockMgr(true);
}
Also used: ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) LockDatabaseDesc(org.apache.hadoop.hive.ql.plan.LockDatabaseDesc) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
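
Note the setNeedLockMgr(true) at the end: explicit LOCK/UNLOCK statements only take effect when a lock manager is configured. A rough, untested sketch — the database name sales is hypothetical, and a reachable lock manager (e.g. the ZooKeeper-backed one) is assumed:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class LockDatabaseSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // explicit locks require concurrency support plus a configured lock
        // manager; without one the LOCK statement fails at execution time
        conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
        SessionState.start(conf);
        Driver driver = new Driver(conf);
        driver.run("LOCK DATABASE sales SHARED");   // -> analyzeLockDatabase
        // ... work against the database under the explicit lock ...
        driver.run("UNLOCK DATABASE sales");
    }
}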

Example 28 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From class DDLSemanticAnalyzer, method analyzeCreateDatabase:

private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        ASTNode childNode = (ASTNode) ast.getChild(i);
        switch(childNode.getToken().getType()) {
            case HiveParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveParser.TOK_DATABASECOMMENT:
                dbComment = unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveParser.TOK_DATABASEPROPERTIES:
                dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0));
                break;
            case HiveParser.TOK_DATABASELOCATION:
                dbLocation = unescapeSQLString(childNode.getChild(0).getText());
                addLocationToOutputs(dbLocation);
                break;
            default:
                throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
        }
    }
    CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists);
    if (dbProps != null) {
        createDatabaseDesc.setDatabaseProperties(dbProps);
    }
    Database database = new Database(dbName, dbComment, dbLocation, dbProps);
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createDatabaseDesc), conf));
}
Also used: DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) CreateDatabaseDesc(org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc) ShowCreateDatabaseDesc(org.apache.hadoop.hive.ql.plan.ShowCreateDatabaseDesc) Database(org.apache.hadoop.hive.metastore.api.Database) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
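
Each optional clause of CREATE DATABASE maps to exactly one branch of the switch above. A sketch with a statement exercising all four branches — database name, comment, path, and properties are all made up:

public class CreateDatabaseSketch {
    // one clause per switch branch: TOK_IFNOTEXISTS, TOK_DATABASECOMMENT,
    // TOK_DATABASELOCATION, TOK_DATABASEPROPERTIES
    static final String DDL =
        "CREATE DATABASE IF NOT EXISTS analytics "
            + "COMMENT 'reporting datasets' "
            + "LOCATION '/warehouse/analytics.db' "
            + "WITH DBPROPERTIES ('owner'='bi-team', 'retention'='90d')";

    public static void main(String[] args) {
        // feed to Driver.run(...) as in the SHOW CREATE TABLE sketch above
        System.out.println(DDL);
    }
}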

Example 29 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From class DDLSemanticAnalyzer, method analyzeAlterTableAddConstraint:

private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) throws SemanticException {
    ASTNode parent = (ASTNode) ast.getParent();
    ASTNode child = (ASTNode) ast.getChild(0);
    List<SQLPrimaryKey> primaryKeys = new ArrayList<SQLPrimaryKey>();
    List<SQLForeignKey> foreignKeys = new ArrayList<SQLForeignKey>();
    if (child.getToken().getType() == HiveParser.TOK_PRIMARY_KEY) {
        BaseSemanticAnalyzer.processPrimaryKeys(parent, child, primaryKeys);
    } else if (child.getToken().getType() == HiveParser.TOK_FOREIGN_KEY) {
        BaseSemanticAnalyzer.processForeignKeys(parent, child, foreignKeys);
    }
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
Also used: AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) ArrayList(java.util.ArrayList)
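
For reference, the statements that reach this method look like the following. In the Hive releases this analyzer comes from, constraints are metadata-only and must be declared DISABLE NOVALIDATE; exact modifier requirements vary by version, and the table and constraint names below are hypothetical:

public class AddConstraintSketch {
    // TOK_PRIMARY_KEY branch
    static final String ADD_PK =
        "ALTER TABLE orders ADD CONSTRAINT pk_orders "
            + "PRIMARY KEY (order_id) DISABLE NOVALIDATE";

    // TOK_FOREIGN_KEY branch; RELY lets the optimizer trust the constraint
    static final String ADD_FK =
        "ALTER TABLE order_items ADD CONSTRAINT fk_items_orders "
            + "FOREIGN KEY (order_id) REFERENCES orders (order_id) "
            + "DISABLE NOVALIDATE RELY";

    public static void main(String[] args) {
        System.out.println(ADD_PK);
        System.out.println(ADD_FK);
    }
}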

Example 30 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From class DDLSemanticAnalyzer, method analyzeAlterTableProps:

private void analyzeAlterTableProps(String[] qualified, HashMap<String, String> partSpec, ASTNode ast, boolean expectView, boolean isUnset) throws SemanticException {
    String tableName = getDotName(qualified);
    HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0)).getChild(0));
    EnvironmentContext environmentContext = null;
    // we need to check if the properties are valid, especially for stats:
    // they might be changed via ALTER TABLE .. UPDATE STATISTICS or
    // ALTER TABLE .. SET TBLPROPERTIES. A property other than row_count
    // or raw_data_size cannot be changed through UPDATE STATISTICS.
    boolean changeStatsSucceeded = false;
    for (Entry<String, String> entry : mapProp.entrySet()) {
        // make sure we do not change anything if there is anything wrong
        if (entry.getKey().equals(StatsSetupConst.ROW_COUNT) || entry.getKey().equals(StatsSetupConst.RAW_DATA_SIZE)) {
            try {
                Long.parseLong(entry.getValue());
                changeStatsSucceeded = true;
            } catch (Exception e) {
                throw new SemanticException("AlterTable " + entry.getKey() + " failed with value " + entry.getValue());
            }
        } else {
            if (queryState.getCommandType().equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName()) || queryState.getCommandType().equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) {
                throw new SemanticException("AlterTable UpdateStats " + entry.getKey() + " failed because the only valid keys are " + StatsSetupConst.ROW_COUNT + " and " + StatsSetupConst.RAW_DATA_SIZE);
            }
        }
        if (changeStatsSucceeded) {
            environmentContext = new EnvironmentContext();
            environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.USER);
        }
    }
    AlterTableDesc alterTblDesc = null;
    if (isUnset) {
        alterTblDesc = new AlterTableDesc(AlterTableTypes.DROPPROPS, partSpec, expectView);
        if (ast.getChild(1) != null) {
            alterTblDesc.setDropIfExists(true);
        }
    } else {
        alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDPROPS, partSpec, expectView);
    }
    alterTblDesc.setProps(mapProp);
    alterTblDesc.setEnvironmentContext(environmentContext);
    alterTblDesc.setOldName(tableName);
    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
Also used: EnvironmentContext(org.apache.hadoop.hive.metastore.api.EnvironmentContext) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) LockException(org.apache.hadoop.hive.ql.lockmgr.LockException) InvocationTargetException(java.lang.reflect.InvocationTargetException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) URISyntaxException(java.net.URISyntaxException) FileNotFoundException(java.io.FileNotFoundException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException)
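
The validation loop above means only two keys can be written through the UPDATE STATISTICS path: StatsSetupConst.ROW_COUNT ("numRows") and StatsSetupConst.RAW_DATA_SIZE ("rawDataSize"), and both values must parse as longs. A sketch of both paths — the table name and values are hypothetical:

public class AlterTablePropsSketch {
    // goes through the stats validation above; only numRows and
    // rawDataSize are accepted via UPDATE STATISTICS
    static final String UPDATE_STATS =
        "ALTER TABLE clicks UPDATE STATISTICS "
            + "SET ('numRows'='1000000', 'rawDataSize'='52428800')";

    // arbitrary keys must use SET TBLPROPERTIES instead, which takes the
    // ADDPROPS branch of the analyzer
    static final String SET_PROPS =
        "ALTER TABLE clicks SET TBLPROPERTIES ('comment'='web click log')";

    public static void main(String[] args) {
        System.out.println(UPDATE_STATS);
        System.out.println(SET_PROPS);
    }
}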

Aggregations

DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork) 104
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity) 23
Table (org.apache.hadoop.hive.ql.metadata.Table) 20
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc) 20
Test (org.junit.Test) 20
PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc) 17
LinkedHashMap (java.util.LinkedHashMap) 15
HashMap (java.util.HashMap) 14
ArrayList (java.util.ArrayList) 13
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity) 12
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 12
PrivilegeDesc (org.apache.hadoop.hive.ql.plan.PrivilegeDesc) 9
RoleDDLDesc (org.apache.hadoop.hive.ql.plan.RoleDDLDesc) 9
Map (java.util.Map) 8
Path (org.apache.hadoop.fs.Path) 8
FileNotFoundException (java.io.FileNotFoundException) 7
LinkedList (java.util.LinkedList) 7
List (java.util.List) 7
GrantRevokeRoleDDL (org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL) 7
ShowGrantDesc (org.apache.hadoop.hive.ql.plan.ShowGrantDesc) 7