Example 31 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class AlterTableChangeColumnAnalyzer, method analyzeCommand:

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name] [CASCADE|RESTRICT]
    String oldColumnName = command.getChild(0).getText().toLowerCase();
    String newColumnName = command.getChild(1).getText().toLowerCase();
    String newType = getTypeStringFromAST((ASTNode) command.getChild(2));
    Table table = getTable(tableName);
    SkewedInfo skewInfo = table.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColumnName)) {
        throw new SemanticException(oldColumnName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    ASTNode constraintChild = null;
    for (int i = 3; i < command.getChildCount(); i++) {
        ASTNode child = (ASTNode) command.getChild(i);
        switch (child.getToken().getType()) {
            case HiveParser.StringLiteral:
                newComment = unescapeSQLString(child.getText());
                break;
            case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveParser.KW_FIRST:
                first = true;
                break;
            case HiveParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveParser.TOK_RESTRICT:
                break;
            default:
                constraintChild = child;
        }
    }
    Constraints constraints = getConstraints(tableName, command, newColumnName, table, constraintChild);
    AlterTableChangeColumnDesc desc = new AlterTableChangeColumnDesc(tableName, partitionSpec, isCascade, constraints,
            unescapeIdentifier(oldColumnName), unescapeIdentifier(newColumnName), newType, newComment, first, flagCol);
    if (AcidUtils.isTransactionalTable(table)) {
        // Note: we might actually need it only when certain changes (e.g. name or type?) are made.
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: Table (org.apache.hadoop.hive.ql.metadata.Table), Constraints (org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints), SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
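For orientation: the first three AST children are positional (old column name, new column name, new type), and the loop over children 3 and up picks off the optional clauses listed in the grammar comment. A statement exercising every optional clause could look like this (table and column names are made up for illustration):

ALTER TABLE sales CHANGE COLUMN amt amount DECIMAL(10,2) COMMENT 'renamed and retyped' AFTER id CASCADE;

Here child 0 is amt, child 1 is amount, child 2 is the DECIMAL(10,2) type, and the remaining children carry the comment literal, the AFTER position, and the CASCADE flag.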

Example 32 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class AlterTableReplaceColumnsAnalyzer, method analyzeCommand:

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    List<FieldSchema> newCols = getColumns((ASTNode) command.getChild(0));
    boolean isCascade = false;
    if (null != command.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
        isCascade = true;
    }
    AlterTableReplaceColumnsDesc desc = new AlterTableReplaceColumnsDesc(tableName, partitionSpec, isCascade, newCols);
    Table table = getTable(tableName, true);
    if (AcidUtils.isTransactionalTable(table)) {
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)
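REPLACE COLUMNS swaps out the entire column list in one step, so the analyzer only needs the new schema and the optional CASCADE flag, which propagates the metadata change to existing partitions as well. An illustrative statement (names made up):

ALTER TABLE sales REPLACE COLUMNS (id INT, amount DECIMAL(10,2)) CASCADE;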

Example 33 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class AlterTableAddColumnsAnalyzer, method analyzeCommand:

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    List<FieldSchema> newCols = getColumns((ASTNode) command.getChild(0));
    boolean isCascade = false;
    if (null != command.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
        isCascade = true;
    }
    AlterTableAddColumnsDesc desc = new AlterTableAddColumnsDesc(tableName, partitionSpec, isCascade, newCols);
    Table table = getTable(tableName, true);
    if (AcidUtils.isTransactionalTable(table)) {
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)
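The add-columns analyzer is almost identical to the replace-columns one above; only the desc differs, since ADD COLUMNS appends to the existing schema instead of replacing it. An illustrative statement (names made up):

ALTER TABLE sales ADD COLUMNS (discount DECIMAL(5,2) COMMENT 'optional discount') CASCADE;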

Example 34 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class ShowTransactionsAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    ctx.setResFile(ctx.getLocalTmpPath());
    ShowTransactionsDesc desc = new ShowTransactionsDesc(ctx.getResFile());
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(ShowTransactionsDesc.SCHEMA));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)
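Unlike the ALTER TABLE analyzers, this one reads no table at all: the result set is written to a local temporary file and handed back through a fetch task, which is why the task is marked with setFetchSource(true) and paired with createFetchTask. The triggering statement is simply:

SHOW TRANSACTIONS;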

Example 35 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class CacheMetadataAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    Table table = AnalyzeCommandUtils.getTable(root, this);
    CacheMetadataDesc desc;
    // In 2 cases out of 3, we could pass the path and type directly to metastore...
    if (AnalyzeCommandUtils.isPartitionLevelStats(root)) {
        Map<String, String> partSpec = AnalyzeCommandUtils.getPartKeyValuePairsFromAST(table, root, conf);
        Partition part = PartitionUtils.getPartition(db, table, partSpec, true);
        desc = new CacheMetadataDesc(table.getDbName(), table.getTableName(), part.getName());
        inputs.add(new ReadEntity(part));
    } else {
        // Should we get all partitions for a partitioned table?
        desc = new CacheMetadataDesc(table.getDbName(), table.getTableName(), table.isPartitioned());
        inputs.add(new ReadEntity(table));
    }
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), Partition (org.apache.hadoop.hive.ql.metadata.Partition), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)
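This analyzer backs the CACHE METADATA extension of ANALYZE TABLE (a command originally added for the HBase-backed metastore). With a partition spec it caches metadata for that one partition; without one it caches the whole table. Illustrative statements (table and partition values are made up):

ANALYZE TABLE sales CACHE METADATA;
ANALYZE TABLE sales PARTITION (dt = '2021-01-01') CACHE METADATA;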

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 153 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 61 usages
Table (org.apache.hadoop.hive.ql.metadata.Table): 34 usages
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 31 usages
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 24 usages
TableName (org.apache.hadoop.hive.common.TableName): 23 usages
Test (org.junit.Test): 23 usages
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 22 usages
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc): 21 usages
ArrayList (java.util.ArrayList): 18 usages
Path (org.apache.hadoop.fs.Path): 15 usages
HashMap (java.util.HashMap): 14 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 12 usages
Task (org.apache.hadoop.hive.ql.exec.Task): 12 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11 usages
Tree (org.antlr.runtime.tree.Tree): 10 usages
HashSet (java.util.HashSet): 9 usages
Context (org.apache.hadoop.hive.ql.Context): 9 usages
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc): 9 usages
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc): 8 usages