Example 11 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class DescDatabaseAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() == 0 || root.getChildCount() > 2) {
        throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE");
    }
    ctx.setResFile(ctx.getLocalTmpPath());
    String databaseName = stripQuotes(root.getChild(0).getText());
    boolean isExtended = root.getChildCount() == 2;
    inputs.add(new ReadEntity(getDatabase(databaseName)));
    DescDatabaseDesc desc = new DescDatabaseDesc(ctx.getResFile(), databaseName, isExtended);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(desc.getSchema()));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
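
This analyzer backs statements of the form DESCRIBE DATABASE [EXTENDED] dbname (for example, DESCRIBE DATABASE EXTENDED sales_db, with a hypothetical database name): the single mandatory child is the database name, and a second child, if present, corresponds to the EXTENDED keyword, which is why isExtended is derived directly from the child count.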

Example 12 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class AlterTableAlterPartitionAnalyzer, method analyzeCommand.

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    Table table = getTable(tableName);
    validateAlterTableType(table, AlterTableType.ALTERPARTITION, false);
    inputs.add(new ReadEntity(table));
    // Alter table ... partition column (column newtype) only takes one column at a time.
    ASTNode colAst = (ASTNode) command.getChild(0);
    String name = colAst.getChild(0).getText().toLowerCase();
    String type = getTypeStringFromAST((ASTNode) (colAst.getChild(1)));
    String comment = (colAst.getChildCount() == 3) ? unescapeSQLString(colAst.getChild(2).getText()) : null;
    FieldSchema newCol = new FieldSchema(unescapeIdentifier(name), type, comment);
    boolean isDefined = false;
    for (FieldSchema col : table.getTTable().getPartitionKeys()) {
        if (col.getName().compareTo(newCol.getName()) == 0) {
            isDefined = true;
        }
    }
    if (!isDefined) {
        throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(newCol.getName()));
    }
    AlterTableAlterPartitionDesc desc = new AlterTableAlterPartitionDesc(tableName.getNotEmptyDbTable(), newCol);
    if (AcidUtils.isTransactionalTable(table)) {
        setAcidDdlDesc(desc);
    }
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
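
As the in-code comment notes, this handles ALTER TABLE ... PARTITION COLUMN (column newtype), which changes the type of an existing partition column; that is why the new column name must already appear among the table's partition keys before the descriptor is created.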

Example 13 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class DescTableAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    ctx.setResFile(ctx.getLocalTmpPath());
    ASTNode tableTypeExpr = (ASTNode) root.getChild(0);
    TableName tableName = getQualifiedTableName((ASTNode) tableTypeExpr.getChild(0));
    // if the database is not the current one, validate that it exists
    if (tableName.getDb() != null) {
        db.validateDatabaseExists(tableName.getDb());
    }
    Table table = getTable(tableName);
    // process the second child node, if it exists, to get the partition spec(s)
    Map<String, String> partitionSpec = getPartitionSpec(db, tableTypeExpr, tableName);
    if (partitionSpec != null) {
        // validate that partition exists
        PartitionUtils.getPartition(db, table, partitionSpec, true);
    }
    // process the third child node, if it exists, to get the column path
    String columnPath = getColumnPath(db, tableTypeExpr, tableName, partitionSpec);
    boolean showColStats = false;
    boolean isFormatted = false;
    boolean isExt = false;
    if (root.getChildCount() == 2) {
        int descOptions = root.getChild(1).getType();
        isFormatted = descOptions == HiveParser.KW_FORMATTED;
        isExt = descOptions == HiveParser.KW_EXTENDED;
        // This is how we can tell whether we are describing a table or a column.
        if (columnPath != null && isFormatted) {
            showColStats = true;
        }
    }
    inputs.add(new ReadEntity(table));
    DescTableDesc desc = new DescTableDesc(ctx.getResFile(), tableName, partitionSpec, columnPath, isExt, isFormatted);
    Task<?> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    String schema = showColStats ? DescTableDesc.COLUMN_STATISTICS_SCHEMA : DescTableDesc.SCHEMA;
    setFetchTask(createFetchTask(schema));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) TableName(org.apache.hadoop.hive.common.TableName) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)
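
This covers the DESCRIBE [EXTENDED|FORMATTED] table family of statements, optionally qualified with a partition spec and a column path; the column-statistics schema is selected only when a column path is combined with FORMATTED, matching the showColStats logic above.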

Example 14 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class ShowTableStatusAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() > 3 || root.getChildCount() < 1) {
        throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg());
    }
    ctx.setResFile(ctx.getLocalTmpPath());
    String tableNames = getUnescapedName((ASTNode) root.getChild(0));
    String dbName = SessionState.get().getCurrentDatabase();
    Map<String, String> partitionSpec = null;
    if (root.getChildCount() > 1) {
        for (int i = 1; i < root.getChildCount(); i++) {
            ASTNode child = (ASTNode) root.getChild(i);
            if (child.getToken().getType() == HiveParser.Identifier) {
                dbName = unescapeIdentifier(child.getText());
            } else if (child.getToken().getType() == HiveParser.TOK_PARTSPEC) {
                partitionSpec = getValidatedPartSpec(getTable(tableNames), child, conf, false);
            } else {
                throw new SemanticException(ErrorMsg.INVALID_AST_TREE.getMsg(child.toStringTree() + " , Invalid token " + child.getToken().getType()));
            }
        }
    }
    if (partitionSpec != null) {
        // validate that partition exists
        PartitionUtils.getPartition(db, getTable(HiveTableName.of(tableNames)), partitionSpec, true);
    }
    ShowTableStatusDesc desc = new ShowTableStatusDesc(ctx.getResFile(), dbName, tableNames, partitionSpec);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(ShowTableStatusDesc.SCHEMA));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
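
Because the optional database identifier and partition spec children may appear in either order after the table name, the loop above dispatches on the token type of each child rather than on its position.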

Example 15 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class AlterTableAddConstraintAnalyzer, method analyzeCommand.

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    // TODO CAT - for now always use the default catalog.  Eventually will want to see if
    // the user specified a catalog
    List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
    List<SQLForeignKey> foreignKeys = new ArrayList<>();
    List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
    List<SQLCheckConstraint> checkConstraints = new ArrayList<>();
    ASTNode constraintNode = (ASTNode) command.getChild(0);
    switch(constraintNode.getToken().getType()) {
        case HiveParser.TOK_UNIQUE:
            ConstraintsUtils.processUniqueConstraints(tableName, constraintNode, uniqueConstraints);
            break;
        case HiveParser.TOK_PRIMARY_KEY:
            ConstraintsUtils.processPrimaryKeys(tableName, constraintNode, primaryKeys);
            break;
        case HiveParser.TOK_FOREIGN_KEY:
            ConstraintsUtils.processForeignKeys(tableName, constraintNode, foreignKeys);
            break;
        case HiveParser.TOK_CHECK_CONSTRAINT:
            ConstraintsUtils.processCheckConstraints(tableName, constraintNode, null, checkConstraints, command, ctx.getTokenRewriteStream());
            break;
        default:
            throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintNode.getToken().getText()));
    }
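    // the two null arguments are presumably the not-null and default constraint lists, which this ADD CONSTRAINT path does not populate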
    Constraints constraints = new Constraints(primaryKeys, foreignKeys, null, uniqueConstraints, null, checkConstraints);
    AlterTableAddConstraintDesc desc = new AlterTableAddConstraintDesc(tableName, null, constraints);
    Table table = getTable(tableName);
    if (AcidUtils.isTransactionalTable(table)) {
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
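
All five examples share the same construction pattern: build a DDL descriptor, wrap it in a DDLWork together with the read/write entity sets, turn the work into a task with TaskFactory.get, and add that task to rootTasks; analyzers whose statements return rows additionally mark the task as a fetch source and install a fetch task over the result file. Below is a condensed sketch of that pattern, reusing DescDatabaseDesc from Example 11 (with databaseName and isExtended computed as shown there); the fields and helpers (ctx, rootTasks, getInputs, getOutputs, createFetchTask, setFetchTask) are assumed to be inherited from the analyzer base class, as in the examples above.

// Condensed sketch of the shared DDLWork pattern (not a complete analyzer).
ctx.setResFile(ctx.getLocalTmpPath());                        // result file for statements that return rows
// 1. describe the DDL operation
DescDatabaseDesc desc = new DescDatabaseDesc(ctx.getResFile(), databaseName, isExtended);
// 2. wrap the descriptor in a DDLWork together with the read/write entities
DDLWork work = new DDLWork(getInputs(), getOutputs(), desc);
// 3. turn the work into an executable task and schedule it as a root task
Task<DDLWork> task = TaskFactory.get(work);
rootTasks.add(task);
// 4. only for "describe"/"show" style statements: expose the result file through a fetch task
task.setFetchSource(true);
setFetchTask(createFetchTask(desc.getSchema()));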

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork) 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException) 61
Table (org.apache.hadoop.hive.ql.metadata.Table) 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity) 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode) 24
TableName (org.apache.hadoop.hive.common.TableName) 23
Test (org.junit.Test) 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity) 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) 21
ArrayList (java.util.ArrayList) 18
Path (org.apache.hadoop.fs.Path) 15
HashMap (java.util.HashMap) 14
Database (org.apache.hadoop.hive.metastore.api.Database) 12
Task (org.apache.hadoop.hive.ql.exec.Task) 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 11
Tree (org.antlr.runtime.tree.Tree) 10
HashSet (java.util.HashSet) 9
Context (org.apache.hadoop.hive.ql.Context) 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc) 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc) 8