Example 21 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeAlterTableFileFormat.

private void analyzeAlterTableFileFormat(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
    StorageFormat format = new StorageFormat(conf);
    ASTNode child = (ASTNode) ast.getChild(0);
    if (!format.fillStorageFormat(child)) {
        throw new AssertionError("Unknown token " + child.getText());
    }
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, format.getInputFormat(), format.getOutputFormat(), format.getSerde(), format.getStorageHandler(), partSpec);
    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
Also used: AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)
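All of the examples on this page share one idiom: build a descriptor for the DDL operation, wrap it in a DDLWork together with the analyzer's read and write entity sets, and register the result as a root task. Below is a minimal standalone sketch of that idiom; the class and method names are hypothetical, but every call it makes (the DDLWork constructor, TaskFactory.get) is taken from the examples shown here.

import java.io.Serializable;
import java.util.HashSet;
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;

// Hypothetical helper class; not part of Hive.
final class DdlTaskSketch {
    // Mirrors the rootTasks.add(TaskFactory.get(new DDLWork(...), conf)) calls above.
    static void addAlterTableTask(List<Task<? extends Serializable>> rootTasks,
                                  HashSet<ReadEntity> inputs,
                                  HashSet<WriteEntity> outputs,
                                  AlterTableDesc alterTblDesc,
                                  HiveConf conf) {
        // DDLWork pairs the descriptor with the entities that authorization
        // and lock management will later inspect.
        DDLWork work = new DDLWork(inputs, outputs, alterTblDesc);
        rootTasks.add(TaskFactory.get(work, conf));
    }
}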

Example 22 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeDescribeTable.

/**
   * A query like "describe formatted default.maptable partition (b=100) id;"
   * generates a tree as follows:
   * TOK_TABTYPE
   *   TOK_TABNAME --> root for the table name; two child nodes mean a DB was specified
   *     default
   *     maptable
   *   TOK_PARTSPEC  --> root node for the partition spec; without one, this child is the column name
   *     TOK_PARTVAL
   *       b
   *       100
   *   id           --> root node for the column name
   * formatted
   */
private void analyzeDescribeTable(ASTNode ast) throws SemanticException {
    ASTNode tableTypeExpr = (ASTNode) ast.getChild(0);
    String dbName = null;
    String tableName = null;
    String colPath = null;
    Map<String, String> partSpec = null;
    ASTNode tableNode = null;
    // tablename is either TABLENAME or DBNAME.TABLENAME if db is given
    if (((ASTNode) tableTypeExpr.getChild(0)).getType() == HiveParser.TOK_TABNAME) {
        tableNode = (ASTNode) tableTypeExpr.getChild(0);
        if (tableNode.getChildCount() == 1) {
            tableName = ((ASTNode) tableNode.getChild(0)).getText();
        } else {
            dbName = ((ASTNode) tableNode.getChild(0)).getText();
            tableName = dbName + "." + ((ASTNode) tableNode.getChild(1)).getText();
        }
    } else {
        throw new SemanticException(((ASTNode) tableTypeExpr.getChild(0)).getText() + " is not an expected token type");
    }
    // process the second child node, if it exists, to get the partition spec(s)
    partSpec = QualifiedNameUtil.getPartitionSpec(db, tableTypeExpr, tableName);
    // process the third child node, if it exists, to get the column path
    colPath = QualifiedNameUtil.getColPath(db, tableTypeExpr, dbName, tableName, partSpec);
    // validate database
    if (dbName != null) {
        validateDatabase(dbName);
    }
    if (partSpec != null) {
        validateTable(tableName, partSpec);
    }
    DescTableDesc descTblDesc = new DescTableDesc(ctx.getResFile(), tableName, partSpec, colPath);
    boolean showColStats = false;
    if (ast.getChildCount() == 2) {
        int descOptions = ast.getChild(1).getType();
        descTblDesc.setFormatted(descOptions == HiveParser.KW_FORMATTED);
        descTblDesc.setExt(descOptions == HiveParser.KW_EXTENDED);
        descTblDesc.setPretty(descOptions == HiveParser.KW_PRETTY);
        // show column stats only when a specific column is described with FORMATTED
        if (!colPath.equalsIgnoreCase(tableName) && descTblDesc.isFormatted()) {
            showColStats = true;
        }
    }
    inputs.add(new ReadEntity(getTable(tableName)));
    Task ddlTask = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descTblDesc), conf);
    rootTasks.add(ddlTask);
    String schema = DescTableDesc.getSchema(showColStats);
    setFetchTask(createFetchTask(schema));
    LOG.info("analyzeDescribeTable done");
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), Task (org.apache.hadoop.hive.ql.exec.Task), ColumnStatsUpdateTask (org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), DescTableDesc (org.apache.hadoop.hive.ql.plan.DescTableDesc)
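The Javadoc tree above hinges on one convention: a TOK_TABNAME node with a single child carries a bare table name, while two children mean the first child is the database. A condensed, hypothetical helper (not Hive source) distilling just that branch of the method:

import org.apache.hadoop.hive.ql.parse.ASTNode;

// Hypothetical helper class; not part of Hive.
final class TabNameSketch {
    static String qualifiedName(ASTNode tokTabName) {
        if (tokTabName.getChildCount() == 1) {
            // "describe maptable" -> single child, unqualified name
            return ((ASTNode) tokTabName.getChild(0)).getText();
        }
        // "describe default.maptable" -> first child is the DB, second the table
        String dbName = ((ASTNode) tokTabName.getChild(0)).getText();
        String tableName = ((ASTNode) tokTabName.getChild(1)).getText();
        return dbName + "." + tableName;
    }
}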

Example 23 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeDropDatabase.

private void analyzeDropDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    boolean ifExists = false;
    boolean ifCascade = false;
    if (null != ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS)) {
        ifExists = true;
    }
    if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
        ifCascade = true;
    }
    Database database = getDatabase(dbName, !ifExists);
    if (database == null) {
        return;
    }
    // if cascade=true, then we need to authorize the drop table action as well
    if (ifCascade) {
        // add the tables as well to outputs
        List<String> tableNames;
        // get names of all tables under this dbName
        try {
            tableNames = db.getAllTables(dbName);
        } catch (HiveException e) {
            throw new SemanticException(e);
        }
        // add tables to outputs
        if (tableNames != null) {
            for (String tableName : tableNames) {
                Table table = getTable(dbName, tableName, true);
                // We want no lock here, as the database lock will cover the tables,
                // and putting a lock will actually cause us to deadlock on ourselves.
                outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
            }
        }
    }
    inputs.add(new ReadEntity(database));
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));
    DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc), conf));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DropDatabaseDesc (org.apache.hadoop.hive.ql.plan.DropDatabaseDesc), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), Database (org.apache.hadoop.hive.metastore.api.Database), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
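The noteworthy design choice above is the asymmetry in lock types: the database itself is written with DDL_EXCLUSIVE, while each contained table is registered with DDL_NO_LOCK, because the database lock already covers the tables and a per-table lock would deadlock against it. A minimal sketch of that registration, under a hypothetical helper name; the WriteEntity constructors and WriteType constants are the ones used in the example:

import java.util.HashSet;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Table;

// Hypothetical helper class; not part of Hive.
final class DropDbOutputsSketch {
    static void registerOutputs(HashSet<WriteEntity> outputs, Database database, Iterable<Table> tables) {
        for (Table table : tables) {
            // Covered by the database lock below; taking a real lock here
            // would deadlock against it.
            outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
        }
        // The exclusive lock on the database covers all of its tables.
        outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));
    }
}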

Example 24 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeDescFunction.

/**
   * Add the task according to the parsed command tree. This is used for the CLI
   * command "DESCRIBE FUNCTION;".
   *
   * @param ast
   *          The parsed command tree.
   * @throws SemanticException
   *           Parsing failed
   */
private void analyzeDescFunction(ASTNode ast) throws SemanticException {
    String funcName;
    boolean isExtended;
    if (ast.getChildCount() == 1) {
        funcName = stripQuotes(ast.getChild(0).getText());
        isExtended = false;
    } else if (ast.getChildCount() == 2) {
        funcName = stripQuotes(ast.getChild(0).getText());
        isExtended = true;
    } else {
        throw new SemanticException("Unexpected Tokens at DESCRIBE FUNCTION");
    }
    DescFunctionDesc descFuncDesc = new DescFunctionDesc(ctx.getResFile(), funcName, isExtended);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descFuncDesc), conf));
    setFetchTask(createFetchTask(descFuncDesc.getSchema()));
}
Also used: DescFunctionDesc (org.apache.hadoop.hive.ql.plan.DescFunctionDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)
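The dispatch on child count above is the whole grammar contract for this statement: one child is the bare function name, two children mean EXTENDED was present. A condensed sketch with a hypothetical helper name; the exception message is copied from the example:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical helper class; not part of Hive.
final class DescFunctionArgsSketch {
    static boolean isExtended(ASTNode ast) throws SemanticException {
        switch (ast.getChildCount()) {
            case 1:
                // DESCRIBE FUNCTION name
                return false;
            case 2:
                // DESCRIBE FUNCTION EXTENDED name
                return true;
            default:
                throw new SemanticException("Unexpected Tokens at DESCRIBE FUNCTION");
        }
    }
}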

Example 25 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeAlterTableDropParts.

private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean expectView) throws SemanticException {
    boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null) || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
    // If the drop has to fail on non-existent partitions, we cannot batch expressions.
    // That is because we actually have to check each separate expression for existence.
    // We could do a small optimization for the case where expr has all columns and all
    // operators are equality, if we assume those would always match one partition (which
    // may not be true with legacy, non-normalized column values). This is probably a
    // popular case but that's kinda hacky. Let's not do it for now.
    boolean canGroupExprs = ifExists;
    boolean mustPurge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null);
    ReplicationSpec replicationSpec = new ReplicationSpec(ast);
    Table tab = null;
    try {
        tab = getTable(qualified);
    } catch (SemanticException se) {
        if (replicationSpec.isInReplicationScope() && ((se.getCause() instanceof InvalidTableException) || (se.getMessage().contains(ErrorMsg.INVALID_TABLE.getMsg())))) {
            // We just return in that case, no drop needed.
            return;
        // TODO : the contains message check is fragile, we should refactor SemanticException to be
        // queriable for error code, and not simply have a message
        // NOTE : IF_EXISTS might also want to invoke this, but there's a good possibility
        // that IF_EXISTS is stricter about table existence, and applies only to the ptn.
        // Therefore, ignoring IF_EXISTS here.
        } else {
            throw se;
        }
    }
    Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = getFullPartitionSpecs(ast, tab, canGroupExprs);
    // nothing to do
    if (partSpecs.isEmpty())
        return;
    validateAlterTableType(tab, AlterTableTypes.DROPPARTITION, expectView);
    ReadEntity re = new ReadEntity(tab);
    re.noLockNeeded();
    inputs.add(re);
    addTableDropPartsOutputs(tab, partSpecs.values(), !ifExists);
    DropTableDesc dropTblDesc = new DropTableDesc(getDotName(qualified), partSpecs, expectView ? TableType.VIRTUAL_VIEW : null, mustPurge, replicationSpec);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException), DropTableDesc (org.apache.hadoop.hive.ql.plan.DropTableDesc), ArrayList (java.util.ArrayList), List (java.util.List), LinkedList (java.util.LinkedList)
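The flag derivation at the top of the method carries the batching argument in the comment: partition expressions may only be grouped when missing partitions are tolerated, either via IF EXISTS in the statement or via the ConfVars.DROPIGNORESNONEXISTENT setting. A standalone sketch of that derivation, under a hypothetical helper name; the calls are taken from the example:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;

// Hypothetical helper class; not part of Hive.
final class DropPartsFlagsSketch {
    static boolean canGroupExprs(ASTNode ast, HiveConf conf) {
        // Absence of a partition is tolerated if IF EXISTS was written or the
        // DROPIGNORESNONEXISTENT setting is on; only then is batching safe,
        // since no per-expression existence check is required.
        boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null)
                || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
        return ifExists;
    }
}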

Aggregations

DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 104 uses
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 23 uses
Table (org.apache.hadoop.hive.ql.metadata.Table): 20 uses
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 20 uses
Test (org.junit.Test): 20 uses
PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc): 17 uses
LinkedHashMap (java.util.LinkedHashMap): 15 uses
HashMap (java.util.HashMap): 14 uses
ArrayList (java.util.ArrayList): 13 uses
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 12 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 12 uses
PrivilegeDesc (org.apache.hadoop.hive.ql.plan.PrivilegeDesc): 9 uses
RoleDDLDesc (org.apache.hadoop.hive.ql.plan.RoleDDLDesc): 9 uses
Map (java.util.Map): 8 uses
Path (org.apache.hadoop.fs.Path): 8 uses
FileNotFoundException (java.io.FileNotFoundException): 7 uses
LinkedList (java.util.LinkedList): 7 uses
List (java.util.List): 7 uses
GrantRevokeRoleDDL (org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL): 7 uses
ShowGrantDesc (org.apache.hadoop.hive.ql.plan.ShowGrantDesc): 7 uses