Example 36 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeCreateDatabase.

private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        ASTNode childNode = (ASTNode) ast.getChild(i);
        switch(childNode.getToken().getType()) {
            case HiveParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveParser.TOK_DATABASECOMMENT:
                dbComment = unescapeSQLString(childNode.getChild(0).getText());
                break;
            case TOK_DATABASEPROPERTIES:
                dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0));
                break;
            case TOK_DATABASELOCATION:
                dbLocation = unescapeSQLString(childNode.getChild(0).getText());
                addLocationToOutputs(dbLocation);
                break;
            default:
                throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
        }
    }
    CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists);
    if (dbProps != null) {
        createDatabaseDesc.setDatabaseProperties(dbProps);
    }
    Database database = new Database(dbName, dbComment, dbLocation, dbProps);
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), createDatabaseDesc)));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), CreateDatabaseDesc (org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc), ShowCreateDatabaseDesc (org.apache.hadoop.hive.ql.plan.ShowCreateDatabaseDesc), Database (org.apache.hadoop.hive.metastore.api.Database), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint), DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
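
All of these examples share one shape: build a descriptor (a *Desc object) for the DDL operation, wrap it in a DDLWork together with the analyzer's read/write entity sets, and turn it into an executable task with TaskFactory.get. Below is a minimal, self-contained sketch of that shape using the same CreateDatabaseDesc constructor as the example above; the class name, database name, and property values are illustrative assumptions, and empty entity sets stand in for getInputs()/getOutputs().

import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;

public class CreateDatabaseWorkSketch {
    public static Task<? extends Serializable> buildTask() {
        // Same constructor arguments as in analyzeCreateDatabase above:
        // database name, comment, location URI, ifNotExists.
        CreateDatabaseDesc desc = new CreateDatabaseDesc("demo_db", "a demo database", null, true);
        Map<String, String> props = new HashMap<String, String>();
        props.put("owner.team", "analytics");
        desc.setDatabaseProperties(props);
        // DDLWork pairs the descriptor with the read/write entities that hooks and
        // authorization consume; empty sets stand in for getInputs()/getOutputs().
        DDLWork work = new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc);
        // TaskFactory resolves the work into the task that will execute the DDL.
        return TaskFactory.get(work);
    }
}

In the analyzer itself the real getInputs()/getOutputs() sets are passed instead of fresh ones, so hooks, authorization, and the lock manager see the entities registered during analysis.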

Example 37 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeAlterTableDropParts.

private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean expectView) throws SemanticException {
    boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null) || HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
    // If the drop has to fail on non-existent partitions, we cannot batch the expressions.
    // That is because we actually have to check each separate expression for existence.
    // We could do a small optimization for the case where each expression covers all the
    // partition columns and all operators are equality, if we assume those would always
    // match one partition (which may not be true with legacy, non-normalized column values).
    // That is probably a common case, but the optimization is somewhat hacky, so it is not done for now.
    boolean canGroupExprs = ifExists;
    boolean mustPurge = (ast.getFirstChildWithType(HiveParser.KW_PURGE) != null);
    ReplicationSpec replicationSpec = new ReplicationSpec(ast);
    Table tab = null;
    try {
        tab = getTable(qualified);
    } catch (SemanticException se) {
        if (replicationSpec.isInReplicationScope() && ((se.getCause() instanceof InvalidTableException) || (se.getMessage().contains(ErrorMsg.INVALID_TABLE.getMsg())))) {
            // We just return in that case, no drop needed.
            return;
        // TODO: the "contains message" check is fragile; we should refactor SemanticException to be
        // queryable for an error code instead of relying only on the message text.
        // NOTE: IF_EXISTS might also want to invoke this, but there's a good possibility
        // that IF_EXISTS is stricter about table existence and applies only to the partition.
        // Therefore, ignoring IF_EXISTS here.
        } else {
            throw se;
        }
    }
    Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = getFullPartitionSpecs(ast, tab, canGroupExprs);
    if (partSpecs.isEmpty()) {
        // nothing to do
        return;
    }
    validateAlterTableType(tab, AlterTableTypes.DROPPARTITION, expectView);
    ReadEntity re = new ReadEntity(tab);
    re.noLockNeeded();
    inputs.add(re);
    addTableDropPartsOutputs(tab, partSpecs.values(), !ifExists);
    DropTableDesc dropTblDesc = new DropTableDesc(getDotName(qualified), partSpecs, expectView ? TableType.VIRTUAL_VIEW : null, mustPurge, replicationSpec);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc)));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException), DropTableDesc (org.apache.hadoop.hive.ql.plan.DropTableDesc), ArrayList (java.util.ArrayList), List (java.util.List), ImmutableList (com.google.common.collect.ImmutableList), LinkedList (java.util.LinkedList)
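
The drop-partition descriptor above can be packaged the same way. The following hedged sketch assumes the partition-spec expression map has already been resolved (in the example it comes from getFullPartitionSpecs) and uses a made-up table name; only the constructors shown in the example are used.

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

public class DropPartitionsWorkSketch {
    public static DDLWork build(Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs) {
        // Same constructor as the example: table name, partition-spec expressions,
        // expected table type (null for a base table, TableType.VIRTUAL_VIEW for a view),
        // purge flag, and replication spec. "default.sales" is a placeholder name.
        DropTableDesc desc = new DropTableDesc("default.sales", partSpecs, (TableType) null,
            false, new ReplicationSpec());
        return new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc);
    }
}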

Example 38 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeAlterTableBucketNum.

private void analyzeAlterTableBucketNum(ASTNode ast, String tblName, HashMap<String, String> partSpec) throws SemanticException {
    Table tab = getTable(tblName, true);
    if (tab.getBucketCols() == null || tab.getBucketCols().isEmpty()) {
        throw new SemanticException(ErrorMsg.ALTER_BUCKETNUM_NONBUCKETIZED_TBL.getMsg());
    }
    validateAlterTableType(tab, AlterTableTypes.ALTERBUCKETNUM);
    inputs.add(new ReadEntity(tab));
    int bucketNum = Integer.parseInt(ast.getChild(0).getText());
    AlterTableDesc alterBucketNum = new AlterTableDesc(tblName, partSpec, bucketNum);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterBucketNum)));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint), DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
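
A sketch of the bucket-number change in isolation is shown below. It mirrors the inputs.add(new ReadEntity(tab)) call from the example; the method name and the bucket count of 8 are illustrative assumptions, and the Table is passed in rather than looked up via getTable.

import java.util.HashMap;
import java.util.HashSet;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;

public class AlterBucketNumWorkSketch {
    public static DDLWork build(Table tab, HashMap<String, String> partSpec) {
        HashSet<ReadEntity> inputs = new HashSet<ReadEntity>();
        HashSet<WriteEntity> outputs = new HashSet<WriteEntity>();
        // The table being altered is registered as a read entity, as in the example.
        inputs.add(new ReadEntity(tab));
        // Same constructor as the example: table name, optional partition spec, new bucket count.
        AlterTableDesc desc = new AlterTableDesc(tab.getTableName(), partSpec, 8);
        return new DDLWork(inputs, outputs, desc);
    }
}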

Example 39 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeUnlockDatabase.

private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    inputs.add(new ReadEntity(getDatabase(dbName)));
    // The unlock database operation releases the lock explicitly; the operation
    // itself does not need to be locked. Set the WriteEntity WriteType to
    // DDL_NO_LOCK here, otherwise it would conflict with Hive's transaction
    // handling.
    outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK));
    UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName);
    DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc);
    rootTasks.add(TaskFactory.get(work));
    // Need to initialize the lock manager
    ctx.setNeedLockMgr(true);
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), UnlockDatabaseDesc (org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
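
The executing side later decides what to run by checking which descriptor the DDLWork carries. The sketch below constructs the unlock work with a placeholder database name and then checks for the wrapped descriptor; the per-descriptor getter on DDLWork is an assumption here, as it is not shown in the excerpt above.

import java.util.HashSet;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;

public class UnlockDatabaseWorkSketch {
    public static DDLWork build() {
        // "demo_db" is a placeholder database name.
        UnlockDatabaseDesc desc = new UnlockDatabaseDesc("demo_db");
        return new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc);
    }

    public static boolean wrapsUnlock(DDLWork work) {
        // Assumption (not shown in the excerpt): DDLWork exposes the wrapped
        // descriptor through a per-descriptor getter, and the executing task
        // dispatches on which of these getters returns a non-null value.
        return work.getUnlockDatabaseDesc() != null;
    }
}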

Example 40 with DDLWork

Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeDropDatabase.

private void analyzeDropDatabase(ASTNode ast) throws SemanticException {
    String dbName = unescapeIdentifier(ast.getChild(0).getText());
    boolean ifExists = false;
    boolean ifCascade = false;
    if (null != ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS)) {
        ifExists = true;
    }
    if (null != ast.getFirstChildWithType(HiveParser.TOK_CASCADE)) {
        ifCascade = true;
    }
    Database database = getDatabase(dbName, !ifExists);
    if (database == null) {
        return;
    }
    // if cascade=true, then we need to authorize the drop table action as well
    if (ifCascade) {
        // add the tables as well to outputs
        List<String> tableNames;
        // get names of all tables under this dbName
        try {
            tableNames = db.getAllTables(dbName);
        } catch (HiveException e) {
            throw new SemanticException(e);
        }
        // add tables to outputs
        if (tableNames != null) {
            for (String tableName : tableNames) {
                Table table = getTable(dbName, tableName, true);
                // We want no lock here, as the database lock will cover the tables,
                // and putting a lock will actually cause us to deadlock on ourselves.
                outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_NO_LOCK));
            }
        }
    }
    inputs.add(new ReadEntity(database));
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));
    DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade, new ReplicationSpec());
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc)));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DropDatabaseDesc (org.apache.hadoop.hive.ql.plan.DropDatabaseDesc), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), Database (org.apache.hadoop.hive.metastore.api.Database), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
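
The drop-database case combines the pieces seen in the earlier examples: a metastore Database object registered as a read entity and as an exclusive-lock write entity, plus a DropDatabaseDesc carrying the ifExists/cascade flags. A hedged sketch with placeholder values, using only the constructors shown above:

import java.util.HashSet;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;

public class DropDatabaseWorkSketch {
    public static DDLWork build() {
        HashSet<ReadEntity> inputs = new HashSet<ReadEntity>();
        HashSet<WriteEntity> outputs = new HashSet<WriteEntity>();
        // Placeholder database metadata; in the analyzer this comes from getDatabase(dbName).
        Database database = new Database("demo_db", "a demo database", null, null);
        inputs.add(new ReadEntity(database));
        // The database itself is written with an exclusive DDL lock, as in the example.
        outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_EXCLUSIVE));
        // Same constructor as the example: name, ifExists, cascade, replication spec.
        DropDatabaseDesc desc = new DropDatabaseDesc("demo_db", true, false, new ReplicationSpec());
        return new DDLWork(inputs, outputs, desc);
    }
}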

Aggregations

DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 141
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 26
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 24
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 24
Table (org.apache.hadoop.hive.ql.metadata.Table): 22
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 20
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 20
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 20
PrincipalDesc (org.apache.hadoop.hive.ql.plan.PrincipalDesc): 20
Test (org.junit.Test): 20
ArrayList (java.util.ArrayList): 19
DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint): 19
NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint): 19
HashMap (java.util.HashMap): 17
LinkedHashMap (java.util.LinkedHashMap): 16
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 14
Task (org.apache.hadoop.hive.ql.exec.Task): 11
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 11
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 10
Serializable (java.io.Serializable): 9