Example 91 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class DropMaterializedViewAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    String viewName = getUnescapedName((ASTNode) root.getChild(0));
    boolean ifExists = (root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
    boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
    Table materializedView = getTable(viewName, throwException);
    if (materializedView != null) {
        inputs.add(new ReadEntity(materializedView));
        outputs.add(new WriteEntity(materializedView, WriteEntity.WriteType.DDL_EXCLUSIVE));
    }
    DropMaterializedViewDesc desc = new DropMaterializedViewDesc(viewName, ifExists);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
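
Each example on this page follows the same skeleton: pull the statement's arguments out of the parse tree, build an operation-specific descriptor, wrap it in a DDLWork together with the analyzer's read/write entity sets, and register the resulting task. A minimal sketch of that skeleton, assuming a hypothetical MyDropDesc descriptor and MyDropAnalyzer class (neither exists in Hive; DDLDesc is taken here to be a plain marker interface):

import java.io.Serializable;

import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class MyDropAnalyzer extends BaseSemanticAnalyzer {

    // Hypothetical descriptor: holds whatever the operation needs at execution time.
    private static class MyDropDesc implements DDLDesc, Serializable {
        private final String name;

        MyDropDesc(String name) {
            this.name = name;
        }
    }

    public MyDropAnalyzer(QueryState queryState) throws SemanticException {
        super(queryState);
    }

    @Override
    public void analyzeInternal(ASTNode root) throws SemanticException {
        // 1. Extract the statement's arguments from the parse tree.
        String name = getUnescapedName((ASTNode) root.getChild(0));
        // 2. Build the descriptor for the operation.
        MyDropDesc desc = new MyDropDesc(name);
        // 3. Wrap it in a DDLWork carrying the entities read and written.
        DDLWork work = new DDLWork(getInputs(), getOutputs(), desc);
        // 4. Register the work as a root task for execution.
        rootTasks.add(TaskFactory.get(work));
    }
}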

Example 92 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class AlterMaterializedViewRewriteAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    TableName tableName = getQualifiedTableName((ASTNode) root.getChild(0));
    // Value for the flag
    boolean rewriteEnable;
    switch(root.getChild(1).getType()) {
        case HiveParser.TOK_REWRITE_ENABLED:
            rewriteEnable = true;
            break;
        case HiveParser.TOK_REWRITE_DISABLED:
            rewriteEnable = false;
            break;
        default:
            throw new SemanticException("Invalid alter materialized view expression");
    }
    // The name may be fully qualified or relative to the default database
    Table materializedViewTable = getTable(tableName, true);
    // Rewriting can only be enabled if the view uses transactional (MM and ACID) source tables exclusively
    if (rewriteEnable) {
        for (SourceTable sourceTable : materializedViewTable.getMVMetadata().getSourceTables()) {
            if (!AcidUtils.isTransactionalTable(sourceTable.getTable())) {
                throw new SemanticException("Automatic rewriting for materialized view cannot be enabled if the " + "materialized view uses non-transactional tables");
            }
        }
    }
    AlterMaterializedViewRewriteDesc desc = new AlterMaterializedViewRewriteDesc(tableName.getNotEmptyDbTable(), rewriteEnable);
    if (AcidUtils.isTransactionalTable(materializedViewTable)) {
        ddlDescWithWriteId = desc;
    }
    inputs.add(new ReadEntity(materializedViewTable));
    outputs.add(new WriteEntity(materializedViewTable, WriteEntity.WriteType.DDL_EXCLUSIVE));
    // Create the task for the alter-rewrite desc
    DDLWork work = new DDLWork(getInputs(), getOutputs(), desc);
    Task<?> targetTask = TaskFactory.get(work);
    // Create task to update the rewrite flag as a dependent of the previous one
    MaterializedViewUpdateDesc materializedViewUpdateDesc = new MaterializedViewUpdateDesc(tableName.getNotEmptyDbTable(), rewriteEnable, !rewriteEnable, false);
    DDLWork updateDdlWork = new DDLWork(getInputs(), getOutputs(), materializedViewUpdateDesc);
    targetTask.addDependentTask(TaskFactory.get(updateDdlWork, conf));
    // Add root task
    rootTasks.add(targetTask);
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), TableName (org.apache.hadoop.hive.common.TableName), Table (org.apache.hadoop.hive.ql.metadata.Table), SourceTable (org.apache.hadoop.hive.metastore.api.SourceTable), MaterializedViewUpdateDesc (org.apache.hadoop.hive.ql.ddl.view.materialized.update.MaterializedViewUpdateDesc), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
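
For context, the statement this analyzer handles is ALTER MATERIALIZED VIEW ... ENABLE|DISABLE REWRITE. Below is a hedged usage sketch over JDBC (the connection URL, database, and view names are placeholders, not taken from the example); per the loop above, enabling rewrite fails with a SemanticException if any source table of the view is non-transactional.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class AlterMVRewriteExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
            // Succeeds only if every source table of the view is transactional
            // (MM or ACID), as enforced by the analyzer above.
            stmt.execute("ALTER MATERIALIZED VIEW db.mv ENABLE REWRITE");
            // Disabling rewrite requires no such check.
            stmt.execute("ALTER MATERIALIZED VIEW db.mv DISABLE REWRITE");
        }
    }
}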

Example 93 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class CreateWMPoolAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // TODO: allow defaults for e.g. scheduling policy.
    if (root.getChildCount() < 3) {
        throw new SemanticException("Expected more arguments: " + root.toStringTree());
    }
    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
    String poolPath = WMUtils.poolPath(root.getChild(1));
    Double allocFraction = null;
    Integer queryParallelism = null;
    String schedulingPolicy = null;
    for (int i = 2; i < root.getChildCount(); ++i) {
        Tree child = root.getChild(i);
        if (child.getChildCount() != 1) {
            throw new SemanticException("Expected 1 paramter for: " + child.getText());
        }
        String param = child.getChild(0).getText();
        switch(child.getType()) {
            case HiveParser.TOK_ALLOC_FRACTION:
                allocFraction = Double.parseDouble(param);
                break;
            case HiveParser.TOK_QUERY_PARALLELISM:
                queryParallelism = Integer.parseInt(param);
                break;
            case HiveParser.TOK_SCHEDULING_POLICY:
                schedulingPolicy = PlanUtils.stripQuotes(param);
                break;
            case HiveParser.TOK_PATH:
                throw new SemanticException("Invalid parameter path in create pool");
            default:
                throw new SemanticException("Invalid parameter " + child.getText() + " in create pool");
        }
    }
    if (allocFraction == null) {
        throw new SemanticException("alloc_fraction should be specified for a pool");
    }
    if (queryParallelism == null) {
        throw new SemanticException("query_parallelism should be specified for a pool");
    }
    CreateWMPoolDesc desc = new CreateWMPoolDesc(resourcePlanName, poolPath, allocFraction, queryParallelism, schedulingPolicy);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Tree (org.antlr.runtime.tree.Tree), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
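
The triggering statement here is Hive's workload-management CREATE POOL. A short sketch reusing stmt from the JDBC example under Example 92 (plan and pool names are illustrative, and the exact WITH-clause syntax is an assumption based on Hive's WM grammar); note that the null checks above make ALLOC_FRACTION and QUERY_PARALLELISM mandatory while SCHEDULING_POLICY stays optional:

// Reuses stmt from the JDBC sketch above; names and values illustrative.
// ALLOC_FRACTION and QUERY_PARALLELISM are required by the analyzer's
// null checks; SCHEDULING_POLICY may be omitted.
stmt.execute("CREATE POOL my_plan.etl WITH "
        + "ALLOC_FRACTION = 0.5, QUERY_PARALLELISM = 4, SCHEDULING_POLICY = 'fair'");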

Example 94 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class AlterTableSetFileFormatAnalyzer, method analyzeCommand:

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    StorageFormat format = new StorageFormat(conf);
    ASTNode child = (ASTNode) command.getChild(0);
    if (!format.fillStorageFormat(child)) {
        throw new AssertionError("Unknown token " + child.getText());
    }
    AlterTableSetFileFormatDesc desc = new AlterTableSetFileFormatDesc(tableName, partitionSpec, format.getInputFormat(), format.getOutputFormat(), format.getSerde());
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_FILE_FORMAT, false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    setAcidDdlDesc(getTable(tableName), desc);
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), StorageFormat (org.apache.hadoop.hive.ql.parse.StorageFormat)
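
The statements routed here are the SET FILEFORMAT variants of ALTER TABLE, with or without a partition spec. Another short sketch reusing stmt from the JDBC example (table, partition, and format choices are illustrative):

// Whole-table form: partitionSpec arrives null in analyzeCommand.
stmt.execute("ALTER TABLE sales SET FILEFORMAT ORC");
// Per-partition form: partitionSpec carries the ds entry.
stmt.execute("ALTER TABLE sales PARTITION (ds = '2024-01-01') SET FILEFORMAT PARQUET");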

Example 95 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class AlterWMPoolAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() < 3) {
        throw new SemanticException("Invalid syntax for alter pool: " + root.toStringTree());
    }
    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
    String poolPath = WMUtils.poolPath(root.getChild(1));
    Double allocFraction = null;
    Integer queryParallelism = null;
    String schedulingPolicy = null;
    boolean removeSchedulingPolicy = false;
    String newPath = null;
    for (int i = 2; i < root.getChildCount(); ++i) {
        Tree child = root.getChild(i);
        if (child.getChildCount() != 1) {
            throw new SemanticException("Invalid syntax in alter pool expected parameter.");
        }
        Tree param = child.getChild(0);
        switch(child.getType()) {
            case HiveParser.TOK_ALLOC_FRACTION:
                allocFraction = Double.parseDouble(param.getText());
                break;
            case HiveParser.TOK_QUERY_PARALLELISM:
                queryParallelism = Integer.parseInt(param.getText());
                break;
            case HiveParser.TOK_SCHEDULING_POLICY:
                if (param.getType() != HiveParser.TOK_NULL) {
                    schedulingPolicy = PlanUtils.stripQuotes(param.getText());
                } else {
                    removeSchedulingPolicy = true;
                }
                break;
            case HiveParser.TOK_PATH:
                newPath = WMUtils.poolPath(param);
                break;
            default:
                throw new SemanticException("Incorrect alter syntax: " + child.toStringTree());
        }
    }
    AlterWMPoolDesc desc = new AlterWMPoolDesc(resourcePlanName, poolPath, allocFraction, queryParallelism, schedulingPolicy, removeSchedulingPolicy, newPath);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Tree (org.antlr.runtime.tree.Tree), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
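
Finally, the ALTER POOL variants that reach this analyzer, again sketched against the reused stmt (names illustrative; the UNSET and SET PATH forms are assumptions matching the TOK_NULL and TOK_PATH branches above):

// SET accepts any combination of the parameters parsed in the loop above.
stmt.execute("ALTER POOL my_plan.etl SET ALLOC_FRACTION = 0.8, QUERY_PARALLELISM = 6");
// UNSET SCHEDULING_POLICY parses as TOK_NULL and sets removeSchedulingPolicy.
stmt.execute("ALTER POOL my_plan.etl UNSET SCHEDULING_POLICY");
// SET PATH moves the pool; it is parsed via TOK_PATH into newPath.
stmt.execute("ALTER POOL my_plan.etl SET PATH = my_plan.batch");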

Aggregations

Usage counts for DDLWork and the classes that most often appear alongside it across the indexed examples:

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 61
Table (org.apache.hadoop.hive.ql.metadata.Table): 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 24
TableName (org.apache.hadoop.hive.common.TableName): 23
Test (org.junit.Test): 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc): 21
ArrayList (java.util.ArrayList): 18
Path (org.apache.hadoop.fs.Path): 15
HashMap (java.util.HashMap): 14
Database (org.apache.hadoop.hive.metastore.api.Database): 12
Task (org.apache.hadoop.hive.ql.exec.Task): 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
Tree (org.antlr.runtime.tree.Tree): 10
HashSet (java.util.HashSet): 9
Context (org.apache.hadoop.hive.ql.Context): 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc): 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc): 8