Search in sources :

Example 1 with AlterResourcePlanDesc

Example usage of org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc in the Apache Hive project.

From the class DDLSemanticAnalyzer, the method analyzeAlterResourcePlan:

/**
 * Analyzes an ALTER RESOURCE PLAN statement (workload management) and schedules the
 * corresponding DDL task on {@code rootTasks}.
 *
 * <p>Two forms are handled:
 * <ul>
 *   <li>Global ENABLE/DISABLE with no plan name: ENABLE is rejected with a message
 *       (activating a plan is the only way to enable WM); DISABLE builds a descriptor
 *       that disables whichever plan is currently enabled.</li>
 *   <li>Named-plan form: the first child is the plan name, and the remaining children
 *       are clauses (VALIDATE, ACTIVATE [WITH REPLACE], ENABLE, DISABLE, REPLACE,
 *       QUERY_PARALLELISM, DEFAULT POOL, RENAME) folded into a single
 *       {@link WMNullableResourcePlan} change-set.</li>
 * </ul>
 *
 * @param ast the parsed ALTER RESOURCE PLAN subtree
 * @throws SemanticException on malformed syntax or unexpected tokens
 */
private void analyzeAlterResourcePlan(ASTNode ast) throws SemanticException {
    if (ast.getChildCount() < 1) {
        throw new SemanticException("Incorrect syntax");
    }
    // First child is either a global ENABLE/DISABLE token or the resource plan name.
    Tree nameOrGlobal = ast.getChild(0);
    switch(nameOrGlobal.getType()) {
        case HiveParser.TOK_ENABLE:
            // This command exists solely to output this message. TODO: can we do it w/o an error?
            throw new SemanticException("Activate a resource plan to enable workload management");
        case HiveParser.TOK_DISABLE:
            // Global DISABLE: target whichever plan is currently ENABLED rather than a
            // specific named plan (hence the null plan name in the descriptor below).
            WMNullableResourcePlan anyRp = new WMNullableResourcePlan();
            anyRp.setStatus(WMResourcePlanStatus.ENABLED);
            // NOTE(review): boolean args presumably map to
            // (validate, isEnableActivate, isForceDeactivate/disable, isReplace) —
            // confirm against the AlterResourcePlanDesc constructor.
            AlterResourcePlanDesc desc = new AlterResourcePlanDesc(anyRp, null, false, false, true, false);
            addServiceOutput();
            rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
            return;
        // Continue to handle changes to a specific plan.
        default:
    }
    // Named-plan form requires at least one clause after the plan name.
    if (ast.getChildCount() < 2) {
        throw new SemanticException("Invalid syntax for ALTER RESOURCE PLAN statement");
    }
    String rpName = unescapeIdentifier(ast.getChild(0).getText());
    // Accumulates all requested changes; "nullable" fields distinguish "unset this
    // property" from "leave it unchanged".
    WMNullableResourcePlan resourcePlan = new WMNullableResourcePlan();
    // isEnableActivate: set when ENABLE and ACTIVATE both appear in the same statement.
    // The detection is order-dependent: each clause checks whether the other has already
    // set the status (see the getStatus() checks in TOK_ACTIVATE and TOK_ENABLE below).
    boolean isEnableActivate = false, isReplace = false;
    boolean validate = false;
    for (int i = 1; i < ast.getChildCount(); ++i) {
        Tree child = ast.getChild(i);
        switch(child.getType()) {
            case HiveParser.TOK_VALIDATE:
                validate = true;
                break;
            case HiveParser.TOK_ACTIVATE:
                // ENABLED here means an earlier ENABLE clause already ran, so this is
                // the combined "ENABLE ... ACTIVATE" case.
                if (resourcePlan.getStatus() == WMResourcePlanStatus.ENABLED) {
                    isEnableActivate = true;
                }
                if (child.getChildCount() > 1) {
                    throw new SemanticException("Expected 0 or 1 arguments " + ast.toStringTree());
                } else if (child.getChildCount() == 1) {
                    // Only legal argument is WITH REPLACE.
                    if (child.getChild(0).getType() != HiveParser.TOK_REPLACE) {
                        throw new SemanticException("Incorrect syntax " + ast.toStringTree());
                    }
                    isReplace = true;
                    // Implied.
                    isEnableActivate = false;
                }
                resourcePlan.setStatus(WMResourcePlanStatus.ACTIVE);
                break;
            case HiveParser.TOK_ENABLE:
                // ACTIVE here means an earlier ACTIVATE clause already ran; combined
                // enable+activate applies unless REPLACE was requested (replace implies it).
                if (resourcePlan.getStatus() == WMResourcePlanStatus.ACTIVE) {
                    isEnableActivate = !isReplace;
                } else {
                    resourcePlan.setStatus(WMResourcePlanStatus.ENABLED);
                }
                break;
            case HiveParser.TOK_DISABLE:
                resourcePlan.setStatus(WMResourcePlanStatus.DISABLED);
                break;
            case HiveParser.TOK_REPLACE:
                isReplace = true;
                if (child.getChildCount() > 1) {
                    throw new SemanticException("Expected 0 or 1 arguments " + ast.toStringTree());
                } else if (child.getChildCount() == 1) {
                    // Replace is essentially renaming a plan to the name of an existing plan, with backup.
                    resourcePlan.setName(unescapeIdentifier(child.getChild(0).getText()));
                } else {
                    // Bare REPLACE: swap this plan in as the active plan.
                    resourcePlan.setStatus(WMResourcePlanStatus.ACTIVE);
                }
                break;
            case HiveParser.TOK_QUERY_PARALLELISM:
                {
                    if (child.getChildCount() != 1) {
                        throw new SemanticException("Expected one argument");
                    }
                    Tree val = child.getChild(0);
                    // Mark the field as explicitly set even when the value is NULL,
                    // so "set to null" is distinguishable from "not mentioned".
                    resourcePlan.setIsSetQueryParallelism(true);
                    if (val.getType() == HiveParser.TOK_NULL) {
                        resourcePlan.unsetQueryParallelism();
                    } else {
                        resourcePlan.setQueryParallelism(Integer.parseInt(val.getText()));
                    }
                    break;
                }
            case HiveParser.TOK_DEFAULT_POOL:
                {
                    if (child.getChildCount() != 1) {
                        throw new SemanticException("Expected one argument");
                    }
                    Tree val = child.getChild(0);
                    // Same set-vs-unset convention as QUERY_PARALLELISM above.
                    resourcePlan.setIsSetDefaultPoolPath(true);
                    if (val.getType() == HiveParser.TOK_NULL) {
                        resourcePlan.unsetDefaultPoolPath();
                    } else {
                        resourcePlan.setDefaultPoolPath(poolPath(child.getChild(0)));
                    }
                    break;
                }
            case HiveParser.TOK_RENAME:
                if (child.getChildCount() != 1) {
                    throw new SemanticException("Expected one argument");
                }
                resourcePlan.setName(unescapeIdentifier(child.getChild(0).getText()));
                break;
            default:
                throw new SemanticException("Unexpected token in alter resource plan statement: " + child.getType());
        }
    }
    AlterResourcePlanDesc desc = new AlterResourcePlanDesc(resourcePlan, rpName, validate, isEnableActivate, false, isReplace);
    if (validate) {
        // Validation output goes to a local temp result file that the fetch task reads.
        ctx.setResFile(ctx.getLocalTmpPath());
        desc.setResFile(ctx.getResFile().toString());
    }
    addServiceOutput();
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    if (validate) {
        // Surface validation results to the client via a fetch task.
        setFetchTask(createFetchTask(AlterResourcePlanDesc.getSchema()));
    }
}
Also used : WMNullableResourcePlan(org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) AlterResourcePlanDesc(org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc)

Aggregations

CommonTree (org.antlr.runtime.tree.CommonTree)1 Tree (org.antlr.runtime.tree.Tree)1 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)1 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)1 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)1 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)1 WMNullableResourcePlan (org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan)1 DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint)1 NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint)1 AlterResourcePlanDesc (org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc)1 DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)1