Search in sources :

Example 1 with CreateOrAlterWMPoolDesc

Use of org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeCreatePool.

/**
 * Analyzes a CREATE POOL statement and queues a DDL task that creates the
 * workload-management pool.
 *
 * Expected AST shape: child 0 = resource plan name, child 1 = pool path,
 * children 2..n = one-argument parameter nodes (alloc fraction, query
 * parallelism, scheduling policy). Both alloc_fraction and
 * query_parallelism are mandatory; scheduling policy is optional.
 *
 * @param ast the CREATE POOL subtree produced by the parser
 * @throws SemanticException on a malformed subtree, an invalid scheduling
 *         policy, or a missing mandatory parameter
 */
private void analyzeCreatePool(ASTNode ast) throws SemanticException {
    // TODO: allow defaults for e.g. scheduling policy.
    if (ast.getChildCount() < 3) {
        throw new SemanticException("Expected more arguments: " + ast.toStringTree());
    }
    String rpName = unescapeIdentifier(ast.getChild(0).getText());
    String poolPath = poolPath(ast.getChild(1));
    WMPool pool = new WMPool(rpName, poolPath);
    for (int i = 2; i < ast.getChildCount(); ++i) {
        Tree child = ast.getChild(i);
        if (child.getChildCount() != 1) {
            // Fixed typo in the error message: "paramter" -> "parameter".
            throw new SemanticException("Expected 1 parameter for: " + child.getText());
        }
        String param = child.getChild(0).getText();
        switch(child.getType()) {
            case HiveParser.TOK_ALLOC_FRACTION:
                pool.setAllocFraction(Double.parseDouble(param));
                break;
            case HiveParser.TOK_QUERY_PARALLELISM:
                pool.setQueryParallelism(Integer.parseInt(param));
                break;
            case HiveParser.TOK_SCHEDULING_POLICY:
                String schedulingPolicyStr = PlanUtils.stripQuotes(param);
                if (!MetaStoreUtils.isValidSchedulingPolicy(schedulingPolicyStr)) {
                    throw new SemanticException("Invalid scheduling policy " + schedulingPolicyStr);
                }
                pool.setSchedulingPolicy(schedulingPolicyStr);
                break;
            case HiveParser.TOK_PATH:
                // Path may only be changed via ALTER POOL, never on creation.
                throw new SemanticException("Invalid parameter path in create pool");
            default:
                // Previously unknown child tokens were silently skipped; fail fast
                // instead, consistent with analyzeAlterPool's default handling.
                throw new SemanticException("Incorrect create pool syntax: " + child.toStringTree());
        }
    }
    if (!pool.isSetAllocFraction()) {
        throw new SemanticException("alloc_fraction should be specified for a pool");
    }
    if (!pool.isSetQueryParallelism()) {
        throw new SemanticException("query_parallelism should be specified for a pool");
    }
    // 'false' marks this descriptor as a create (not an alter) operation.
    CreateOrAlterWMPoolDesc desc = new CreateOrAlterWMPoolDesc(pool, poolPath, false);
    addServiceOutput();
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) CreateOrAlterWMPoolDesc(org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc) CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) WMPool(org.apache.hadoop.hive.metastore.api.WMPool) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)

Example 2 with CreateOrAlterWMPoolDesc

Use of org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc in project hive by apache.

The class DDLSemanticAnalyzer, method analyzeAlterPool.

/**
 * Analyzes an ALTER POOL statement, queuing DDL tasks for any trigger
 * add/drop mappings and, if pool properties were modified, a single
 * pool-alteration task.
 *
 * Expected AST shape: child 0 = resource plan name, child 1 = pool path or
 * TOK_UNMANAGED, children 2..n = one-argument option nodes. For the
 * unmanaged pool only trigger changes are permitted.
 *
 * @param ast the ALTER POOL subtree produced by the parser
 * @throws SemanticException on a malformed subtree or a disallowed option
 */
private void analyzeAlterPool(ASTNode ast) throws SemanticException {
    if (ast.getChildCount() < 3) {
        throw new SemanticException("Invalid syntax for alter pool: " + ast.toStringTree());
    }
    String rpName = unescapeIdentifier(ast.getChild(0).getText());
    Tree target = ast.getChild(1);
    boolean unmanaged = target.getType() == HiveParser.TOK_UNMANAGED;
    // The unmanaged pool has no path; otherwise resolve it from the AST.
    String poolPath = unmanaged ? null : poolPath(ast.getChild(1));
    WMNullablePool changes = null;
    boolean sawTriggerChange = false;
    for (int idx = 2; idx < ast.getChildCount(); ++idx) {
        Tree option = ast.getChild(idx);
        if (option.getChildCount() != 1) {
            throw new SemanticException("Invalid syntax in alter pool expected parameter.");
        }
        Tree value = option.getChild(0);
        int optionType = option.getType();
        boolean isDrop = optionType == HiveParser.TOK_DROP_TRIGGER;
        if (isDrop || optionType == HiveParser.TOK_ADD_TRIGGER) {
            // Trigger mappings get their own task each, queued immediately.
            sawTriggerChange = true;
            String triggerName = unescapeIdentifier(value.getText());
            rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
                new CreateOrDropTriggerToPoolMappingDesc(rpName, triggerName, poolPath, isDrop, unmanaged))));
            continue;
        }
        if (unmanaged) {
            throw new SemanticException("Cannot alter the unmanaged pool");
        }
        if (changes == null) {
            // Lazily created so a trigger-only ALTER emits no pool task.
            changes = new WMNullablePool(rpName, null);
        }
        switch (optionType) {
            case HiveParser.TOK_ALLOC_FRACTION:
                changes.setAllocFraction(Double.parseDouble(value.getText()));
                break;
            case HiveParser.TOK_QUERY_PARALLELISM:
                changes.setQueryParallelism(Integer.parseInt(value.getText()));
                break;
            case HiveParser.TOK_SCHEDULING_POLICY:
                // Mark the field as explicitly set even when it is being nulled out.
                changes.setIsSetSchedulingPolicy(true);
                if (value.getType() != HiveParser.TOK_NULL) {
                    changes.setSchedulingPolicy(PlanUtils.stripQuotes(value.getText()));
                }
                break;
            case HiveParser.TOK_PATH:
                changes.setPoolPath(poolPath(value));
                break;
            default:
                throw new SemanticException("Incorrect alter syntax: " + option.toStringTree());
        }
    }
    if (changes != null || sawTriggerChange) {
        addServiceOutput();
    }
    if (changes != null) {
        // 'true' marks this descriptor as an alter (not a create) operation.
        rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
            new CreateOrAlterWMPoolDesc(changes, poolPath, true))));
    }
}
Also used : DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) CreateOrDropTriggerToPoolMappingDesc(org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc) CreateOrAlterWMPoolDesc(org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc) CommonTree(org.antlr.runtime.tree.CommonTree) Tree(org.antlr.runtime.tree.Tree) WMNullablePool(org.apache.hadoop.hive.metastore.api.WMNullablePool) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)

Aggregations

CommonTree (org.antlr.runtime.tree.CommonTree)2 Tree (org.antlr.runtime.tree.Tree)2 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)2 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)2 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)2 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)2 DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint)2 NotNullConstraint (org.apache.hadoop.hive.ql.metadata.NotNullConstraint)2 CreateOrAlterWMPoolDesc (org.apache.hadoop.hive.ql.plan.CreateOrAlterWMPoolDesc)2 DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)2 WMNullablePool (org.apache.hadoop.hive.metastore.api.WMNullablePool)1 WMPool (org.apache.hadoop.hive.metastore.api.WMPool)1 CreateOrDropTriggerToPoolMappingDesc (org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc)1