Search in sources:

Example 1 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class ShowViewsAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() > 3) {
        throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg());
    }
    ctx.setResFile(ctx.getLocalTmpPath());
    String dbName = SessionState.get().getCurrentDatabase();
    String viewNames = null;
    switch (root.getChildCount()) {
        case 1: // Uses a pattern
            viewNames = unescapeSQLString(root.getChild(0).getText());
            break;
        case 2: // Specifies a DB
            assert (root.getChild(0).getType() == HiveParser.TOK_FROM);
            dbName = unescapeIdentifier(root.getChild(1).getText());
            db.validateDatabaseExists(dbName);
            break;
        case 3: // Uses a pattern and specifies a DB
            assert (root.getChild(0).getType() == HiveParser.TOK_FROM);
            dbName = unescapeIdentifier(root.getChild(1).getText());
            viewNames = unescapeSQLString(root.getChild(2).getText());
            db.validateDatabaseExists(dbName);
            break;
        default:
            // No pattern or DB
            break;
    }
    ShowViewsDesc desc = new ShowViewsDesc(ctx.getResFile(), dbName, viewNames);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(ShowViewsDesc.SCHEMA));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
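
The SHOW-style commands share a common shape: write the results to a local temp file, wrap the command's desc in a DDLWork task, and register a fetch task so the client can read the rows back. Below is a minimal sketch of that shape, assuming a hypothetical HypotheticalShowWidgetsDesc (not a real Hive class); the DDLWork, TaskFactory, and fetch-task wiring mirrors ShowViewsAnalyzer above.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // SHOW-style results are written to a local temp file.
    ctx.setResFile(ctx.getLocalTmpPath());
    HypotheticalShowWidgetsDesc desc = new HypotheticalShowWidgetsDesc(ctx.getResFile());
    // Wrap the desc in DDLWork and schedule it as a root task.
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    // Mark the task as the row source and register a fetch task so the client
    // can stream results back using the desc's schema.
    task.setFetchSource(true);
    setFetchTask(createFetchTask(HypotheticalShowWidgetsDesc.SCHEMA));
}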

Example 2 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class DropViewAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    String viewName = getUnescapedName((ASTNode) root.getChild(0));
    boolean ifExists = (root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
    boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
    Table view = getTable(viewName, throwException);
    if (view != null) {
        inputs.add(new ReadEntity(view));
        outputs.add(new WriteEntity(view, WriteEntity.WriteType.DDL_EXCLUSIVE));
    }
    DropViewDesc desc = new DropViewDesc(viewName, ifExists);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), Table (org.apache.hadoop.hive.ql.metadata.Table), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
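
The throwException expression above decides whether a missing view aborts the query: the analyzer only fails when the user did not write IF EXISTS and the "ignore non-existent" config is off (ConfVars.DROP_IGNORES_NON_EXISTENT, which corresponds to the hive.exec.drop.ignorenonexistent setting). A self-contained sketch of that rule; shouldThrowOnMissing is an illustrative helper, not a Hive method.

// Illustrative helper, not part of Hive.
static boolean shouldThrowOnMissing(boolean ifExists, boolean dropIgnoresNonExistent) {
    return !ifExists && !dropIgnoresNonExistent;
}
// shouldThrowOnMissing(false, false) -> true   (plain DROP VIEW on a missing view fails)
// shouldThrowOnMissing(true,  false) -> false  (IF EXISTS swallows the error)
// shouldThrowOnMissing(false, true)  -> false  (the config swallows the error)

Note also the entity bookkeeping: registering the view as a ReadEntity input and a DDL_EXCLUSIVE WriteEntity output is what feeds authorization checks and lock acquisition, the drop taking an exclusive lock on the view.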

Example 3 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class AlterResourcePlanEnableAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() == 0) {
        console.printError("Activate a resource plan to enable workload management!");
        return;
    }
    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
    boolean enable = false;
    boolean activate = false;
    boolean replace = false;
    for (int i = 1; i < root.getChildCount(); ++i) {
        Tree child = root.getChild(i);
        switch (child.getType()) {
            case HiveParser.TOK_ACTIVATE:
                activate = true;
                if (child.getChildCount() > 1) {
                    throw new SemanticException("Expected 0 or 1 arguments " + root.toStringTree());
                } else if (child.getChildCount() == 1) {
                    if (child.getChild(0).getType() != HiveParser.TOK_REPLACE) {
                        throw new SemanticException("Incorrect syntax " + root.toStringTree());
                    }
                    replace = true;
                }
                break;
            case HiveParser.TOK_ENABLE:
                enable = true;
                break;
            case HiveParser.TOK_REPLACE:
                replace = true;
                break;
            default:
                throw new SemanticException("Unexpected token in alter resource plan statement: " + child.getType());
        }
    }
    AlterResourcePlanEnableDesc desc = new AlterResourcePlanEnableDesc(resourcePlanName, enable, activate, replace);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Tree (org.antlr.runtime.tree.Tree), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
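
The three booleans collected here map onto the statement variants the analyzer accepts. A hedged sketch of the descs it would produce, using the constructor exactly as shown above; the SQL in the comments is inferred from the token names (TOK_ENABLE, TOK_ACTIVATE, TOK_REPLACE) rather than checked against the grammar.

// Constructor order as above: resourcePlanName, enable, activate, replace.
// ALTER RESOURCE PLAN rp1 ENABLE                 (inferred SQL)
AlterResourcePlanEnableDesc enableOnly = new AlterResourcePlanEnableDesc("rp1", true, false, false);
// ALTER RESOURCE PLAN rp1 ACTIVATE               (inferred SQL)
AlterResourcePlanEnableDesc activateOnly = new AlterResourcePlanEnableDesc("rp1", false, true, false);
// ALTER RESOURCE PLAN rp1 ACTIVATE WITH REPLACE  (inferred SQL)
AlterResourcePlanEnableDesc activateReplace = new AlterResourcePlanEnableDesc("rp1", false, true, true);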

Example 4 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class AlterResourcePlanUnsetAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
    boolean unsetQueryParallelism = false;
    boolean unsetDefaultPool = false;
    for (int i = 1; i < root.getChildCount(); ++i) {
        Tree child = root.getChild(i);
        switch (child.getType()) {
            case HiveParser.TOK_QUERY_PARALLELISM:
                if (child.getChildCount() != 0) {
                    throw new SemanticException("Expected zero argument");
                }
                unsetQueryParallelism = true;
                break;
            case HiveParser.TOK_DEFAULT_POOL:
                if (child.getChildCount() != 0) {
                    throw new SemanticException("Expected zero argument");
                }
                unsetDefaultPool = true;
                break;
            default:
                throw new SemanticException("Unexpected token in alter resource plan statement: " + child.getType());
        }
    }
    AlterResourcePlanUnsetDesc desc = new AlterResourcePlanUnsetDesc(resourcePlanName, unsetQueryParallelism, unsetDefaultPool);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Tree (org.antlr.runtime.tree.Tree), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
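
Both case arms repeat the same zero-children guard before setting their flag. A small illustrative refactor of that guard; expectNoArguments is hypothetical, not a Hive utility.

// Hypothetical helper, not part of Hive: the repeated guard in one place.
private static void expectNoArguments(Tree child) throws SemanticException {
    if (child.getChildCount() != 0) {
        throw new SemanticException("Expected zero argument");
    }
}

With such a helper, each case arm reduces to expectNoArguments(child) plus the flag assignment, and the resulting AlterResourcePlanUnsetDesc simply records which of the two properties (query parallelism, default pool) should be cleared.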

Example 5 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class CreateResourcePlanAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() == 0) {
        throw new SemanticException("Expected name in CREATE RESOURCE PLAN statement");
    }
    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
    Integer queryParallelism = null;
    String likeName = null;
    boolean ifNotExists = false;
    for (int i = 1; i < root.getChildCount(); ++i) {
        Tree child = root.getChild(i);
        switch (child.getType()) {
            case HiveParser.TOK_QUERY_PARALLELISM:
                // Note: later we may be able to set multiple things together (except LIKE).
                if (queryParallelism == null && likeName == null) {
                    queryParallelism = Integer.parseInt(child.getChild(0).getText());
                } else {
                    throw new SemanticException("Conflicting create arguments " + root.toStringTree());
                }
                break;
            case HiveParser.TOK_LIKERP:
                if (queryParallelism == null && likeName == null) {
                    likeName = unescapeIdentifier(child.getChild(0).getText());
                } else {
                    throw new SemanticException("Conflicting create arguments " + root.toStringTree());
                }
                break;
            case HiveParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            default:
                throw new SemanticException("Invalid create arguments " + root.toStringTree());
        }
    }
    CreateResourcePlanDesc desc = new CreateResourcePlanDesc(resourcePlanName, queryParallelism, likeName, ifNotExists);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Tree (org.antlr.runtime.tree.Tree), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
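
QUERY_PARALLELISM and LIKE are mutually exclusive here, so at most one of the two middle constructor arguments ends up non-null. A hedged sketch of the descs for each accepted variant, using the constructor exactly as shown above; the SQL in the comments is inferred from the token names and not checked against the grammar.

// Constructor order as above: resourcePlanName, queryParallelism, likeName, ifNotExists.
// CREATE RESOURCE PLAN rp1
CreateResourcePlanDesc plain = new CreateResourcePlanDesc("rp1", null, null, false);
// CREATE RESOURCE PLAN IF NOT EXISTS rp1 WITH QUERY_PARALLELISM 4   (inferred SQL)
CreateResourcePlanDesc withParallelism = new CreateResourcePlanDesc("rp1", 4, null, true);
// CREATE RESOURCE PLAN rp2 LIKE rp1                                 (inferred SQL)
CreateResourcePlanDesc copy = new CreateResourcePlanDesc("rp2", null, "rp1", false);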

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 61
Table (org.apache.hadoop.hive.ql.metadata.Table): 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 24
TableName (org.apache.hadoop.hive.common.TableName): 23
Test (org.junit.Test): 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc): 21
ArrayList (java.util.ArrayList): 18
Path (org.apache.hadoop.fs.Path): 15
HashMap (java.util.HashMap): 14
Database (org.apache.hadoop.hive.metastore.api.Database): 12
Task (org.apache.hadoop.hive.ql.exec.Task): 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
Tree (org.antlr.runtime.tree.Tree): 10
HashSet (java.util.HashSet): 9
Context (org.apache.hadoop.hive.ql.Context): 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc): 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc): 8