Example 6 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class ShowResourcePlanAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() > 1) {
        throw new SemanticException("Invalid syntax for SHOW RESOURCE PLAN statement");
    }
    ctx.setResFile(ctx.getLocalTmpPath());
    String resourcePlanName = (root.getChildCount() == 0) ? null : unescapeIdentifier(root.getChild(0).getText());
    ShowResourcePlanDesc desc = new ShowResourcePlanDesc(resourcePlanName, ctx.getResFile().toString());
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(desc.getSchema()));
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
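
Each of these examples follows the same skeleton: build a *Desc payload, wrap it in a DDLWork, hand that to TaskFactory.get, and register the result as a root task; analyzers that return rows additionally mark the task as a fetch source and install a fetch task over the desc's schema. A minimal sketch of that skeleton, assuming the usual BaseSemanticAnalyzer context (ctx, rootTasks, getInputs(), getOutputs()); HypotheticalDesc is an invented placeholder, not a real Hive class:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // Results are written to a local temp file that the fetch task later reads back.
    ctx.setResFile(ctx.getLocalTmpPath());
    // HypotheticalDesc stands in for any concrete DDLDesc with a result schema.
    HypotheticalDesc desc = new HypotheticalDesc(ctx.getResFile().toString());
    // DDLWork bundles the desc with the query's read/write entities.
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    // Mark the task's output as fetchable and wire a fetch task over its schema.
    task.setFetchSource(true);
    setFetchTask(createFetchTask(desc.getSchema()));
}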

Example 7 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class ShowMaterializedViewsAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() > 3) {
        throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg());
    }
    ctx.setResFile(ctx.getLocalTmpPath());
    String dbName = SessionState.get().getCurrentDatabase();
    String viewNames = null;
    switch (root.getChildCount()) {
        case 1: // Uses a pattern
            viewNames = unescapeSQLString(root.getChild(0).getText());
            break;
        case 2: // Specifies a DB
            assert (root.getChild(0).getType() == HiveParser.TOK_FROM);
            dbName = unescapeIdentifier(root.getChild(1).getText());
            db.validateDatabaseExists(dbName);
            break;
        case 3: // Uses a pattern and specifies a DB
            assert (root.getChild(0).getType() == HiveParser.TOK_FROM);
            dbName = unescapeIdentifier(root.getChild(1).getText());
            viewNames = unescapeSQLString(root.getChild(2).getText());
            db.validateDatabaseExists(dbName);
            break;
        default: // No pattern or DB
            break;
    }
    ShowMaterializedViewsDesc desc = new ShowMaterializedViewsDesc(ctx.getResFile(), dbName, viewNames);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(ShowMaterializedViewsDesc.SCHEMA));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
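
The childCount dispatch above maps one-to-one onto the statement variants. As a rough guide (the authoritative grammar is HiveParser.g; the exact token shapes shown here are an assumption):

// Statement forms and the resulting AST child counts (sketch only):
//   SHOW MATERIALIZED VIEWS;                 -> 0 children: current DB, no pattern
//   SHOW MATERIALIZED VIEWS 'mv_*';          -> 1 child:    pattern only
//   SHOW MATERIALIZED VIEWS IN db1;          -> 2 children: TOK_FROM, db name
//   SHOW MATERIALIZED VIEWS IN db1 'mv_*';   -> 3 children: TOK_FROM, db name, pattern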

Example 8 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class AbstractVMMappingAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() < 4 || root.getChildCount() > 5) {
        throw new SemanticException("Invalid syntax for create or alter mapping.");
    }
    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
    String entityType = root.getChild(1).getText();
    String entityName = PlanUtils.stripQuotes(root.getChild(2).getText());
    // A null pool path means the mapping is unmanaged.
    String poolPath = root.getChild(3).getType() == HiveParser.TOK_UNMANAGED ? null : WMUtils.poolPath(root.getChild(3));
    Integer ordering = root.getChildCount() == 5 ? Integer.valueOf(root.getChild(4).getText()) : null;
    DDLDesc desc = getDesc(resourcePlanName, entityType, entityName, poolPath, ordering);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), DDLDesc (org.apache.hadoop.hive.ql.ddl.DDLDesc), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
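
Subclasses supply the concrete payload through getDesc. Below is a hedged sketch of what a create-side override might look like, assuming the WMMapping thrift object from the metastore API; CreateWMMappingDesc is a hypothetical name standing in for Hive's actual create-mapping desc class:

@Override
protected DDLDesc getDesc(String resourcePlanName, String entityType, String entityName,
        String poolPath, Integer ordering) {
    // WMMapping is the metastore thrift object; required fields go in the constructor.
    WMMapping mapping = new WMMapping(resourcePlanName, entityType, entityName);
    if (poolPath != null) {
        mapping.setPoolPath(poolPath); // absent pool path => unmanaged mapping
    }
    if (ordering != null) {
        mapping.setOrdering(ordering);
    }
    // CreateWMMappingDesc is a hypothetical stand-in for the actual desc class.
    return new CreateWMMappingDesc(mapping);
}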

Example 9 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class DropResourcePlanAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() == 0) {
        throw new SemanticException("Expected name in DROP RESOURCE PLAN statement");
    }
    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
    boolean ifExists = false;
    for (int i = 1; i < root.getChildCount(); ++i) {
        Tree child = root.getChild(i);
        switch(child.getType()) {
            case HiveParser.TOK_IFEXISTS:
                ifExists = true;
                break;
            default:
                throw new SemanticException("Invalid drop arguments " + root.toStringTree());
        }
    }
    DropResourcePlanDesc desc = new DropResourcePlanDesc(resourcePlanName, ifExists);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    DDLUtils.addServiceOutput(conf, getOutputs());
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Tree (org.antlr.runtime.tree.Tree), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
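
The loop admits TOK_IFEXISTS as the only optional extra child, which corresponds to the two accepted statement forms (a sketch; the grammar in HiveParser.g is authoritative):

// Accepted forms and their AST shapes (sketch only):
//   DROP RESOURCE PLAN plan_name;             -> 1 child:   the plan name
//   DROP RESOURCE PLAN IF EXISTS plan_name;   -> 2 children: plan name, TOK_IFEXISTS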

Example 10 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

From the class AbstractAlterDatabaseAnalyzer, method addAlterDatabaseDesc:

protected void addAlterDatabaseDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException {
    Database database = getDatabase(alterDesc.getDatabaseName());
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Database (org.apache.hadoop.hive.metastore.api.Database), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
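
A concrete subclass only has to build its AbstractAlterDatabaseDesc and delegate. A hedged sketch of a set-properties analyzer follows; the ExampleAlterDatabasePropertiesAnalyzer name and the AlterDatabaseSetPropertiesDesc constructor shape are assumptions for illustration, not verbatim Hive APIs:

// Hedged sketch: class names and constructor signatures are illustrative.
public class ExampleAlterDatabasePropertiesAnalyzer extends AbstractAlterDatabaseAnalyzer {

    public ExampleAlterDatabasePropertiesAnalyzer(QueryState queryState) throws SemanticException {
        super(queryState);
    }

    @Override
    public void analyzeInternal(ASTNode root) throws SemanticException {
        String databaseName = unescapeIdentifier(root.getChild(0).getText());
        // getProps flattens a TOK_DBPROPLIST subtree into a key/value map (assumed helper).
        Map<String, String> props = getProps((ASTNode) root.getChild(1).getChild(0));
        // Third constructor argument (a ReplicationSpec) is left null in this sketch.
        addAlterDatabaseDesc(new AlterDatabaseSetPropertiesDesc(databaseName, props, null));
    }
}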

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 61
Table (org.apache.hadoop.hive.ql.metadata.Table): 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 24
TableName (org.apache.hadoop.hive.common.TableName): 23
Test (org.junit.Test): 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc): 21
ArrayList (java.util.ArrayList): 18
Path (org.apache.hadoop.fs.Path): 15
HashMap (java.util.HashMap): 14
Database (org.apache.hadoop.hive.metastore.api.Database): 12
Task (org.apache.hadoop.hive.ql.exec.Task): 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
Tree (org.antlr.runtime.tree.Tree): 10
HashSet (java.util.HashSet): 9
Context (org.apache.hadoop.hive.ql.Context): 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc): 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc): 8