
Example 21 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class DescFunctionAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    ctx.setResFile(ctx.getLocalTmpPath());
    if (root.getChildCount() < 1 || root.getChildCount() > 2) {
        throw new SemanticException("Unexpected Tokens at DESCRIBE FUNCTION");
    }
    String functionName = stripQuotes(root.getChild(0).getText());
    boolean isExtended = root.getChildCount() == 2;
    DescFunctionDesc desc = new DescFunctionDesc(ctx.getResFile(), functionName, isExtended);
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(DescFunctionDesc.SCHEMA));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
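
All of the result-producing analyzers in this list follow the same fetch-task pattern shown above. A condensed sketch of that shared skeleton, assuming the context of an analyzer like the one above (XxxDesc is a placeholder, not a real Hive class):

// Minimal sketch of the shared pattern, not a verbatim Hive method.
ctx.setResFile(ctx.getLocalTmpPath());              // results are written to a local temp file
XxxDesc desc = new XxxDesc(ctx.getResFile() /* plus operation-specific arguments */);
Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
rootTasks.add(task);                                // schedule the DDL task
task.setFetchSource(true);                          // this task is the source of the fetched rows
setFetchTask(createFetchTask(XxxDesc.SCHEMA));      // clients read the result through the fetch task

ShowCreateDatabaseAnalyzer and ShowFunctionsAnalyzer below reuse exactly this shape.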

Example 22 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class CreateMacroAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    String macroName = root.getChild(0).getText();
    if (FunctionUtils.isQualifiedFunctionName(macroName)) {
        throw new SemanticException("Temporary macro cannot be created with a qualified name.");
    }
    List<FieldSchema> arguments = getColumns((ASTNode) root.getChild(1), true, conf);
    Set<String> actualColumnNames = getActualColumnNames(root, arguments);
    RowResolver rowResolver = new RowResolver();
    ArrayList<String> macroColumnNames = new ArrayList<String>(arguments.size());
    ArrayList<TypeInfo> macroColumnTypes = new ArrayList<TypeInfo>(arguments.size());
    getMacroColumnData(arguments, actualColumnNames, rowResolver, macroColumnNames, macroColumnTypes);
    ExprNodeDesc body = getBody(root, arguments, rowResolver);
    CreateMacroDesc desc = new CreateMacroDesc(macroName, macroColumnNames, macroColumnTypes, body);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    Database database = getDatabase(Warehouse.DEFAULT_DATABASE_NAME);
    // This restricts macro creation to privileged users.
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
}
Also used: FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList (java.util.ArrayList), RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Database (org.apache.hadoop.hive.metastore.api.Database), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
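
To make the child indices concrete, here is the statement shape this analyzer consumes, annotated as Java comments (the macro name and body are illustrative examples, not taken from the Hive sources):

// Assumed input: CREATE TEMPORARY MACRO simple_add (x INT, y INT) x + y;
//   root.getChild(0) -> "simple_add"   (must be unqualified; "db.simple_add" is rejected)
//   root.getChild(1) -> the argument list, converted to FieldSchema entries by getColumns
//   the body expression "x + y" is resolved against those arguments through the RowResolver
// The trailing WriteEntity uses WriteType.DDL_NO_LOCK: no lock is acquired, but the
// entity still flows into the authorization hooks, which is what restricts macro
// creation to privileged users.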

Example 23 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class LockDatabaseAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    String databaseName = unescapeIdentifier(root.getChild(0).getText());
    String mode = unescapeIdentifier(root.getChild(1).getText().toUpperCase());
    inputs.add(new ReadEntity(getDatabase(databaseName)));
    // Lock database operation is to acquire the lock explicitly, the operation itself doesn't need to be locked.
    // Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction.
    outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK));
    LockDatabaseDesc desc = new LockDatabaseDesc(databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), ctx.getCmd());
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    ctx.setNeedLockMgr(true);
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
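
The two AST children map directly onto the statement. A hedged illustration (the database name is an assumed example; SHARED and EXCLUSIVE are Hive's explicit lock modes):

// Assumed input: LOCK DATABASE db1 SHARED;   (or: LOCK DATABASE db1 EXCLUSIVE;)
//   root.getChild(0) -> "db1"      -> databaseName
//   root.getChild(1) -> "SHARED"   -> mode, upper-cased before use
// ctx.setNeedLockMgr(true) forces a lock manager to be available at execution time,
// since acquiring the explicit lock is the entire purpose of the statement.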

Example 24 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class ShowCreateDatabaseAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    ctx.setResFile(ctx.getLocalTmpPath());
    String databaseName = getUnescapedName((ASTNode) root.getChild(0));
    Database database = getDatabase(databaseName);
    inputs.add(new ReadEntity(database));
    ShowCreateDatabaseDesc desc = new ShowCreateDatabaseDesc(databaseName, ctx.getResFile());
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(ShowCreateDatabaseDesc.SCHEMA));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), Database (org.apache.hadoop.hive.metastore.api.Database)
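
Note the entity bookkeeping: SHOW CREATE DATABASE only reads metadata, so the analyzer registers a single ReadEntity and no WriteEntity, in contrast to LockDatabaseAnalyzer above. A one-line sketch of the distinction (the comments are explanatory, not from the Hive sources):

inputs.add(new ReadEntity(database));   // read-only DDL: authorization needs read access, no write lock
// LockDatabaseAnalyzer additionally adds a WriteEntity (DDL_NO_LOCK) because it mutates lock state.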

Example 25 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The class ShowFunctionsAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    ctx.setResFile(ctx.getLocalTmpPath());
    ShowFunctionsDesc desc;
    if (root.getChildCount() > 0) {
        assert (root.getChildCount() == 2);
        assert (root.getChild(0).getType() == HiveParser.KW_LIKE);
        String functionNames = stripQuotes(root.getChild(1).getText());
        desc = new ShowFunctionsDesc(ctx.getResFile(), functionNames);
    } else {
        desc = new ShowFunctionsDesc(ctx.getResFile());
    }
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    task.setFetchSource(true);
    setFetchTask(createFetchTask(ShowFunctionsDesc.SCHEMA));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)
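
The two assert statements above only fire when the JVM runs with -ea, so malformed trees pass silently in production. A defensive variant (a sketch, not the Hive implementation) would surface them as SemanticExceptions instead, mirroring DescFunctionAnalyzer in Example 21:

if (root.getChildCount() > 0) {
    // validate explicitly instead of relying on assertions
    if (root.getChildCount() != 2 || root.getChild(0).getType() != HiveParser.KW_LIKE) {
        throw new SemanticException("Unexpected tokens at SHOW FUNCTIONS");
    }
    desc = new ShowFunctionsDesc(ctx.getResFile(), stripQuotes(root.getChild(1).getText()));
} else {
    desc = new ShowFunctionsDesc(ctx.getResFile());
}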

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork) 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException) 61
Table (org.apache.hadoop.hive.ql.metadata.Table) 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity) 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode) 24
TableName (org.apache.hadoop.hive.common.TableName) 23
Test (org.junit.Test) 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity) 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) 21
ArrayList (java.util.ArrayList) 18
Path (org.apache.hadoop.fs.Path) 15
HashMap (java.util.HashMap) 14
Database (org.apache.hadoop.hive.metastore.api.Database) 12
Task (org.apache.hadoop.hive.ql.exec.Task) 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 11
Tree (org.antlr.runtime.tree.Tree) 10
HashSet (java.util.HashSet) 9
Context (org.apache.hadoop.hive.ql.Context) 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc) 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc) 8