Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class DescFunctionAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  // Results are written to a local temporary file and read back via a fetch task.
  ctx.setResFile(ctx.getLocalTmpPath());
  if (root.getChildCount() < 1 || root.getChildCount() > 2) {
    throw new SemanticException("Unexpected Tokens at DESCRIBE FUNCTION");
  }
  String functionName = stripQuotes(root.getChild(0).getText());
  // A second child means the EXTENDED keyword was present.
  boolean isExtended = root.getChildCount() == 2;
  DescFunctionDesc desc = new DescFunctionDesc(ctx.getResFile(), functionName, isExtended);
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);
  task.setFetchSource(true);
  setFetchTask(createFetchTask(DescFunctionDesc.SCHEMA));
}
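This analyzer handles DESCRIBE FUNCTION [EXTENDED] <name>; the optional second AST child corresponds to the EXTENDED keyword. Every analyzer on this page builds its task the same way: wrap a DDL descriptor in a DDLWork, obtain a Task from TaskFactory, and register it as a root task. A minimal sketch of that shared pattern, assuming Hive's DDLDesc interface and using a hypothetical helper name (addDdlTask is not a method in Hive):

  // Hypothetical helper capturing the shared pattern seen in every snippet here.
  private Task<DDLWork> addDdlTask(DDLDesc desc) {
    // Wrap the descriptor together with the analyzer's current read/write entities.
    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
    rootTasks.add(task);
    return task;
  }

DESCRIBE- and SHOW-style analyzers additionally mark the task as a fetch source and install a fetch task so the result written to resFile can be streamed back to the client.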
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class CreateMacroAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  String macroName = root.getChild(0).getText();
  if (FunctionUtils.isQualifiedFunctionName(macroName)) {
    throw new SemanticException("Temporary macro cannot be created with a qualified name.");
  }
  List<FieldSchema> arguments = getColumns((ASTNode) root.getChild(1), true, conf);
  Set<String> actualColumnNames = getActualColumnNames(root, arguments);
  RowResolver rowResolver = new RowResolver();
  ArrayList<String> macroColumnNames = new ArrayList<String>(arguments.size());
  ArrayList<TypeInfo> macroColumnTypes = new ArrayList<TypeInfo>(arguments.size());
  getMacroColumnData(arguments, actualColumnNames, rowResolver, macroColumnNames, macroColumnTypes);
  ExprNodeDesc body = getBody(root, arguments, rowResolver);
  CreateMacroDesc desc = new CreateMacroDesc(macroName, macroColumnNames, macroColumnTypes, body);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  // Register the default database as a DDL_NO_LOCK write entity; this restricts
  // macro creation to privileged users without taking an actual lock.
  Database database = getDatabase(Warehouse.DEFAULT_DATABASE_NAME);
  outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
}
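For context, the statement shape this analyzer handles looks like the following (the macro name and body here are illustrative, not taken from the source):

  // Illustrative HiveQL routed to CreateMacroAnalyzer:
  //   CREATE TEMPORARY MACRO sigmoid(x DOUBLE) 1.0 / (1.0 + EXP(-x));
  // A qualified name such as db.sigmoid trips the isQualifiedFunctionName
  // check above and fails with the SemanticException shown.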
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class LockDatabaseAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  String databaseName = unescapeIdentifier(root.getChild(0).getText());
  String mode = unescapeIdentifier(root.getChild(1).getText().toUpperCase());
  inputs.add(new ReadEntity(getDatabase(databaseName)));
  // LOCK DATABASE acquires its lock explicitly, so the operation itself does not
  // need to be locked. Use WriteType.DDL_NO_LOCK here; anything stronger would
  // conflict with Hive's transaction handling.
  outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK));
  LockDatabaseDesc desc =
      new LockDatabaseDesc(databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID), ctx.getCmd());
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  ctx.setNeedLockMgr(true);
}
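The two AST children read above map directly onto the statement's database name and lock mode (the database name here is illustrative):

  // Illustrative HiveQL routed to LockDatabaseAnalyzer:
  //   LOCK DATABASE sales SHARED;
  //   LOCK DATABASE sales EXCLUSIVE;
  // child(0) -> database name, child(1) -> lock mode (upper-cased above).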
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class ShowCreateDatabaseAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  ctx.setResFile(ctx.getLocalTmpPath());
  String databaseName = getUnescapedName((ASTNode) root.getChild(0));
  Database database = getDatabase(databaseName);
  inputs.add(new ReadEntity(database));
  ShowCreateDatabaseDesc desc = new ShowCreateDatabaseDesc(databaseName, ctx.getResFile());
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);
  // Stream the generated CREATE DATABASE text back to the client via a fetch task.
  task.setFetchSource(true);
  setFetchTask(createFetchTask(ShowCreateDatabaseDesc.SCHEMA));
}
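This analyzer backs the SHOW CREATE DATABASE statement (the database name here is illustrative):

  // Illustrative HiveQL routed to ShowCreateDatabaseAnalyzer:
  //   SHOW CREATE DATABASE sales;
  // The reconstructed CREATE statement is written to resFile and returned
  // through the fetch task installed above.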
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
The class ShowFunctionsAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  ctx.setResFile(ctx.getLocalTmpPath());
  ShowFunctionsDesc desc;
  if (root.getChildCount() > 0) {
    // The only supported child pattern is LIKE <pattern>.
    assert (root.getChildCount() == 2);
    assert (root.getChild(0).getType() == HiveParser.KW_LIKE);
    String functionNames = stripQuotes(root.getChild(1).getText());
    desc = new ShowFunctionsDesc(ctx.getResFile(), functionNames);
  } else {
    desc = new ShowFunctionsDesc(ctx.getResFile());
  }
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);
  task.setFetchSource(true);
  setFetchTask(createFetchTask(ShowFunctionsDesc.SCHEMA));
}
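The two branches above correspond to the statement's two forms (the pattern here is illustrative):

  // Illustrative HiveQL routed to ShowFunctionsAnalyzer:
  //   SHOW FUNCTIONS;                  -- no children: list every function
  //   SHOW FUNCTIONS LIKE "xpath*";    -- children: KW_LIKE + the quoted pattern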