Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
From the class ShowViewsAnalyzer, method analyzeInternal:
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() > 3) {
    throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg());
  }

  ctx.setResFile(ctx.getLocalTmpPath());

  String dbName = SessionState.get().getCurrentDatabase();
  String viewNames = null;
  switch (root.getChildCount()) {
    case 1: // Uses a pattern
      viewNames = unescapeSQLString(root.getChild(0).getText());
      break;
    case 2: // Specifies a DB
      assert (root.getChild(0).getType() == HiveParser.TOK_FROM);
      dbName = unescapeIdentifier(root.getChild(1).getText());
      db.validateDatabaseExists(dbName);
      break;
    case 3: // Uses a pattern and specifies a DB
      assert (root.getChild(0).getType() == HiveParser.TOK_FROM);
      dbName = unescapeIdentifier(root.getChild(1).getText());
      viewNames = unescapeSQLString(root.getChild(2).getText());
      db.validateDatabaseExists(dbName);
      break;
    default: // No pattern or DB
      break;
  }

  ShowViewsDesc desc = new ShowViewsDesc(ctx.getResFile(), dbName, viewNames);
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);
  task.setFetchSource(true);
  setFetchTask(createFetchTask(ShowViewsDesc.SCHEMA));
}
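The switch on getChildCount() mirrors the optional parts of the SHOW VIEWS statement: an optional LIKE pattern and an optional source database. A minimal sketch of statements that exercise each branch, assuming a HiveServer2 instance reachable at a local JDBC URL (the URL, database, and pattern names are illustrative, not from the source):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class ShowViewsExamples {
  public static void main(String[] args) throws Exception {
    // Assumed local endpoint; hive-jdbc on the classpath auto-registers the driver.
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = conn.createStatement()) {
      stmt.execute("SHOW VIEWS");                             // default branch: no pattern, no DB
      stmt.execute("SHOW VIEWS LIKE 'sales_*'");              // case 1: pattern only
      stmt.execute("SHOW VIEWS IN reporting");                // case 2: TOK_FROM + database
      stmt.execute("SHOW VIEWS IN reporting LIKE 'sales_*'"); // case 3: database and pattern
    }
  }
}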
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
From the class DropViewAnalyzer, method analyzeInternal:
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  String viewName = getUnescapedName((ASTNode) root.getChild(0));
  boolean ifExists = (root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
  boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);

  Table view = getTable(viewName, throwException);
  if (view != null) {
    inputs.add(new ReadEntity(view));
    outputs.add(new WriteEntity(view, WriteEntity.WriteType.DDL_EXCLUSIVE));
  }

  DropViewDesc desc = new DropViewDesc(viewName, ifExists);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
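Whether a missing view is fatal depends on the interplay shown above: IF EXISTS suppresses the error, and so does hive.exec.drop.ignorenonexistent (the configuration behind ConfVars.DROP_IGNORES_NON_EXISTENT). A short sketch of both statement forms over the same assumed local connection, with an illustrative view name:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class DropViewExamples {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = conn.createStatement()) {
      // Errors out on a missing view unless hive.exec.drop.ignorenonexistent=true.
      stmt.execute("DROP VIEW reporting.sales_v");
      // Never errors on a missing view: ifExists forces throwException to false.
      stmt.execute("DROP VIEW IF EXISTS reporting.sales_v");
    }
  }
}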
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
From the class AlterResourcePlanEnableAnalyzer, method analyzeInternal:
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() == 0) {
    console.printError("Activate a resource plan to enable workload management!");
    return;
  }

  String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());

  boolean enable = false;
  boolean activate = false;
  boolean replace = false;
  for (int i = 1; i < root.getChildCount(); ++i) {
    Tree child = root.getChild(i);
    switch (child.getType()) {
      case HiveParser.TOK_ACTIVATE:
        activate = true;
        if (child.getChildCount() > 1) {
          throw new SemanticException("Expected 0 or 1 arguments " + root.toStringTree());
        } else if (child.getChildCount() == 1) {
          if (child.getChild(0).getType() != HiveParser.TOK_REPLACE) {
            throw new SemanticException("Incorrect syntax " + root.toStringTree());
          }
          replace = true;
        }
        break;
      case HiveParser.TOK_ENABLE:
        enable = true;
        break;
      case HiveParser.TOK_REPLACE:
        replace = true;
        break;
      default:
        throw new SemanticException("Unexpected token in alter resource plan statement: " + child.getType());
    }
  }

  AlterResourcePlanEnableDesc desc = new AlterResourcePlanEnableDesc(resourcePlanName, enable, activate, replace);
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);

  DDLUtils.addServiceOutput(conf, getOutputs());
}
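The loop tolerates ENABLE, ACTIVATE (optionally carrying a nested REPLACE), and bare REPLACE tokens in any trailing position after the plan name. A hedged sketch of statement forms that should reach these branches; the workload-management grammar varies across Hive versions, so treat the exact syntax and the plan name as illustrative:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class ResourcePlanEnableExamples {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = conn.createStatement()) {
      stmt.execute("ALTER RESOURCE PLAN daytime ENABLE");                // TOK_ENABLE
      stmt.execute("ALTER RESOURCE PLAN daytime ACTIVATE");              // TOK_ACTIVATE
      stmt.execute("ALTER RESOURCE PLAN daytime ENABLE ACTIVATE");       // both flags set
      stmt.execute("ALTER RESOURCE PLAN daytime ACTIVATE WITH REPLACE"); // TOK_REPLACE under TOK_ACTIVATE
    }
  }
}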
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
From the class AlterResourcePlanUnsetAnalyzer, method analyzeInternal:
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());

  boolean unsetQueryParallelism = false;
  boolean unsetDefaultPool = false;
  for (int i = 1; i < root.getChildCount(); ++i) {
    Tree child = root.getChild(i);
    switch (child.getType()) {
      case HiveParser.TOK_QUERY_PARALLELISM:
        if (child.getChildCount() != 0) {
          throw new SemanticException("Expected zero argument");
        }
        unsetQueryParallelism = true;
        break;
      case HiveParser.TOK_DEFAULT_POOL:
        if (child.getChildCount() != 0) {
          throw new SemanticException("Expected zero argument");
        }
        unsetDefaultPool = true;
        break;
      default:
        throw new SemanticException("Unexpected token in alter resource plan statement: " + child.getType());
    }
  }

  AlterResourcePlanUnsetDesc desc = new AlterResourcePlanUnsetDesc(resourcePlanName, unsetQueryParallelism, unsetDefaultPool);
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);

  DDLUtils.addServiceOutput(conf, getOutputs());
}
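Each UNSET token must arrive without arguments; the analyzer only records which settings to clear. A sketch of the two statement forms, with the caveat that the exact UNSET syntax here is an assumption about the workload-management grammar and should be checked against your Hive version:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class ResourcePlanUnsetExamples {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = conn.createStatement()) {
      stmt.execute("ALTER RESOURCE PLAN daytime UNSET QUERY_PARALLELISM"); // TOK_QUERY_PARALLELISM
      stmt.execute("ALTER RESOURCE PLAN daytime UNSET DEFAULT POOL");      // TOK_DEFAULT_POOL
    }
  }
}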
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
From the class CreateResourcePlanAnalyzer, method analyzeInternal:
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() == 0) {
    throw new SemanticException("Expected name in CREATE RESOURCE PLAN statement");
  }

  String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());

  Integer queryParallelism = null;
  String likeName = null;
  boolean ifNotExists = false;
  for (int i = 1; i < root.getChildCount(); ++i) {
    Tree child = root.getChild(i);
    switch (child.getType()) {
      case HiveParser.TOK_QUERY_PARALLELISM:
        // Note: later we may be able to set multiple things together (except LIKE).
        if (queryParallelism == null && likeName == null) {
          queryParallelism = Integer.parseInt(child.getChild(0).getText());
        } else {
          throw new SemanticException("Conflicting create arguments " + root.toStringTree());
        }
        break;
      case HiveParser.TOK_LIKERP:
        if (queryParallelism == null && likeName == null) {
          likeName = unescapeIdentifier(child.getChild(0).getText());
        } else {
          throw new SemanticException("Conflicting create arguments " + root.toStringTree());
        }
        break;
      case HiveParser.TOK_IFNOTEXISTS:
        ifNotExists = true;
        break;
      default:
        throw new SemanticException("Invalid create arguments " + root.toStringTree());
    }
  }

  CreateResourcePlanDesc desc = new CreateResourcePlanDesc(resourcePlanName, queryParallelism, likeName, ifNotExists);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  DDLUtils.addServiceOutput(conf, getOutputs());
}
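QUERY_PARALLELISM and LIKE are mutually exclusive here, while IF NOT EXISTS composes with either. A sketch of the three statement shapes the loop distinguishes, over the same assumed connection (plan names are illustrative):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateResourcePlanExamples {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = conn.createStatement()) {
      stmt.execute("CREATE RESOURCE PLAN daytime");                                        // name only
      stmt.execute("CREATE RESOURCE PLAN IF NOT EXISTS daytime WITH QUERY_PARALLELISM=4"); // TOK_QUERY_PARALLELISM
      stmt.execute("CREATE RESOURCE PLAN nighttime LIKE daytime");                         // TOK_LIKERP
    }
  }
}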