use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
the class ShowResourcePlanAnalyzer method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() > 1) {
    throw new SemanticException("Invalid syntax for SHOW RESOURCE PLAN statement");
  }
  ctx.setResFile(ctx.getLocalTmpPath());
  // No child means "show all resource plans"; one child names a specific plan.
  String resourcePlanName = (root.getChildCount() == 0) ? null : unescapeIdentifier(root.getChild(0).getText());
  ShowResourcePlanDesc desc = new ShowResourcePlanDesc(resourcePlanName, ctx.getResFile().toString());
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);
  task.setFetchSource(true);
  setFetchTask(createFetchTask(desc.getSchema()));
  DDLUtils.addServiceOutput(conf, getOutputs());
}
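Every SHOW-style analyzer in this listing repeats the same scheduling steps. A minimal sketch of that shared pattern (the helper name is hypothetical; every call inside it appears verbatim in the snippet above):

// Hypothetical helper condensing the recurring pattern: wrap the desc in
// DDLWork, register the task, and wire up a fetch task so the client can read
// the rows the task writes to ctx.getResFile().
private void scheduleFetchableDdlTask(DDLDesc desc, String schema) throws SemanticException {
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);
  task.setFetchSource(true);             // the task's result file feeds the fetch task
  setFetchTask(createFetchTask(schema)); // the schema string names the result columns
}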
use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
the class ShowMaterializedViewsAnalyzer method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() > 3) {
    throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg());
  }
  ctx.setResFile(ctx.getLocalTmpPath());
  String dbName = SessionState.get().getCurrentDatabase();
  String viewNames = null;
  switch (root.getChildCount()) {
    case 1: // Uses a pattern
      viewNames = unescapeSQLString(root.getChild(0).getText());
      break;
    case 2: // Specifies a DB
      assert (root.getChild(0).getType() == HiveParser.TOK_FROM);
      dbName = unescapeIdentifier(root.getChild(1).getText());
      db.validateDatabaseExists(dbName);
      break;
    case 3: // Uses a pattern and specifies a DB
      assert (root.getChild(0).getType() == HiveParser.TOK_FROM);
      dbName = unescapeIdentifier(root.getChild(1).getText());
      viewNames = unescapeSQLString(root.getChild(2).getText());
      db.validateDatabaseExists(dbName);
      break;
    default: // No pattern or DB
      break;
  }
  ShowMaterializedViewsDesc desc = new ShowMaterializedViewsDesc(ctx.getResFile(), dbName, viewNames);
  Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
  rootTasks.add(task);
  task.setFetchSource(true);
  setFetchTask(createFetchTask(ShowMaterializedViewsDesc.SCHEMA));
}
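The child-count switch encodes the optional database clause and name pattern of SHOW MATERIALIZED VIEWS. A sketch of the same dispatch as a pure function, which makes the four grammar shapes explicit (ViewFilter and resolveFilter are illustrative names, not Hive API; the statement forms are inferred from the switch):

// Child counts map to statement shapes roughly as follows:
//   0 -> SHOW MATERIALIZED VIEWS                    (current db, no pattern)
//   1 -> SHOW MATERIALIZED VIEWS 'mv_*'             (current db, pattern)
//   2 -> SHOW MATERIALIZED VIEWS IN/FROM db1        (named db, no pattern)
//   3 -> SHOW MATERIALIZED VIEWS IN/FROM db1 'mv_*' (named db, pattern)
static final class ViewFilter {
  final String dbName;
  final String pattern; // null means "no pattern"
  ViewFilter(String dbName, String pattern) {
    this.dbName = dbName;
    this.pattern = pattern;
  }
}

static ViewFilter resolveFilter(int childCount, String currentDb, String db, String pattern) {
  switch (childCount) {
    case 1: return new ViewFilter(currentDb, pattern);
    case 2: return new ViewFilter(db, null);
    case 3: return new ViewFilter(db, pattern);
    default: return new ViewFilter(currentDb, null); // bare statement
  }
}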
use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
the class AbstractVMMappingAnalyzer method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() < 4 || root.getChildCount() > 5) {
    throw new SemanticException("Invalid syntax for create or alter mapping.");
  }
  String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
  String entityType = root.getChild(1).getText();
  String entityName = PlanUtils.stripQuotes(root.getChild(2).getText());
  // A null pool path marks the mapping as unmanaged.
  String poolPath = root.getChild(3).getType() == HiveParser.TOK_UNMANAGED
      ? null : WMUtils.poolPath(root.getChild(3));
  Integer ordering = root.getChildCount() == 5 ? Integer.valueOf(root.getChild(4).getText()) : null;
  DDLDesc desc = getDesc(resourcePlanName, entityType, entityName, poolPath, ordering);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  DDLUtils.addServiceOutput(conf, getOutputs());
}
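One fragile spot here: Integer.valueOf throws an unchecked NumberFormatException, so a malformed ordering token surfaces as an opaque runtime error rather than a semantic error. A hedged, defensive variant of that one line (a sketch only, not the Hive implementation):

// Defensive variant of the ordering parse above: report a malformed ordering
// token as a SemanticException instead of letting NumberFormatException escape.
Integer ordering = null;
if (root.getChildCount() == 5) {
  String text = root.getChild(4).getText();
  try {
    ordering = Integer.valueOf(text);
  } catch (NumberFormatException e) {
    throw new SemanticException("Invalid ordering in mapping: " + text, e);
  }
}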
use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
the class DropResourcePlanAnalyzer method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() == 0) {
    throw new SemanticException("Expected name in DROP RESOURCE PLAN statement");
  }
  String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
  boolean ifExists = false;
  // Any child after the plan name must be the optional IF EXISTS token.
  for (int i = 1; i < root.getChildCount(); ++i) {
    Tree child = root.getChild(i);
    switch (child.getType()) {
      case HiveParser.TOK_IFEXISTS:
        ifExists = true;
        break;
      default:
        throw new SemanticException("Invalid drop arguments " + root.toStringTree());
    }
  }
  DropResourcePlanDesc desc = new DropResourcePlanDesc(resourcePlanName, ifExists);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  DDLUtils.addServiceOutput(conf, getOutputs());
}
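The loop is a small scan for the optional IF EXISTS token: DROP RESOURCE PLAN IF EXISTS rp1 yields ifExists == true, while a bare DROP RESOURCE PLAN rp1 leaves it false. A hypothetical helper (not Hive API) generalizing that scan:

// Hypothetical helper: returns true if the given token type appears among the
// children at or after fromIndex, and rejects any other trailing token.
private static boolean consumeOptionalToken(ASTNode root, int tokenType, int fromIndex)
    throws SemanticException {
  boolean found = false;
  for (int i = fromIndex; i < root.getChildCount(); ++i) {
    if (root.getChild(i).getType() == tokenType) {
      found = true;
    } else {
      throw new SemanticException("Unexpected token in " + root.toStringTree());
    }
  }
  return found;
}

With it, the body above collapses to: boolean ifExists = consumeOptionalToken(root, HiveParser.TOK_IFEXISTS, 1);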
use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.
the class AbstractAlterDatabaseAnalyzer method addAlterDatabaseDesc.
protected void addAlterDatabaseDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException {
  Database database = getDatabase(alterDesc.getDatabaseName());
  // DDL_NO_LOCK records the database as a write target without acquiring a lock.
  outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
}
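A minimal sketch of how a concrete subclass might use this helper. The analyzer name and property map are illustrative, and the constructor signatures (both the analyzer's QueryState constructor and the desc's (dbName, properties, replicationSpec) arguments) are assumptions, not checked against the Hive source:

// Hypothetical subclass, illustrative only. Real ALTER DATABASE analyzers in
// Hive follow this shape: parse the AST into a desc, then delegate scheduling
// to addAlterDatabaseDesc.
public class MyAlterDatabaseAnalyzer extends AbstractAlterDatabaseAnalyzer {
  public MyAlterDatabaseAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState); // constructor signature assumed
  }

  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    String dbName = unescapeIdentifier(root.getChild(0).getText());
    Map<String, String> props = Collections.singletonMap("comment", "updated"); // illustrative
    // Desc type and constructor arguments assumed for the sketch.
    addAlterDatabaseDesc(new AlterDatabaseSetPropertiesDesc(dbName, props, null));
  }
}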