Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: class DropMaterializedViewAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  String viewName = getUnescapedName((ASTNode) root.getChild(0));
  boolean ifExists = (root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
  boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
  Table materializedView = getTable(viewName, throwException);
  if (materializedView != null) {
    inputs.add(new ReadEntity(materializedView));
    outputs.add(new WriteEntity(materializedView, WriteEntity.WriteType.DDL_EXCLUSIVE));
  }
  DropMaterializedViewDesc desc = new DropMaterializedViewDesc(viewName, ifExists);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
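Every analyzer on this page ends with the same three-step recipe: build a *Desc value object, wrap it in a DDLWork together with the read/write entity sets (which drive authorization and lock acquisition), and enqueue the Task produced by TaskFactory as a root task. A minimal sketch of that shared step, distilled from the snippet above; the helper class and method name are hypothetical:

import java.util.Set;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

final class DdlTaskHelper {
  private DdlTaskHelper() {
  }

  /** Wraps a DDL descriptor in a DDLWork and returns the executable task. */
  static Task<?> toTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs, DDLDesc desc) {
    // DDLWork carries the descriptor plus the entity sets consulted for
    // authorization and locking before the DDL operation runs.
    return TaskFactory.get(new DDLWork(inputs, outputs, desc));
  }
}

With such a helper, the last line of the method above would read rootTasks.add(DdlTaskHelper.toTask(getInputs(), getOutputs(), desc)).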
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: class AlterMaterializedViewRewriteAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  TableName tableName = getQualifiedTableName((ASTNode) root.getChild(0));
  // Determine the new value of the rewrite flag
  boolean rewriteEnable;
  switch (root.getChild(1).getType()) {
    case HiveParser.TOK_REWRITE_ENABLED:
      rewriteEnable = true;
      break;
    case HiveParser.TOK_REWRITE_DISABLED:
      rewriteEnable = false;
      break;
    default:
      throw new SemanticException("Invalid alter materialized view expression");
  }
  // The name can be fully qualified or use the default database
  Table materializedViewTable = getTable(tableName, true);
  // Rewriting can only be enabled if the view uses transactional (MM and ACID) tables
  if (rewriteEnable) {
    for (SourceTable sourceTable : materializedViewTable.getMVMetadata().getSourceTables()) {
      if (!AcidUtils.isTransactionalTable(sourceTable.getTable())) {
        throw new SemanticException("Automatic rewriting for materialized view cannot be enabled if the " +
            "materialized view uses non-transactional tables");
      }
    }
  }
  AlterMaterializedViewRewriteDesc desc =
      new AlterMaterializedViewRewriteDesc(tableName.getNotEmptyDbTable(), rewriteEnable);
  if (AcidUtils.isTransactionalTable(materializedViewTable)) {
    ddlDescWithWriteId = desc;
  }
  inputs.add(new ReadEntity(materializedViewTable));
  outputs.add(new WriteEntity(materializedViewTable, WriteEntity.WriteType.DDL_EXCLUSIVE));
  // Create the task for the ALTER ... REWRITE descriptor
  DDLWork work = new DDLWork(getInputs(), getOutputs(), desc);
  Task<?> targetTask = TaskFactory.get(work);
  // Create a task to update the rewrite flag, dependent on the previous one
  MaterializedViewUpdateDesc materializedViewUpdateDesc =
      new MaterializedViewUpdateDesc(tableName.getNotEmptyDbTable(), rewriteEnable, !rewriteEnable, false);
  DDLWork updateDdlWork = new DDLWork(getInputs(), getOutputs(), materializedViewUpdateDesc);
  targetTask.addDependentTask(TaskFactory.get(updateDdlWork, conf));
  // Add the root task
  rootTasks.add(targetTask);
}
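This is the only analyzer on the page that builds a chain of two tasks instead of a single root task: the ALTER descriptor runs first, and the MaterializedViewUpdateDesc task that refreshes the rewrite registry runs only once it succeeds. A hedged sketch of that wiring as a standalone helper (the class and method names are hypothetical; the Task and TaskFactory calls are the ones used in the snippet):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;

final class TaskChains {
  private TaskChains() {
  }

  /** Returns the head of a two-task chain; the follow-up runs only if the head succeeds. */
  static Task<?> chain(DDLWork head, DDLWork followUp, HiveConf conf) {
    Task<?> headTask = TaskFactory.get(head);
    headTask.addDependentTask(TaskFactory.get(followUp, conf));
    // Only the head goes into rootTasks; the executor walks dependent
    // tasks after the parent completes successfully.
    return headTask;
  }
}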
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: class CreateWMPoolAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  // TODO: allow defaults for e.g. scheduling policy.
  if (root.getChildCount() < 3) {
    throw new SemanticException("Expected more arguments: " + root.toStringTree());
  }
  String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
  String poolPath = WMUtils.poolPath(root.getChild(1));
  Double allocFraction = null;
  Integer queryParallelism = null;
  String schedulingPolicy = null;
  for (int i = 2; i < root.getChildCount(); ++i) {
    Tree child = root.getChild(i);
    if (child.getChildCount() != 1) {
      throw new SemanticException("Expected 1 parameter for: " + child.getText());
    }
    String param = child.getChild(0).getText();
    switch (child.getType()) {
      case HiveParser.TOK_ALLOC_FRACTION:
        allocFraction = Double.parseDouble(param);
        break;
      case HiveParser.TOK_QUERY_PARALLELISM:
        queryParallelism = Integer.parseInt(param);
        break;
      case HiveParser.TOK_SCHEDULING_POLICY:
        schedulingPolicy = PlanUtils.stripQuotes(param);
        break;
      case HiveParser.TOK_PATH:
        throw new SemanticException("Invalid parameter path in create pool");
      default:
        throw new SemanticException("Invalid parameter " + child.getText() + " in create pool");
    }
  }
  if (allocFraction == null) {
    throw new SemanticException("alloc_fraction should be specified for a pool");
  }
  if (queryParallelism == null) {
    throw new SemanticException("query_parallelism should be specified for a pool");
  }
  CreateWMPoolDesc desc =
      new CreateWMPoolDesc(resourcePlanName, poolPath, allocFraction, queryParallelism, schedulingPolicy);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  DDLUtils.addServiceOutput(conf, getOutputs());
}
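One caveat worth calling out: Double.parseDouble and Integer.parseInt throw an unchecked NumberFormatException on a malformed literal, so a bad alloc_fraction surfaces as a raw runtime error rather than a SemanticException. A defensive variant of the fraction parse (hypothetical, not part of the Hive source; the 0..1 range check is an assumption about allocation fractions, which Hive otherwise validates when the resource plan is applied):

import org.apache.hadoop.hive.ql.parse.SemanticException;

/** Parses an alloc_fraction literal, mapping bad input to SemanticException. */
static double parseFraction(String text) throws SemanticException {
  final double value;
  try {
    value = Double.parseDouble(text);
  } catch (NumberFormatException e) {
    throw new SemanticException("Invalid numeric literal for alloc_fraction: " + text, e);
  }
  // Assumption: a single pool's fraction lies in [0, 1]; sibling sums are checked elsewhere.
  if (value < 0.0 || value > 1.0) {
    throw new SemanticException("alloc_fraction must be between 0 and 1: " + text);
  }
  return value;
}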
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: class AlterTableSetFileFormatAnalyzer, method analyzeCommand.
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
    throws SemanticException {
  StorageFormat format = new StorageFormat(conf);
  ASTNode child = (ASTNode) command.getChild(0);
  if (!format.fillStorageFormat(child)) {
    throw new AssertionError("Unknown token " + child.getText());
  }
  AlterTableSetFileFormatDesc desc = new AlterTableSetFileFormatDesc(tableName, partitionSpec,
      format.getInputFormat(), format.getOutputFormat(), format.getSerde());
  addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_FILE_FORMAT, false);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  setAcidDdlDesc(getTable(tableName), desc);
}
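For context, fillStorageFormat resolves a file-format shorthand in the AST into the (input format, output format, serde) class triple that AlterTableSetFileFormatDesc then carries. As an illustration, the triple the ORC shorthand resolves to (class names from the hive-exec ORC module; the constants below exist only for this example):

// Illustrative only: what "ALTER TABLE t SET FILEFORMAT ORC" resolves to.
static final String ORC_INPUT_FORMAT = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat";
static final String ORC_OUTPUT_FORMAT = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat";
static final String ORC_SERDE = "org.apache.hadoop.hive.ql.io.orc.OrcSerde";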
Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache: class AlterWMPoolAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  if (root.getChildCount() < 3) {
    throw new SemanticException("Invalid syntax for alter pool: " + root.toStringTree());
  }
  String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
  String poolPath = WMUtils.poolPath(root.getChild(1));
  Double allocFraction = null;
  Integer queryParallelism = null;
  String schedulingPolicy = null;
  boolean removeSchedulingPolicy = false;
  String newPath = null;
  for (int i = 2; i < root.getChildCount(); ++i) {
    Tree child = root.getChild(i);
    if (child.getChildCount() != 1) {
      throw new SemanticException("Invalid syntax in alter pool: expected a parameter.");
    }
    Tree param = child.getChild(0);
    switch (child.getType()) {
      case HiveParser.TOK_ALLOC_FRACTION:
        allocFraction = Double.parseDouble(param.getText());
        break;
      case HiveParser.TOK_QUERY_PARALLELISM:
        queryParallelism = Integer.parseInt(param.getText());
        break;
      case HiveParser.TOK_SCHEDULING_POLICY:
        if (param.getType() != HiveParser.TOK_NULL) {
          schedulingPolicy = PlanUtils.stripQuotes(param.getText());
        } else {
          removeSchedulingPolicy = true;
        }
        break;
      case HiveParser.TOK_PATH:
        newPath = WMUtils.poolPath(param);
        break;
      default:
        throw new SemanticException("Incorrect alter syntax: " + child.toStringTree());
    }
  }
  AlterWMPoolDesc desc = new AlterWMPoolDesc(resourcePlanName, poolPath, allocFraction, queryParallelism,
      schedulingPolicy, removeSchedulingPolicy, newPath);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  DDLUtils.addServiceOutput(conf, getOutputs());
}
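Unlike CreateWMPoolDesc, the alter descriptor tolerates nulls: allocFraction, queryParallelism, schedulingPolicy, and newPath all start as null and are only set when the corresponding token appears, which suggests a "null means keep the current setting" convention when the change is applied. A minimal sketch of that merge rule (the helper is hypothetical, not Hive API):

/** Returns the requested value if present, otherwise keeps the existing setting. */
static <T> T orExisting(T requested, T existing) {
  // A null requested value leaves the pool's current setting untouched.
  return requested != null ? requested : existing;
}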