Use of org.apache.hadoop.hive.ql.plan.AddPartitionDesc.OnePartitionDesc in project hive by apache.
From the class DDLSemanticAnalyzer, method analyzeAlterTableAddParts.
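This method performs semantic analysis for ALTER TABLE ... ADD PARTITION and its ALTER VIEW counterpart, e.g. a statement such as ALTER TABLE page_view ADD IF NOT EXISTS PARTITION (dt='2018-01-01') LOCATION '/warehouse/page_view/dt=2018-01-01' (table name, partition column, and path here are only illustrative). Each PARTITION clause becomes one OnePartitionDesc inside a single AddPartitionDesc, which is then handed to a DDL task.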
/**
* Add one or more partitions to a table. Useful when the data has been copied
* to the right location by some other process.
*
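* @param qualified
* Name of the table (or view) being altered, split into its qualified-name components.
*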
* @param ast
* The parsed command tree.
*
* @param expectView
* True for ALTER VIEW, false for ALTER TABLE.
*
* @throws SemanticException
* Parsing failed
*/
private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boolean expectView) throws SemanticException {
  // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
  boolean ifNotExists = ast.getChild(0).getType() == HiveParser.TOK_IFNOTEXISTS;
  Table tab = getTable(qualified);
  boolean isView = tab.isView();
  validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView);
  outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED));
  int numCh = ast.getChildCount();
  int start = ifNotExists ? 1 : 0;
  String currentLocation = null;
  Map<String, String> currentPart = null;
  // Parser has done some verification, so the order of tokens doesn't need to be verified here.
  AddPartitionDesc addPartitionDesc = new AddPartitionDesc(tab.getDbName(), tab.getTableName(), ifNotExists);
  for (int num = start; num < numCh; num++) {
    ASTNode child = (ASTNode) ast.getChild(num);
    switch (child.getToken().getType()) {
    case HiveParser.TOK_PARTSPEC:
      if (currentPart != null) {
        addPartitionDesc.addPartition(currentPart, currentLocation);
        currentLocation = null;
      }
      currentPart = getValidatedPartSpec(tab, child, conf, true);
      // validate reserved values
      validatePartitionValues(currentPart);
      break;
    case HiveParser.TOK_PARTITIONLOCATION:
      // if location specified, set in partition
      if (isView) {
        throw new SemanticException("LOCATION clause illegal for view partition");
      }
      currentLocation = unescapeSQLString(child.getChild(0).getText());
      inputs.add(toReadEntity(currentLocation));
      break;
    default:
      throw new SemanticException("Unknown child: " + child);
    }
  }
  // add the last one
  if (currentPart != null) {
    addPartitionDesc.addPartition(currentPart, currentLocation);
  }
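  // With automatic stats gathering on, partitions added without an explicit LOCATION are created
  // empty at their default location, so their basic stats can be marked accurate right away.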
  if (this.conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
    for (int index = 0; index < addPartitionDesc.getPartitionCount(); index++) {
      OnePartitionDesc desc = addPartitionDesc.getPartition(index);
      if (desc.getLocation() == null) {
        if (desc.getPartParams() == null) {
          desc.setPartParams(new HashMap<String, String>());
        }
        StatsSetupConst.setBasicStatsStateForCreateTable(desc.getPartParams(), StatsSetupConst.TRUE);
      }
    }
  }
  if (addPartitionDesc.getPartitionCount() == 0) {
    // nothing to do
    return;
  }
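  // Create the DDL task that will add the collected partitions to the metastore at execution time.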
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), addPartitionDesc), conf));
  if (isView) {
    // Compile internal query to capture underlying table partition dependencies
    StringBuilder cmd = new StringBuilder();
    cmd.append("SELECT * FROM ");
    cmd.append(HiveUtils.unparseIdentifier(getDotName(qualified)));
    cmd.append(" WHERE ");
    boolean firstOr = true;
    for (int i = 0; i < addPartitionDesc.getPartitionCount(); ++i) {
      AddPartitionDesc.OnePartitionDesc partitionDesc = addPartitionDesc.getPartition(i);
      if (firstOr) {
        firstOr = false;
      } else {
        cmd.append(" OR ");
      }
      boolean firstAnd = true;
      cmd.append("(");
      for (Map.Entry<String, String> entry : partitionDesc.getPartSpec().entrySet()) {
        if (firstAnd) {
          firstAnd = false;
        } else {
          cmd.append(" AND ");
        }
        cmd.append(HiveUtils.unparseIdentifier(entry.getKey(), conf));
        cmd.append(" = '");
        cmd.append(HiveUtils.escapeString(entry.getValue()));
        cmd.append("'");
      }
      cmd.append(")");
    }
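    // Compile (but do not execute) the generated SELECT so the read entities of the view's
    // underlying tables can be merged into this statement's inputs.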
    Driver driver = new Driver(conf);
    int rc = driver.compile(cmd.toString(), false);
    if (rc != 0) {
      throw new SemanticException(ErrorMsg.NO_VALID_PARTN.getMsg());
    }
    inputs.addAll(driver.getPlan().getInputs());
  }
}
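For reference, here is a minimal, self-contained sketch of the same AddPartitionDesc / OnePartitionDesc calls exercised outside the analyzer. The database name, table name, partition column, and paths are placeholders, the import paths assume Hive's standard package layout, and nothing is submitted to a metastore; the sketch only shows how partition specs and locations are accumulated and then read back, mirroring the stats loop above.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc.OnePartitionDesc;

public class AddPartitionDescSketch {

  public static void main(String[] args) {
    // Equivalent of ALTER TABLE default.page_view ADD IF NOT EXISTS ... (names are placeholders).
    AddPartitionDesc addPartitionDesc = new AddPartitionDesc("default", "page_view", true);

    // A partition with an explicit location, as produced by a LOCATION clause ...
    Map<String, String> withLocation = new HashMap<String, String>();
    withLocation.put("dt", "2018-01-01");
    addPartitionDesc.addPartition(withLocation, "/warehouse/page_view/dt=2018-01-01");

    // ... and one without, which the metastore would place under the table's default path.
    Map<String, String> withoutLocation = new HashMap<String, String>();
    withoutLocation.put("dt", "2018-01-02");
    addPartitionDesc.addPartition(withoutLocation, null);

    // Read the accumulated OnePartitionDesc entries back, mirroring the HIVESTATSAUTOGATHER loop above.
    for (int i = 0; i < addPartitionDesc.getPartitionCount(); i++) {
      OnePartitionDesc desc = addPartitionDesc.getPartition(i);
      if (desc.getLocation() == null) {
        if (desc.getPartParams() == null) {
          desc.setPartParams(new HashMap<String, String>());
        }
        StatsSetupConst.setBasicStatsStateForCreateTable(desc.getPartParams(), StatsSetupConst.TRUE);
      }
      System.out.println(desc.getPartSpec() + " -> " + desc.getLocation());
    }
  }
}

The key point is that AddPartitionDesc is just a container: each addPartition call appends a OnePartitionDesc, and the analyzer above builds exactly one such container per ALTER TABLE ... ADD PARTITION statement.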