Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class ImportSemanticAnalyzer, method addSinglePartition. The method builds the task graph for importing one partition: an external table with no explicit target location only needs a bare add-partition task, while every other case chains a copy task and an add-partition task into a final move task.
private static Task<?> addSinglePartition(URI fromURI, FileSystem fs, ImportTableDesc tblDesc,
    Table table, Warehouse wh, AddPartitionDesc addPartitionDesc, ReplicationSpec replicationSpec,
    EximUtil.SemanticAnalyzerWrapperContext x) throws MetaException, IOException, HiveException {
  AddPartitionDesc.OnePartitionDesc partSpec = addPartitionDesc.getPartition(0);
  if (tblDesc.isExternal() && tblDesc.getLocation() == null) {
    x.getLOG().debug("Importing in-place: adding AddPart for partition "
        + partSpecToString(partSpec.getPartSpec()));
    // addPartitionDesc already has the right partition location
    Task<?> addPartTask = TaskFactory.get(
        new DDLWork(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf());
    return addPartTask;
  } else {
    String srcLocation = partSpec.getLocation();
    fixLocationInPartSpec(fs, tblDesc, table, wh, replicationSpec, partSpec, x);
    x.getLOG().debug("adding dependent CopyWork/AddPart/MoveWork for partition "
        + partSpecToString(partSpec.getPartSpec()) + " with source location: " + srcLocation);
    Path tgtLocation = new Path(partSpec.getLocation());
    Path tmpPath = x.getCtx().getExternalTmpPath(tgtLocation);
    Task<?> copyTask = ReplCopyTask.getLoadCopyTask(
        replicationSpec, new Path(srcLocation), tmpPath, x.getConf());
    Task<?> addPartTask = TaskFactory.get(
        new DDLWork(x.getInputs(), x.getOutputs(), addPartitionDesc), x.getConf());
    LoadTableDesc loadTableWork = new LoadTableDesc(
        tmpPath, Utilities.getTableDesc(table), partSpec.getPartSpec(), true);
    loadTableWork.setInheritTableSpecs(false);
    Task<?> loadPartTask = TaskFactory.get(
        new MoveWork(x.getInputs(), x.getOutputs(), loadTableWork, null, false), x.getConf());
    copyTask.addDependentTask(loadPartTask);
    addPartTask.addDependentTask(loadPartTask);
    x.getTasks().add(copyTask);
    return addPartTask;
  }
}
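For context, the statement that reaches this method is Hive's IMPORT command. The following is a minimal, hypothetical JDBC sketch of an EXPORT/IMPORT round trip; the table names, partition value, paths, and connection URL are illustrative placeholders, not values from the Hive source, and it assumes the Hive JDBC driver is on the classpath. A managed-table import exercises the copy/add-partition/move branch above, while IMPORT EXTERNAL TABLE without a LOCATION clause exercises the in-place branch.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class ImportPartitionExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = conn.createStatement()) {
      // EXPORT writes the partition data plus a _metadata file to the target path.
      stmt.execute("EXPORT TABLE src_events PARTITION (ds='2024-01-01') TO '/tmp/export/src_events'");
      // Importing into a managed table takes the copy + add-partition + move branch;
      // IMPORT EXTERNAL TABLE with no LOCATION takes the in-place add-partition branch.
      stmt.execute("IMPORT TABLE dst_events FROM '/tmp/export/src_events'");
    }
  }
}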
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class DDLSemanticAnalyzer, method analyzeSwitchDatabase. The method records a lock-free read entity for the target database, then schedules a SwitchDatabaseDesc wrapped in a DDLWork.
private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());
  Database database = getDatabase(dbName, true);
  ReadEntity dbReadEntity = new ReadEntity(database);
  dbReadEntity.noLockNeeded();
  inputs.add(dbReadEntity);
  SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), switchDatabaseDesc), conf));
}
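The statement compiled here is a plain USE. Below is a small hedged fragment in the same JDBC style as the sketch above; the database name is a placeholder, and stmt is assumed to be a Statement on a live HiveServer2 connection.

import java.sql.SQLException;
import java.sql.Statement;

class SwitchDatabaseExample {
  // "stmt" is a Statement on an open HiveServer2 connection, as in the earlier sketch.
  static void switchDatabase(Statement stmt) throws SQLException {
    // Compiled by analyzeSwitchDatabase into a DDLWork wrapping a SwitchDatabaseDesc;
    // getDatabase(dbName, true) makes analysis fail if the database does not exist.
    stmt.execute("USE sales_db");
  }
}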
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class DDLSemanticAnalyzer, method handleAlterTableDisableStoredAsDirs. The method refuses to run on tables without skew metadata, then schedules an AlterTableDesc that turns off skewed subdirectory storage.
/**
 * Handles ALTER TABLE &lt;name&gt; NOT STORED AS DIRECTORIES.
 *
 * @param tableName name of the table being altered
 * @param tab the table metadata object
 * @throws SemanticException if the table has no skewed columns or values
 */
private void handleAlterTableDisableStoredAsDirs(String tableName, Table tab)
    throws SemanticException {
  List<String> skewedColNames = tab.getSkewedColNames();
  List<List<String>> skewedColValues = tab.getSkewedColValues();
  if ((skewedColNames == null) || (skewedColNames.size() == 0)
      || (skewedColValues == null) || (skewedColValues.size() == 0)) {
    throw new SemanticException(ErrorMsg.ALTER_TBL_STOREDASDIR_NOT_SKEWED.getMsg(tableName));
  }
  AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, false, skewedColNames, skewedColValues);
  alterTblDesc.setStoredAsSubDirectories(false);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
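For illustration, a hedged fragment of the DDL this method handles. The table name and skew values are placeholders, the CREATE statement exists only to satisfy the skew check, and stmt is again an open JDBC Statement.

import java.sql.SQLException;
import java.sql.Statement;

class DisableStoredAsDirsExample {
  static void disableStoredAsDirs(Statement stmt) throws SQLException {
    // A table must be skewed and stored as directories before the ALTER applies;
    // on any other table the analyzer raises ALTER_TBL_STOREDASDIR_NOT_SKEWED.
    stmt.execute("CREATE TABLE skewed_logs (k STRING, v STRING) "
        + "SKEWED BY (k) ON ('a','b') STORED AS DIRECTORIES");
    stmt.execute("ALTER TABLE skewed_logs NOT STORED AS DIRECTORIES");
  }
}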
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class DDLSemanticAnalyzer, method analyzeAlterTableBucketNum. The method verifies the table is bucketed before scheduling an AlterTableDesc that changes its bucket count.
private void analyzeAlterTableBucketNum(ASTNode ast, String tblName,
    HashMap<String, String> partSpec) throws SemanticException {
  Table tab = getTable(tblName, true);
  if (tab.getBucketCols() == null || tab.getBucketCols().isEmpty()) {
    throw new SemanticException(ErrorMsg.ALTER_BUCKETNUM_NONBUCKETIZED_TBL.getMsg());
  }
  validateAlterTableType(tab, AlterTableTypes.ALTERBUCKETNUM);
  inputs.add(new ReadEntity(tab));
  int bucketNum = Integer.parseInt(ast.getChild(0).getText());
  AlterTableDesc alterBucketNum = new AlterTableDesc(tblName, partSpec, bucketNum);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterBucketNum), conf));
}
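A hedged fragment showing the statement shape this method compiles, assuming the ALTER TABLE ... INTO n BUCKETS syntax of the Hive versions this code comes from; the table name and partition spec are placeholders, and stmt is an open JDBC Statement as before.

import java.sql.SQLException;
import java.sql.Statement;

class AlterBucketNumExample {
  static void changeBucketCount(Statement stmt) throws SQLException {
    // Rejected with ALTER_BUCKETNUM_NONBUCKETIZED_TBL unless the table was
    // created CLUSTERED BY ... INTO n BUCKETS in the first place.
    stmt.execute("ALTER TABLE clicks PARTITION (ds='2024-01-01') INTO 8 BUCKETS");
  }
}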
Use of org.apache.hadoop.hive.ql.plan.DDLWork in project hive by apache: class DDLSemanticAnalyzer, method analyzeAlterTableArchive. The method checks that archiving is enabled and that exactly one valid partition spec was given, then schedules an AlterTableSimpleDesc for ARCHIVE or UNARCHIVE.
private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolean isUnArchive)
    throws SemanticException {
  if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) {
    throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg());
  }
  Table tab = getTable(qualified);
  // partition name to value
  List<Map<String, String>> partSpecs = getPartitionSpecs(tab, ast);
  addTablePartsOutputs(tab, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK);
  validateAlterTableType(tab, AlterTableTypes.ARCHIVE);
  inputs.add(new ReadEntity(tab));
  if (partSpecs.size() > 1) {
    throw new SemanticException(isUnArchive
        ? ErrorMsg.UNARCHIVE_ON_MULI_PARTS.getMsg()
        : ErrorMsg.ARCHIVE_ON_MULI_PARTS.getMsg());
  }
  if (partSpecs.size() == 0) {
    throw new SemanticException(ErrorMsg.ARCHIVE_ON_TABLE.getMsg());
  }
  Map<String, String> partSpec = partSpecs.get(0);
  try {
    isValidPrefixSpec(tab, partSpec);
  } catch (HiveException e) {
    throw new SemanticException(e.getMessage(), e);
  }
  AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(getDotName(qualified), partSpec,
      (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), archiveDesc), conf));
}
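A hedged fragment exercising this path; table and partition names are placeholders, stmt is an open JDBC Statement, and the SET command assumes the session is allowed to change hive.archive.enabled.

import java.sql.SQLException;
import java.sql.Statement;

class ArchivePartitionExample {
  static void archiveAndRestore(Statement stmt) throws SQLException {
    // The guard at the top of analyzeAlterTableArchive requires this setting
    // (HiveConf.ConfVars.HIVEARCHIVEENABLED, i.e. hive.archive.enabled).
    stmt.execute("SET hive.archive.enabled=true");
    // Exactly one partition spec is allowed per statement (see the size checks above).
    stmt.execute("ALTER TABLE clicks ARCHIVE PARTITION (ds='2024-01-01')");
    stmt.execute("ALTER TABLE clicks UNARCHIVE PARTITION (ds='2024-01-01')");
  }
}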