Use of org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateWork in project hive by apache.

From the class DDLTask, method truncateTable. When column indexes are set on the TruncateTableDesc, the method launches a ColumnTruncateTask that rewrites the RCFile data with those columns truncated; otherwise it empties the table's (or partition's) data directories directly on the filesystem.
private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws HiveException {
  if (truncateTableDesc.getColumnIndexes() != null) {
    // Column truncation: run a ColumnTruncateTask that rewrites the RCFile
    // data with the given columns truncated.
    ColumnTruncateWork truncateWork = new ColumnTruncateWork(truncateTableDesc.getColumnIndexes(),
        truncateTableDesc.getInputDir(), truncateTableDesc.getOutputDir());
    truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx());
    truncateWork.setMapperCannotSpanPartns(true);
    DriverContext driverCxt = new DriverContext();
    ColumnTruncateTask taskExec = new ColumnTruncateTask();
    taskExec.initialize(queryState, null, driverCxt, null);
    taskExec.setWork(truncateWork);
    taskExec.setQueryPlan(this.getQueryPlan());
    subtask = taskExec;
    int ret = taskExec.execute(driverCxt);
    if (subtask.getException() != null) {
      setException(subtask.getException());
    }
    return ret;
  }

  String tableName = truncateTableDesc.getTableName();
  Map<String, String> partSpec = truncateTableDesc.getPartSpec();
  Table table = db.getTable(tableName, true);

  try {
    // this is not transactional
    for (Path location : getLocations(db, table, partSpec)) {
      FileSystem fs = location.getFileSystem(conf);
      HadoopShims.HdfsEncryptionShim shim = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, conf);
      if (!shim.isPathEncrypted(location)) {
        // Plain path: trash the whole directory, recreate it empty, and
        // restore its previous ownership and permissions.
        HdfsUtils.HadoopFileStatus status = new HdfsUtils.HadoopFileStatus(conf, fs, location);
        FileStatus targetStatus = fs.getFileStatus(location);
        String targetGroup = targetStatus == null ? null : targetStatus.getGroup();
        FileUtils.moveToTrash(fs, location, conf);
        fs.mkdirs(location);
        HdfsUtils.setFullFileStatus(conf, status, targetGroup, fs, location, false);
      } else {
        // Encrypted path: the directory cannot be renamed out of its HDFS
        // encryption zone, so its visible contents are trashed one by one.
        FileStatus[] statuses = fs.listStatus(location, FileUtils.HIDDEN_FILES_PATH_FILTER);
        if (statuses == null || statuses.length == 0) {
          continue;
        }
        boolean success = Hive.trashFiles(fs, statuses, conf);
        if (!success) {
          throw new HiveException("Error in deleting the contents of " + location.toString());
        }
      }
    }
  } catch (Exception e) {
    throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
  }
  return 0;
}
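For comparison, here is a minimal, self-contained sketch of what the non-encrypted branch above does, written against only public Hadoop filesystem APIs. The class and method names (TruncateLocationSketch, truncateLocation) are illustrative, and Hive's own helpers (FileUtils.moveToTrash, HdfsUtils.setFullFileStatus) additionally handle ACLs and error cases that this sketch omits.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public class TruncateLocationSketch {
  static void truncateLocation(Path location, Configuration conf) throws Exception {
    FileSystem fs = location.getFileSystem(conf);

    // Capture the directory's current group and permissions so the
    // recreated directory can be given the same ones back.
    FileStatus before = fs.getFileStatus(location);

    // Move the whole directory to the user's trash. moveToAppropriateTrash
    // returns false when trash is disabled (fs.trash.interval = 0); Hive's
    // FileUtils.moveToTrash falls back to a plain delete in that case.
    if (!Trash.moveToAppropriateTrash(fs, location, conf)) {
      fs.delete(location, true);
    }

    // Recreate the now-empty directory and restore group and permissions.
    fs.mkdirs(location);
    fs.setPermission(location, before.getPermission());
    fs.setOwner(location, null, before.getGroup()); // null user: owner unchanged
  }
}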
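And a similar sketch for the encrypted branch. A directory inside an HDFS encryption zone cannot simply be renamed out of the zone into the trash, so the visible children are removed one at a time, which is what Hive.trashFiles does for the listing above. The filter below re-creates the effect of Hive's FileUtils.HIDDEN_FILES_PATH_FILTER (names starting with "." or "_" are skipped); the class and method names are again illustrative only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.Trash;

public class EncryptedTruncateSketch {
  // Treat names starting with "." or "_" as hidden, as Hive's filter does.
  private static final PathFilter VISIBLE_FILES =
      p -> !p.getName().startsWith(".") && !p.getName().startsWith("_");

  static void truncateEncryptedLocation(Path location, Configuration conf) throws Exception {
    FileSystem fs = location.getFileSystem(conf);
    for (FileStatus child : fs.listStatus(location, VISIBLE_FILES)) {
      // Trash each entry individually (recent Hadoop versions keep a
      // per-zone .Trash, so the move stays inside the encryption zone);
      // fall back to a plain recursive delete when trash is disabled.
      if (!Trash.moveToAppropriateTrash(fs, child.getPath(), conf)) {
        if (!fs.delete(child.getPath(), true)) {
          throw new IllegalStateException("Error in deleting the contents of " + location);
        }
      }
    }
  }
}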