Example 1 with ColumnTruncateTask

Use of org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateTask in project hive by apache.

From the class DDLTask, method truncateTable. The method has two branches: when column indexes are supplied (TRUNCATE TABLE ... COLUMNS on an RCFile table) it plans and runs a ColumnTruncateTask; otherwise it clears the table or partition data directories directly.

private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws HiveException {
    if (truncateTableDesc.getColumnIndexes() != null) {
        // Column-truncate branch: rewrite the RCFile data without the
        // truncated columns by running a ColumnTruncateTask.
        ColumnTruncateWork truncateWork = new ColumnTruncateWork(
                truncateTableDesc.getColumnIndexes(),
                truncateTableDesc.getInputDir(),
                truncateTableDesc.getOutputDir());
        truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx());
        truncateWork.setMapperCannotSpanPartns(true);
        // queryState, subtask and conf are fields of the enclosing DDLTask.
        DriverContext driverCxt = new DriverContext();
        ColumnTruncateTask taskExec = new ColumnTruncateTask();
        taskExec.initialize(queryState, null, driverCxt, null);
        taskExec.setWork(truncateWork);
        taskExec.setQueryPlan(this.getQueryPlan());
        subtask = taskExec;
        int ret = taskExec.execute(driverCxt);
        if (subtask.getException() != null) {
            setException(subtask.getException());
        }
        return ret;
    }
    String tableName = truncateTableDesc.getTableName();
    Map<String, String> partSpec = truncateTableDesc.getPartSpec();
    Table table = db.getTable(tableName, true);
    try {
        // this is not transactional
        for (Path location : getLocations(db, table, partSpec)) {
            FileSystem fs = location.getFileSystem(conf);
            HadoopShims.HdfsEncryptionShim shim = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, conf);
            if (!shim.isPathEncrypted(location)) {
                // Unencrypted location: remember the directory's permissions,
                // move it to trash, recreate it, and restore the permissions.
                HdfsUtils.HadoopFileStatus status = new HdfsUtils.HadoopFileStatus(conf, fs, location);
                FileStatus targetStatus = fs.getFileStatus(location);
                String targetGroup = targetStatus == null ? null : targetStatus.getGroup();
                FileUtils.moveToTrash(fs, location, conf);
                fs.mkdirs(location);
                HdfsUtils.setFullFileStatus(conf, status, targetGroup, fs, location, false);
            } else {
                // Encrypted location: the directory itself must stay in place,
                // so only its visible (non-hidden) files are moved to trash.
                FileStatus[] statuses = fs.listStatus(location, FileUtils.HIDDEN_FILES_PATH_FILTER);
                if (statuses == null || statuses.length == 0) {
                    continue;
                }
                boolean success = Hive.trashFiles(fs, statuses, conf);
                if (!success) {
                    throw new HiveException("Error in deleting the contents of " + location.toString());
                }
            }
        }
    } catch (Exception e) {
        throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
    }
    return 0;
}
Also used:

Path (org.apache.hadoop.fs.Path)
DriverContext (org.apache.hadoop.hive.ql.DriverContext)
Table (org.apache.hadoop.hive.ql.metadata.Table)
FileStatus (org.apache.hadoop.fs.FileStatus)
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
HadoopShims (org.apache.hadoop.hive.shims.HadoopShims)
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException)
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)
IOException (java.io.IOException)
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
URISyntaxException (java.net.URISyntaxException)
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
StringUtils.stringifyException (org.apache.hadoop.util.StringUtils.stringifyException)
SQLException (java.sql.SQLException)
FileNotFoundException (java.io.FileNotFoundException)
HiveAuthzPluginException (org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException)
InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)
ColumnTruncateTask (org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateTask)
FileSystem (org.apache.hadoop.fs.FileSystem)
HdfsUtils (org.apache.hadoop.hive.io.HdfsUtils)
ColumnTruncateWork (org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateWork)
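
For context, the two branches of truncateTable correspond to the two forms of HiveQL's TRUNCATE TABLE statement. The following is a minimal sketch of reaching both paths through org.apache.hadoop.hive.ql.Driver; the table name rc_table is hypothetical, and the COLUMNS form applies only to RCFile-stored tables.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class TruncateTableSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        SessionState.start(conf);
        Driver driver = new Driver(conf);

        // Plain truncate: getColumnIndexes() is null, so truncateTable()
        // clears the table's data directories directly.
        driver.run("TRUNCATE TABLE rc_table");

        // Column truncate (hypothetical rc_table must be stored as RCFile):
        // getColumnIndexes() is set, so truncateTable() plans and executes
        // a ColumnTruncateTask.
        driver.run("TRUNCATE TABLE rc_table COLUMNS (col2)");
    }
}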
