Use of org.apache.hadoop.hive.ql.ddl.table.info.desc.formatter.DescTableFormatter in project hive by apache.
The class DescTableOperation, method execute.
@Override
public int execute() throws Exception {
  Table table = getTable();
  Partition part = getPartition(table);
  final String dbTableName = desc.getDbTableName();

  try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) {
    LOG.debug("DDLTask: got data for {}", dbTableName);

    List<FieldSchema> cols = new ArrayList<>();
    List<ColumnStatisticsObj> colStats = new ArrayList<>();

    Deserializer deserializer = getDeserializer(table);

    // Collect the column schemas to describe; when a specific column path is requested with
    // formatted output, also gather the column statistics.
    if (desc.getColumnPath() == null) {
      getColumnsNoColumnPath(table, part, cols);
    } else {
      if (desc.isFormatted()) {
        getColumnDataColPathSpecified(table, part, cols, colStats, deserializer);
      } else {
        cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
      }
    }
    fixDecimalColumnTypeName(cols);

    setConstraintsAndStorageHandlerInfo(table);
    handleMaterializedView(table);

    // In case the query is served by HiveServer2, don't pad it with spaces,
    // as HiveServer2 output is consumed by JDBC/ODBC clients.
    boolean isOutputPadded = !SessionState.get().isHiveServerQuery();

    // Pick the formatter implementation based on the configuration and write the description
    // to the result file.
    DescTableFormatter formatter = DescTableFormatter.getFormatter(context.getConf());
    formatter.describeTable(context.getConf(), outStream, desc.getColumnPath(), dbTableName, table, part, cols,
        desc.isFormatted(), desc.isExtended(), isOutputPadded, colStats);

    LOG.debug("DDLTask: written data for {}", dbTableName);
  } catch (SQLException e) {
    throw new HiveException(e, ErrorMsg.GENERIC_ERROR, dbTableName);
  }
  return 0;
}
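
For reference, DescTableFormatter.getFormatter(context.getConf()) above returns a concrete formatter chosen from the configuration, and the selected instance renders the table description into the result file. The sketch below shows what a custom subclass could look like; it is illustrative only: the HypotheticalTsvDescTableFormatter name is invented, and the describeTable signature is inferred from the call site in execute() rather than taken from the Hive sources.

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.ddl.table.info.desc.formatter.DescTableFormatter;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

// Hypothetical formatter, not part of Hive: writes one "name<TAB>type<TAB>comment" row per column.
// The describeTable parameter list is assumed to mirror the call made by DescTableOperation above.
public class HypotheticalTsvDescTableFormatter extends DescTableFormatter {
  @Override
  public void describeTable(HiveConf conf, DataOutputStream out, String columnPath, String tableName,
      Table table, Partition partition, List<FieldSchema> columns, boolean isFormatted, boolean isExtended,
      boolean isOutputPadded, List<ColumnStatisticsObj> columnStats) throws HiveException {
    try {
      for (FieldSchema column : columns) {
        // Emit a plain tab-separated row per column; comments may be null in the metastore schema.
        String comment = column.getComment() == null ? "" : column.getComment();
        out.write((column.getName() + "\t" + column.getType() + "\t" + comment + "\n")
            .getBytes(StandardCharsets.UTF_8));
      }
    } catch (IOException e) {
      throw new HiveException(e);
    }
  }
}

In Hive itself the formatter is selected by DescTableFormatter.getFormatter from the built-in implementations; the sketch only demonstrates the shape of the contract that execute() relies on.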