Usage example of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in the Apache Hive project:
the TextDescTableFormatter class, method addStatsData.
/**
 * Renders the column schema (and, when requested, per-column statistics) as a text
 * table and writes it to the given stream as UTF-8 bytes.
 *
 * @param out           destination stream for the rendered text
 * @param columnPath    non-null when a single column was described; triggers the
 *                      column-statistics layout
 * @param columns       columns to render, one table row each
 * @param isFormatted   whether formatted (DESCRIBE FORMATTED) output was requested
 * @param columnStats   statistics objects matched to columns by name/type via the
 *                      sibling helper getColumnStatisticsObject
 * @param isOutputPadded whether cells are padded with spaces for CLI alignment
 * @throws IOException if writing to the stream fails
 */
private void addStatsData(DataOutputStream out, String columnPath, List<FieldSchema> columns, boolean isFormatted, List<ColumnStatisticsObj> columnStats, boolean isOutputPadded) throws IOException {
  StringBuilder statsData = new StringBuilder();
  TextMetaDataTable metaDataTable = new TextMetaDataTable();
  boolean needColStats = isFormatted && columnPath != null;
  if (needColStats) {
    // Single-column formatted output: statistics headers become the first row.
    metaDataTable.addRow(DescTableDesc.COLUMN_STATISTICS_HEADERS.toArray(new String[0]));
  } else if (isFormatted && !SessionState.get().isHiveServerQuery()) {
    // CLI-only header row; HiveServer2 clients consume the data without it.
    statsData.append("# ");
    metaDataTable.addRow(DescTableDesc.SCHEMA.split("#")[0].split(","));
  }
  for (FieldSchema column : columns) {
    ColumnStatisticsObj stats = getColumnStatisticsObject(column.getName(), column.getType(), columnStats);
    metaDataTable.addRow(ShowUtils.extractColumnValues(column, needColStats, stats));
  }
  if (needColStats) {
    // Statistics read better with one metric per line, so flip rows/columns.
    metaDataTable.transpose();
  }
  statsData.append(metaDataTable.renderTable(isOutputPadded));
  out.write(statsData.toString().getBytes(StandardCharsets.UTF_8));
}
Usage example of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in the Apache Hive project:
the ShowColumnsOperation class, method writeColumns.
/**
 * Writes one row per column (as produced by ShowUtils.extractColumnValues, without
 * statistics) to the output stream as a rendered text table, encoded in UTF-8.
 *
 * @param outStream destination stream for the rendered table
 * @param columns   columns to list, in order
 * @throws IOException if writing to the stream fails
 */
private void writeColumns(DataOutputStream outStream, List<FieldSchema> columns) throws IOException {
  TextMetaDataTable tmd = new TextMetaDataTable();
  for (FieldSchema fieldSchema : columns) {
    tmd.addRow(ShowUtils.extractColumnValues(fieldSchema, false, null));
  }
  // In case the query is served by HiveServer2, don't pad it with spaces,
  // as HiveServer2 output is consumed by JDBC/ODBC clients.
  boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
  // Encode explicitly as UTF-8 instead of DataOutputStream.writeBytes, which drops
  // the high byte of every char and corrupts non-Latin-1 column names/comments.
  // This also matches how the other formatter methods write their output.
  outStream.write(tmd.renderTable(isOutputPadded).getBytes(StandardCharsets.UTF_8));
}
Usage example of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in the Apache Hive project:
the TextDescTableFormatter class, method getViewInfo.
/**
 * Appends view details to the table-info buffer: the original and expanded query
 * text for every view, plus rewrite status and a per-source-table change summary
 * for materialized views.
 *
 * @param tableInfo      buffer the formatted output is appended to
 * @param table          the (materialized) view being described
 * @param isOutputPadded whether table cells are padded with spaces for alignment
 */
private void getViewInfo(StringBuilder tableInfo, Table table, boolean isOutputPadded) {
  formatOutput("Original Query:", table.getViewOriginalText(), tableInfo);
  formatOutput("Expanded Query:", table.getViewExpandedText(), tableInfo);
  if (!table.isMaterializedView()) {
    return;
  }
  formatOutput("Rewrite Enabled:", table.isRewriteEnabled() ? "Yes" : "No", tableInfo);
  Boolean outdated = table.isOutdatedForRewriting();
  // Tri-state: null means the outdated check could not be performed.
  formatOutput("Outdated for Rewriting:", outdated == null ? "Unknown" : (outdated ? "Yes" : "No"), tableInfo);
  tableInfo.append(LINE_DELIM).append("# Materialized View Source table information").append(LINE_DELIM);
  TextMetaDataTable sourceTableData = new TextMetaDataTable();
  sourceTableData.addRow("Table name", "I/U/D since last rebuild");
  // Deterministic listing: sort source tables by database, then table name.
  List<SourceTable> sources = new ArrayList<>(table.getMVMetadata().getSourceTables());
  Comparator<SourceTable> byDbThenName =
      Comparator.<SourceTable, String>comparing(source -> source.getTable().getDbName())
          .thenComparing(source -> source.getTable().getTableName());
  sources.sort(byDbThenName);
  for (SourceTable source : sources) {
    String qualifiedName = TableName.getQualified(
        source.getTable().getCatName(), source.getTable().getDbName(), source.getTable().getTableName());
    String changeCounts = String.format("%d/%d/%d",
        source.getInsertedCount(), source.getUpdatedCount(), source.getDeletedCount());
    sourceTableData.addRow(qualifiedName, changeCounts);
  }
  tableInfo.append(sourceTableData.renderTable(isOutputPadded));
}
Usage example of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in the Apache Hive project:
the TextDescTableFormatter class, method addPartitionTransformData.
/**
 * Writes the "# Partition Transform Information" section for non-native tables
 * whose storage handler supports partition transforms. Writes nothing (an empty
 * byte sequence) when the table has no transform specs.
 *
 * @param out            destination stream, written as UTF-8 bytes
 * @param table          table whose storage handler is queried for transform specs
 * @param isOutputPadded whether table cells are padded with spaces for alignment
 * @throws IOException if writing to the stream fails
 */
private void addPartitionTransformData(DataOutputStream out, Table table, boolean isOutputPadded) throws IOException {
  StringBuilder output = new StringBuilder();
  boolean supportsTransforms = table.isNonNative()
      && table.getStorageHandler() != null
      && table.getStorageHandler().supportsPartitionTransform();
  if (supportsTransforms) {
    List<PartitionTransformSpec> partSpecs = table.getStorageHandler().getPartitionTransformSpec(table);
    if (partSpecs != null && !partSpecs.isEmpty()) {
      TextMetaDataTable metaDataTable = new TextMetaDataTable();
      output.append(LINE_DELIM).append("# Partition Transform Information").append(LINE_DELIM).append("# ");
      metaDataTable.addRow(DescTableDesc.PARTITION_TRANSFORM_SPEC_SCHEMA.split("#")[0].split(","));
      for (PartitionTransformSpec spec : partSpecs) {
        // Second cell stays null when no transform type is set (original behavior).
        String transform = null;
        if (spec.getTransformType() != null) {
          transform = spec.getTransformParam().isPresent()
              ? spec.getTransformType().name() + "[" + spec.getTransformParam().get() + "]"
              : spec.getTransformType().name();
        }
        metaDataTable.addRow(new String[] { spec.getColumnName(), transform });
      }
      output.append(metaDataTable.renderTable(isOutputPadded));
    }
  }
  out.write(output.toString().getBytes(StandardCharsets.UTF_8));
}
Usage example of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in the Apache Hive project:
the TextDescTableFormatter class, method addPartitionData.
/**
 * Writes partition-related output for DESCRIBE. With no column path, emits the
 * "# Partition Information" section listing the partition columns (when the table
 * is partitioned and HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY is enabled). With a
 * column path, emits the COLUMN_STATS_ACCURATE table parameter instead.
 *
 * @param out            destination stream, written as UTF-8 bytes
 * @param conf           configuration consulted for the display flag
 * @param columnPath     non-null when a single column was described
 * @param table          table being described
 * @param isFormatted    whether formatted output (Java-escaped stats value) was requested
 * @param isOutputPadded whether table cells are padded with spaces for alignment
 * @throws IOException if writing to the stream fails
 */
private void addPartitionData(DataOutputStream out, HiveConf conf, String columnPath, Table table, boolean isFormatted, boolean isOutputPadded) throws IOException {
  String partitionData = "";
  if (columnPath == null) {
    List<FieldSchema> partitionColumns = table.isPartitioned() ? table.getPartCols() : null;
    if (CollectionUtils.isNotEmpty(partitionColumns) && conf.getBoolVar(ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY)) {
      TextMetaDataTable metaDataTable = new TextMetaDataTable();
      partitionData += LINE_DELIM + "# Partition Information" + LINE_DELIM + "# ";
      metaDataTable.addRow(DescTableDesc.SCHEMA.split("#")[0].split(","));
      for (FieldSchema partitionColumn : partitionColumns) {
        metaDataTable.addRow(ShowUtils.extractColumnValues(partitionColumn, false, null));
      }
      partitionData += metaDataTable.renderTable(isOutputPadded);
    }
  } else {
    // Fix: check getParameters() for null BEFORE dereferencing it. The original
    // called table.getParameters().get(...) first, so its null guard could never
    // fire and a parameter-less table would throw NPE instead of writing nothing.
    String statsState = table.getParameters() == null
        ? null : table.getParameters().get(StatsSetupConst.COLUMN_STATS_ACCURATE);
    if (statsState != null) {
      StringBuilder stringBuilder = new StringBuilder();
      formatOutput(StatsSetupConst.COLUMN_STATS_ACCURATE,
          isFormatted ? StringEscapeUtils.escapeJava(statsState) : HiveStringUtils.escapeJava(statsState),
          stringBuilder, isOutputPadded);
      partitionData += stringBuilder.toString();
    }
  }
  out.write(partitionData.getBytes(StandardCharsets.UTF_8));
}
Aggregations