Search in sources :

Example 1 with TextMetaDataTable

use of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in project hive by apache.

The following example shows the addStatsData method of the TextDescTableFormatter class.

private void addStatsData(DataOutputStream out, String columnPath, List<FieldSchema> columns, boolean isFormatted, List<ColumnStatisticsObj> columnStats, boolean isOutputPadded) throws IOException {
    // Renders the column schema — and, when a column path is given in formatted
    // mode, per-column statistics — as a text table on the output stream.
    TextMetaDataTable table = new TextMetaDataTable();
    StringBuilder statsData = new StringBuilder();
    boolean needColStats = isFormatted && columnPath != null;
    if (needColStats) {
        // Column-statistics view: header row comes from the fixed stats headers.
        table.addRow(DescTableDesc.COLUMN_STATISTICS_HEADERS.toArray(new String[0]));
    } else if (isFormatted && !SessionState.get().isHiveServerQuery()) {
        // Plain formatted CLI view: the schema header line is prefixed with "# ".
        statsData.append("# ");
        table.addRow(DescTableDesc.SCHEMA.split("#")[0].split(","));
    }
    for (FieldSchema column : columns) {
        table.addRow(ShowUtils.extractColumnValues(column, needColStats,
            getColumnStatisticsObject(column.getName(), column.getType(), columnStats)));
    }
    if (needColStats) {
        // Stats rows are accumulated column-wise; flip to the row-wise layout.
        table.transpose();
    }
    statsData.append(table.renderTable(isOutputPadded));
    out.write(statsData.toString().getBytes(StandardCharsets.UTF_8));
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) TextMetaDataTable(org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable)

Example 2 with TextMetaDataTable

use of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in project hive by apache.

The following example shows the writeColumns method of the ShowColumnsOperation class.

private void writeColumns(DataOutputStream outStream, List<FieldSchema> columns) throws IOException {
    // Renders the given columns as a simple text table on the output stream.
    TextMetaDataTable table = new TextMetaDataTable();
    for (FieldSchema column : columns) {
        table.addRow(ShowUtils.extractColumnValues(column, false, null));
    }
    // HiveServer2 output is consumed by JDBC/ODBC clients, so pad the table
    // with spaces only when the query is NOT served through HiveServer2.
    boolean pad = !SessionState.get().isHiveServerQuery();
    outStream.writeBytes(table.renderTable(pad));
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) TextMetaDataTable(org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable)

Example 3 with TextMetaDataTable

use of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in project hive by apache.

The following example shows the getViewInfo method of the TextDescTableFormatter class.

private void getViewInfo(StringBuilder tableInfo, Table table, boolean isOutputPadded) {
    // Appends view details: the original/expanded query text and, for
    // materialized views, rewrite state plus a per-source-table change summary.
    formatOutput("Original Query:", table.getViewOriginalText(), tableInfo);
    formatOutput("Expanded Query:", table.getViewExpandedText(), tableInfo);
    if (!table.isMaterializedView()) {
        return;
    }
    formatOutput("Rewrite Enabled:", table.isRewriteEnabled() ? "Yes" : "No", tableInfo);
    Boolean outdated = table.isOutdatedForRewriting();
    formatOutput("Outdated for Rewriting:", outdated == null ? "Unknown" : (outdated ? "Yes" : "No"), tableInfo);
    tableInfo.append(LINE_DELIM).append("# Materialized View Source table information").append(LINE_DELIM);
    TextMetaDataTable metaDataTable = new TextMetaDataTable();
    metaDataTable.addRow("Table name", "I/U/D since last rebuild");
    List<SourceTable> sources = new ArrayList<>(table.getMVMetadata().getSourceTables());
    // Deterministic ordering: sort by database name, then by table name.
    Comparator<SourceTable> byDbName = Comparator.comparing(source -> source.getTable().getDbName());
    sources.sort(byDbName.thenComparing(source -> source.getTable().getTableName()));
    for (SourceTable source : sources) {
        String qualifiedName = TableName.getQualified(
            source.getTable().getCatName(), source.getTable().getDbName(), source.getTable().getTableName());
        metaDataTable.addRow(qualifiedName, String.format("%d/%d/%d",
            source.getInsertedCount(), source.getUpdatedCount(), source.getDeletedCount()));
    }
    tableInfo.append(metaDataTable.renderTable(isOutputPadded));
}
Also used : CheckConstraint(org.apache.hadoop.hive.ql.metadata.CheckConstraint) Date(java.util.Date) ConfVars(org.apache.hadoop.hive.conf.HiveConf.ConfVars) DefaultConstraintCol(org.apache.hadoop.hive.ql.metadata.DefaultConstraint.DefaultConstraintCol) StatsSetupConst(org.apache.hadoop.hive.common.StatsSetupConst) DataOutputStream(java.io.DataOutputStream) ForeignKeyInfo(org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo) Map(java.util.Map) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) LINE_DELIM(org.apache.hadoop.hive.ql.ddl.ShowUtils.LINE_DELIM) NotNullConstraint(org.apache.hadoop.hive.ql.metadata.NotNullConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) UniqueConstraint(org.apache.hadoop.hive.ql.metadata.UniqueConstraint) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) Set(java.util.Set) SessionState(org.apache.hadoop.hive.ql.session.SessionState) Collectors(java.util.stream.Collectors) StandardCharsets(java.nio.charset.StandardCharsets) List(java.util.List) ForeignKeyCol(org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo.ForeignKeyCol) HiveStringUtils(org.apache.hive.common.util.HiveStringUtils) UniqueConstraintCol(org.apache.hadoop.hive.ql.metadata.UniqueConstraint.UniqueConstraintCol) Entry(java.util.Map.Entry) UnsupportedEncodingException(java.io.UnsupportedEncodingException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) DEFAULT_STRINGBUILDER_SIZE(org.apache.hadoop.hive.ql.ddl.ShowUtils.DEFAULT_STRINGBUILDER_SIZE) DescTableDesc(org.apache.hadoop.hive.ql.ddl.table.info.desc.DescTableDesc) FIELD_DELIM(org.apache.hadoop.hive.ql.ddl.ShowUtils.FIELD_DELIM) CollectionUtils(org.apache.commons.collections4.CollectionUtils) TABLE_IS_CTAS(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_IS_CTAS) ArrayList(java.util.ArrayList) Utilities(org.apache.hadoop.hive.ql.exec.Utilities) PrimaryKeyInfo(org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo) 
ShowUtils.formatOutput(org.apache.hadoop.hive.ql.ddl.ShowUtils.formatOutput) ShowUtils(org.apache.hadoop.hive.ql.ddl.ShowUtils) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) TableName(org.apache.hadoop.hive.common.TableName) TextMetaDataTable(org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable) PlanUtils(org.apache.hadoop.hive.ql.plan.PlanUtils) MapUtils(org.apache.commons.collections4.MapUtils) PartitionTransformSpec(org.apache.hadoop.hive.ql.parse.PartitionTransformSpec) HiveConf(org.apache.hadoop.hive.conf.HiveConf) ALIGNMENT(org.apache.hadoop.hive.ql.ddl.ShowUtils.ALIGNMENT) StringEscapeUtils(org.apache.commons.text.StringEscapeUtils) Table(org.apache.hadoop.hive.ql.metadata.Table) IOException(java.io.IOException) Partition(org.apache.hadoop.hive.ql.metadata.Partition) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) TreeMap(java.util.TreeMap) TableType(org.apache.hadoop.hive.metastore.TableType) Comparator(java.util.Comparator) CheckConstraintCol(org.apache.hadoop.hive.ql.metadata.CheckConstraint.CheckConstraintCol) Collections(java.util.Collections) ArrayList(java.util.ArrayList) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) TextMetaDataTable(org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable)

Example 4 with TextMetaDataTable

use of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in project hive by apache.

The following example shows the addPartitionTransformData method of the TextDescTableFormatter class.

private void addPartitionTransformData(DataOutputStream out, Table table, boolean isOutputPadded) throws IOException {
    // For non-native tables whose storage handler supports partition transforms,
    // renders a "# Partition Transform Information" section; otherwise writes nothing.
    StringBuilder output = new StringBuilder();
    if (table.isNonNative() && table.getStorageHandler() != null
            && table.getStorageHandler().supportsPartitionTransform()) {
        List<PartitionTransformSpec> partSpecs = table.getStorageHandler().getPartitionTransformSpec(table);
        if (partSpecs != null && !partSpecs.isEmpty()) {
            TextMetaDataTable metaDataTable = new TextMetaDataTable();
            output.append(LINE_DELIM).append("# Partition Transform Information").append(LINE_DELIM).append("# ");
            metaDataTable.addRow(DescTableDesc.PARTITION_TRANSFORM_SPEC_SCHEMA.split("#")[0].split(","));
            for (PartitionTransformSpec spec : partSpecs) {
                // Transform column stays null when no transform type is set.
                String transform = null;
                if (spec.getTransformType() != null) {
                    String typeName = spec.getTransformType().name();
                    transform = spec.getTransformParam().isPresent()
                        ? typeName + "[" + spec.getTransformParam().get() + "]"
                        : typeName;
                }
                metaDataTable.addRow(spec.getColumnName(), transform);
            }
            output.append(metaDataTable.renderTable(isOutputPadded));
        }
    }
    out.write(output.toString().getBytes(StandardCharsets.UTF_8));
}
Also used : PartitionTransformSpec(org.apache.hadoop.hive.ql.parse.PartitionTransformSpec) TextMetaDataTable(org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable)

Example 5 with TextMetaDataTable

use of org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable in project hive by apache.

The following example shows the addPartitionData method of the TextDescTableFormatter class.

private void addPartitionData(DataOutputStream out, HiveConf conf, String columnPath, Table table, boolean isFormatted, boolean isOutputPadded) throws IOException {
    // When no column path is given, renders a "# Partition Information" table of
    // the partition columns (if enabled by configuration). When a column path is
    // given, emits the COLUMN_STATS_ACCURATE state for that column instead.
    String partitionData = "";
    if (columnPath == null) {
        List<FieldSchema> partitionColumns = table.isPartitioned() ? table.getPartCols() : null;
        if (CollectionUtils.isNotEmpty(partitionColumns)
                && conf.getBoolVar(ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY)) {
            TextMetaDataTable metaDataTable = new TextMetaDataTable();
            partitionData += LINE_DELIM + "# Partition Information" + LINE_DELIM + "# ";
            metaDataTable.addRow(DescTableDesc.SCHEMA.split("#")[0].split(","));
            for (FieldSchema partitionColumn : partitionColumns) {
                metaDataTable.addRow(ShowUtils.extractColumnValues(partitionColumn, false, null));
            }
            partitionData += metaDataTable.renderTable(isOutputPadded);
        }
    } else {
        // BUG FIX: the original dereferenced table.getParameters() BEFORE its
        // null check ("table.getParameters() != null" came after the get(...)
        // call), so the check could never prevent an NPE. Check the map first.
        Map<String, String> parameters = table.getParameters();
        String statsState = parameters == null ? null : parameters.get(StatsSetupConst.COLUMN_STATS_ACCURATE);
        if (statsState != null) {
            StringBuilder stringBuilder = new StringBuilder();
            // Formatted output uses Java-style escaping; plain output uses Hive's.
            formatOutput(StatsSetupConst.COLUMN_STATS_ACCURATE,
                isFormatted ? StringEscapeUtils.escapeJava(statsState) : HiveStringUtils.escapeJava(statsState),
                stringBuilder, isOutputPadded);
            partitionData += stringBuilder.toString();
        }
    }
    out.write(partitionData.getBytes(StandardCharsets.UTF_8));
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) TextMetaDataTable(org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable)

Aggregations

TextMetaDataTable (org.apache.hadoop.hive.ql.ddl.ShowUtils.TextMetaDataTable)5 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)4 PartitionTransformSpec (org.apache.hadoop.hive.ql.parse.PartitionTransformSpec)2 DataOutputStream (java.io.DataOutputStream)1 IOException (java.io.IOException)1 UnsupportedEncodingException (java.io.UnsupportedEncodingException)1 StandardCharsets (java.nio.charset.StandardCharsets)1 ArrayList (java.util.ArrayList)1 Collections (java.util.Collections)1 Comparator (java.util.Comparator)1 Date (java.util.Date)1 List (java.util.List)1 Map (java.util.Map)1 Entry (java.util.Map.Entry)1 Set (java.util.Set)1 TreeMap (java.util.TreeMap)1 Collectors (java.util.stream.Collectors)1 CollectionUtils (org.apache.commons.collections4.CollectionUtils)1 MapUtils (org.apache.commons.collections4.MapUtils)1 StringEscapeUtils (org.apache.commons.text.StringEscapeUtils)1