Use of org.apache.hadoop.hive.metastore.TableIterable in project hive by apache.
Shown below: the runInternal method of the GetColumnsOperation class.
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.RUNNING);
  LOG.info("Fetching column metadata");
  try {
    IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
    String schemaPattern = convertSchemaPattern(schemaName);
    String tablePattern = convertIdentifierPattern(tableName, true);
    Pattern columnPattern = null;
    if (columnName != null) {
      columnPattern = Pattern.compile(convertIdentifierPattern(columnName, false));
    }
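    // List the databases matching the schema pattern, then the tables matching the table pattern in each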
    List<String> dbNames = metastoreClient.getDatabases(schemaPattern);
    Collections.sort(dbNames);
    Map<String, List<String>> db2Tabs = new HashMap<>();
    for (String dbName : dbNames) {
      List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
      Collections.sort(tableNames);
      db2Tabs.put(dbName, tableNames);
    }
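    // With SQL standard based authorization (V2) enabled, check access to every database and table touched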
    if (isAuthV2Enabled()) {
      List<HivePrivilegeObject> privObjs = getPrivObjs(db2Tabs);
      String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName
          + ", tablePattern : " + tableName;
      authorizeMetaGets(HiveOperationType.GET_COLUMNS, privObjs, cmdStr);
    }
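    // Upper bound on how many objects a single metastore call may retrieve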
    int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
    for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
      String dbName = dbTabs.getKey();
      List<String> tableNames = dbTabs.getValue();
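      // TableIterable retrieves the Table objects lazily, at most maxBatchSize per metastore call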
      for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
        TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
        List<SQLPrimaryKey> primaryKeys =
            metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
        Set<String> pkColNames = new HashSet<>();
        for (SQLPrimaryKey key : primaryKeys) {
          pkColNames.add(key.getColumn_name().toLowerCase());
        }
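        // Primary-key membership determines the NULLABLE and IS_NULLABLE values emitted below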
        for (ColumnDescriptor column : schema.getColumnDescriptors()) {
          if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
            continue;
          }
          Object[] rowData = new Object[] {
              null, // TABLE_CAT
              table.getDbName(), // TABLE_SCHEM
              table.getTableName(), // TABLE_NAME
              column.getName(), // COLUMN_NAME
              column.getType().toJavaSQLType(), // DATA_TYPE
              column.getTypeName(), // TYPE_NAME
              column.getTypeDescriptor().getColumnSize(), // COLUMN_SIZE
              null, // BUFFER_LENGTH, unused
              column.getTypeDescriptor().getDecimalDigits(), // DECIMAL_DIGITS
              column.getType().getNumPrecRadix(), // NUM_PREC_RADIX
              pkColNames.contains(column.getName().toLowerCase())
                  ? DatabaseMetaData.columnNoNulls
                  : DatabaseMetaData.columnNullable, // NULLABLE
              column.getComment(), // REMARKS
              null, // COLUMN_DEF
              null, // SQL_DATA_TYPE
              null, // SQL_DATETIME_SUB
              null, // CHAR_OCTET_LENGTH
              column.getOrdinalPosition(), // ORDINAL_POSITION
              pkColNames.contains(column.getName().toLowerCase()) ? "NO" : "YES", // IS_NULLABLE
              null, // SCOPE_CATALOG
              null, // SCOPE_SCHEMA
              null, // SCOPE_TABLE
              null, // SOURCE_DATA_TYPE
              "NO" // IS_AUTO_INCREMENT
          };
          rowSet.addRow(rowData);
          if (LOG.isDebugEnabled()) {
            String debugMessage = getDebugMessage("column", RESULT_SET_SCHEMA);
            LOG.debug(debugMessage, rowData);
          }
        }
      }
    }
    if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
      LOG.debug("No column metadata has been returned.");
    }
    setState(OperationState.FINISHED);
    LOG.info("Fetching column metadata has been successfully finished");
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException(e);
  }
}
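For context, here is a minimal standalone sketch of driving TableIterable outside of HiveServer2. It assumes a reachable metastore configured through hive-site.xml on the classpath; the database name, table names, and batch size of 2 are placeholder values, and TableIterableDemo is a hypothetical class, not part of Hive.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.TableIterable;
import org.apache.hadoop.hive.metastore.api.Table;

public class TableIterableDemo {
  public static void main(String[] args) throws Exception {
    // Assumes hive-site.xml (with the metastore URI) is on the classpath.
    IMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
    try {
      // Placeholder table names; in practice these come from client.getTables(...).
      List<String> tableNames = Arrays.asList("t1", "t2", "t3");
      // Iteration fetches Table objects lazily, at most 2 per metastore call here,
      // instead of materializing every table object up front.
      for (Table table : new TableIterable(client, "default", tableNames, 2)) {
        System.out.println(table.getTableName() + " : " + table.getTableType());
      }
    } finally {
      client.close();
    }
  }
}

The batch size trades round trips against the size of each metastore response, which is why runInternal above takes it from METASTORE_BATCH_RETRIEVE_MAX rather than hardcoding it.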