Use of com.dtstack.dtcenter.loader.dto.Table in project Taier by DTStack.
The example below is the inferHdfsParams method of the HiveBase class.
protected void inferHdfsParams() {
    // Run inference only once, and only when a table name is configured.
    if (inferred.compareAndSet(false, true) && StringUtils.isNotBlank(table)) {
        DataSourceType sourceType = DataSourceType.getSourceType(dataSourceType);
        // Assemble the JDBC connection info into a source descriptor.
        JSONObject dataJson = new JSONObject();
        dataJson.put(SourceDTOType.JDBC_URL, jdbcUrl);
        dataJson.put(SourceDTOType.JDBC_USERNAME, username);
        dataJson.put(SourceDTOType.JDBC_PASSWORD, password);
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(dataJson, sourceType.getVal(), kerberosConfig, Maps.newHashMap());
        // Fetch table metadata and split columns into data columns and partition columns.
        IClient client = ClientCache.getClient(sourceType.getVal());
        Table tableInfo = client.getTable(sourceDTO, SqlQueryDTO.builder().tableName(this.table).build());
        List<ColumnMetaDTO> columnMetaData = tableInfo.getColumns();
        for (ColumnMetaDTO dto : columnMetaData) {
            if (!dto.getPart()) {
                fullColumnNames.add(dto.getKey());
                fullColumnTypes.add(dto.getType());
            } else {
                isPartitioned = true;
                partitionedBy.add(dto.getKey());
            }
        }
        // For partitioned tables, record every existing partition.
        if (isPartitioned) {
            ITable tableClient = ClientCache.getTable(sourceType.getVal());
            List<String> partitions = tableClient.showPartitions(sourceDTO, table);
            partitionList.addAll(partitions);
        }
        // Record the table's physical layout for the HDFS reader.
        this.dbName = tableInfo.getDb();
        this.path = tableInfo.getPath();
        this.fileType = tableInfo.getStoreType();
        this.fieldDelimiter = tableInfo.getDelim();
        this.isTransaction = tableInfo.getIsTransTable();
    }
    // Map each configured column to its positional index in the full schema.
    for (int i = 0; i < fullColumnNames.size(); i++) {
        for (Object col : column) {
            Map<String, Object> colMap = (Map<String, Object>) col;
            if (fullColumnNames.get(i).equals(colMap.get("key"))) {
                colMap.put("index", i);
                break;
            }
        }
    }
}
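For context, the loader calls used above can be exercised on their own. The sketch below is a minimal, hypothetical example (the method name and parameters are assumptions, not Taier code) that fetches a Hive table's metadata and prints its data columns and partitions, reusing only the API surface visible in inferHdfsParams and the same loader types.

// Hypothetical helper, assuming an already-built ISourceDTO for a Hive source
// and the same imports as the snippet above.
static void describeHiveTable(ISourceDTO sourceDTO, int sourceTypeVal, String tableName) {
    IClient client = ClientCache.getClient(sourceTypeVal);
    Table tableInfo = client.getTable(sourceDTO, SqlQueryDTO.builder().tableName(tableName).build());
    for (ColumnMetaDTO dto : tableInfo.getColumns()) {
        if (dto.getPart()) {
            System.out.println("partition column: " + dto.getKey());
        } else {
            System.out.println("column: " + dto.getKey() + " " + dto.getType());
        }
    }
    // showPartitions returns one entry per physical partition,
    // typically in Hive's key=value spec form.
    List<String> partitions = ClientCache.getTable(sourceTypeVal).showPartitions(sourceDTO, tableName);
    partitions.forEach(p -> System.out.println("partition: " + p));
}

Note that inferHdfsParams guards the remote metadata lookup with inferred.compareAndSet(false, true), so concurrent callers trigger the expensive table and partition queries at most once.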