Use of com.dtstack.dtcenter.loader.dto.SqlQueryDTO in project Taier by DTStack.
The class DatasourceService, method tablelist.
/**
 * Data sync - fetch the table information of a database.
 *
 * @param sourceId data source id
 * @param schema   schema to query
 * @param name     table-name pattern for fuzzy matching
 * @return matching table names
 */
public List<String> tablelist(Long sourceId, String schema, String name) {
    List<String> tables = new ArrayList<>();
    BatchDataSource source = getOne(sourceId);
    String dataJson = source.getDataJson();
    JSONObject json = JSON.parseObject(dataJson);
    // the database to query
    String dataSource = schema;
    IClient client = ClientCache.getClient(source.getType());
    ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(json, source.getType(), fillKerberosConfig(source.getId()), Maps.newHashMap());
    SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().tableNamePattern(name).limit(5000).build();
    sqlQueryDTO.setView(true);
    sqlQueryDTO.setSchema(dataSource);
    // for Hive-type data sources, dirty-data tables and temporary tables are filtered out
    tables = client.getTableList(sourceDTO, sqlQueryDTO);
    return tables;
}
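As a point of reference, here is a minimal, self-contained sketch of the same SqlQueryDTO calls against a plain MySQL source, outside of DatasourceService. The connection values are placeholders, and the direct use of Mysql5SourceDTO (instead of going through SourceDTOType.getSourceDTO) is an assumption about the loader's source DTO classes:
import java.util.List;

import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.source.Mysql5SourceDTO;
import com.dtstack.dtcenter.loader.source.DataSourceType;

public class TableListExample {
    public static void main(String[] args) {
        // placeholder connection info; replace with a reachable MySQL instance
        Mysql5SourceDTO sourceDTO = Mysql5SourceDTO.builder()
                .url("jdbc:mysql://127.0.0.1:3306/demo")
                .username("demo")
                .password("demo")
                .build();
        IClient client = ClientCache.getClient(DataSourceType.MySQL.getVal());
        // fuzzy-match table names containing "order", capped at 100 results
        SqlQueryDTO queryDTO = SqlQueryDTO.builder()
                .tableNamePattern("order")
                .limit(100)
                .build();
        queryDTO.setView(true);      // include views, as tablelist does
        queryDTO.setSchema("demo");  // the database/schema to search
        List<String> tables = client.getTableList(sourceDTO, queryDTO);
        tables.forEach(System.out::println);
    }
}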
Use of com.dtstack.dtcenter.loader.dto.SqlQueryDTO in project Taier by DTStack.
The class DatasourceService, method getCreateTargetTableSql.
/**
 * One-click generation of the target table.
 *
 * @param originSourceId id of the source data source
 * @param targetSourceId id of the target data source
 * @param tableName      source table name
 * @param partition      partition spec in path form, e.g. pt=20220101/region=cn
 * @param sourceSchema   schema of the source table
 * @param targetSchema   schema of the target table
 * @return the formatted CREATE TABLE statement for the target table
 */
public String getCreateTargetTableSql(Long originSourceId, Long targetSourceId, String tableName, String partition, String sourceSchema, String targetSchema) {
    try {
        BatchDataSource originSource = getOne(originSourceId);
        JSONObject reader = JSON.parseObject(originSource.getDataJson());
        if (!ORIGIN_TABLE_ALLOW_TYPES.contains(originSource.getType())) {
            throw new RdosDefineException("One-click target table generation only supports relational databases, Hive and MaxCompute sources");
        }
        Map<String, Object> kerberosConfig = datasourceService.fillKerberosConfig(originSourceId);
        Map<String, Object> expandConfigPrepare = datasourceService.expandConfigPrepare(originSourceId);
        List<JSONObject> columnMetaData = new ArrayList<>();
        IClient iClient = ClientCache.getClient(originSource.getType());
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(reader, originSource.getType(), kerberosConfig, expandConfigPrepare);
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().schema(sourceSchema).tableName(tableName).build();
        List<ColumnMetaDTO> columnMeta = iClient.getColumnMetaData(sourceDTO, sqlQueryDTO);
        if (CollectionUtils.isNotEmpty(columnMeta)) {
            for (ColumnMetaDTO columnMetaDTO : columnMeta) {
                JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(columnMetaDTO));
                jsonObject.put("isPart", columnMetaDTO.getPart());
                columnMetaData.add(jsonObject);
            }
        }
        String comment = iClient.getTableMetaComment(sourceDTO, sqlQueryDTO);
        List<String> partList = null;
        if (StringUtils.isNotBlank(partition)) {
            // partition is a path-style spec such as pt=20220101/region=cn;
            // collect every partition column except the default "pt"
            String[] parts = partition.split("/");
            partList = new ArrayList<>();
            for (String part : parts) {
                String[] partDetail = part.split("=");
                String partCol = partDetail[0];
                if (!partCol.equals("pt")) {
                    partList.add(partCol);
                }
            }
        }
        List<JSONObject> columns = null;
        BatchDataSource targetDataSource = getOne(targetSourceId);
        String sql;
        // table names such as 'CHARNT'.'CUSTMERS_10_MIN' need to be normalized
        tableName = this.formatTableName(tableName);
        // fall back to Hive when the target data source is missing
        int sourceType = Objects.isNull(targetDataSource) ? DataSourceType.HIVE.getVal() : targetDataSource.getType();
        if (CREATE_TABLE_TO_PG_TABLE.contains(sourceType)) {
            // note: ADB For PG is not handled here; it gets its own branch below
            columns = convertWriterColumns(columnMetaData, new PostgreSqlWriterFormat());
            sql = generalLibraCreateSql(columns, tableName, targetSchema);
        } else if (sourceType == DataSourceType.TiDB.getVal() || sourceType == DataSourceType.MySQL.getVal()) {
            columns = convertTidbWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = generalTidbCreateSql(columns, tableName, comment);
        } else if (sourceType == DataSourceType.Oracle.getVal()) {
            columns = convertWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = this.generalTidbCreateSql(columns, tableName, comment);
        } else if (sourceType == DataSourceType.ADB_FOR_PG.getVal()) {
            columns = ADBForPGUtil.convertADBForPGWriterColumns(columnMetaData);
            sql = ADBForPGUtil.generalCreateSql(targetSchema, tableName, columns, comment);
        } else {
            // default: build a Hive CREATE TABLE statement
            columns = convertWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = generalCreateSql(columns, partList, tableName, comment);
        }
        return sqlFormat(sql);
    } catch (Exception e) {
        throw new RdosDefineException("Failed to generate the target table", e);
    }
}
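The partition handling above expects a path-style spec such as pt=20220101/region=cn: each /-separated segment is a col=value pair, and every column except the implicit pt becomes part of the generated partition list. A standalone sketch of that parsing (class and method names here are illustrative, not from Taier):
import java.util.ArrayList;
import java.util.List;

public class PartitionSpecExample {
    // mirrors the loop in getCreateTargetTableSql: keep every partition
    // column name except the implicit "pt" column
    static List<String> extraPartitionColumns(String partition) {
        List<String> partList = new ArrayList<>();
        for (String part : partition.split("/")) {
            String partCol = part.split("=")[0];
            if (!"pt".equals(partCol)) {
                partList.add(partCol);
            }
        }
        return partList;
    }

    public static void main(String[] args) {
        // prints [region, city]; "pt" is dropped
        System.out.println(extraPartitionColumns("pt=20220101/region=cn/city=bj"));
    }
}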
Use of com.dtstack.dtcenter.loader.dto.SqlQueryDTO in project Taier by DTStack.
The class ImpalaUtils, method getImpalaHiveTableDetailInfo.
public static Map<String, Object> getImpalaHiveTableDetailInfo(ISourceDTO iSourceDTO, String tableName) {
    IClient client = ClientCache.getClient(DataSourceType.IMPALA.getVal());
    SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().tableName(tableName).build();
    com.dtstack.dtcenter.loader.dto.Table tableInfo = client.getTable(iSourceDTO, sqlQueryDTO);
    List<ColumnMetaDTO> columnMetaDTOList = tableInfo.getColumns();
    List<Column> columns = new ArrayList<>();
    List<Column> partitionColumns = new ArrayList<>();
    for (int i = 0; i < columnMetaDTOList.size(); i++) {
        ColumnMetaDTO columnMetaDTO = columnMetaDTOList.get(i);
        Column column = new Column();
        column.setName(columnMetaDTO.getKey());
        column.setType(columnMetaDTO.getType());
        column.setComment(columnMetaDTO.getComment());
        column.setIndex(i);
        columns.add(column);
        if (columnMetaDTO.getPart()) {
            partitionColumns.add(column);
        }
    }
    Map<String, Object> map = new HashMap<>();
    map.put("allColumns", columns);
    map.put("partitionColumns", partitionColumns);
    map.put("path", tableInfo.getPath());
    map.put("fieldDelimiter", tableInfo.getDelim());
    return map;
}
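A hedged usage sketch for this helper follows. ImpalaUtils is the class above; ImpalaSourceDTO as the concrete ISourceDTO, its builder fields, and the JDBC URL are assumptions about the loader and should be checked against your version:
import java.util.Map;

import com.dtstack.dtcenter.loader.dto.source.ImpalaSourceDTO;

public class ImpalaDetailExample {
    public static void main(String[] args) {
        // placeholder connection; point this at a real impalad JDBC endpoint
        ImpalaSourceDTO sourceDTO = ImpalaSourceDTO.builder()
                .url("jdbc:impala://127.0.0.1:21050/default")
                .username("demo")
                .password("demo")
                .build();
        // "orders" is a hypothetical table name
        Map<String, Object> detail = ImpalaUtils.getImpalaHiveTableDetailInfo(sourceDTO, "orders");
        System.out.println("path: " + detail.get("path"));
        System.out.println("fieldDelimiter: " + detail.get("fieldDelimiter"));
        System.out.println("partition columns: " + detail.get("partitionColumns"));
    }
}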