
Example 6 with ColumnMetaDTO

Use of com.dtstack.dtcenter.loader.dto.ColumnMetaDTO in project Taier by DTStack.

From class DatasourceService, method getCreateTargetTableSql:

/**
 * Generate the target table DDL in one click.
 *
 * @param originSourceId id of the origin data source whose table structure is read
 * @param targetSourceId id of the target data source, which decides the CREATE TABLE dialect
 * @param tableName      name of the table in the origin source
 * @param partition      partition spec such as "pt=20220101/region=cn", split on "/"
 * @param sourceSchema   schema of the origin table
 * @param targetSchema   schema in which the target table is created
 */
public String getCreateTargetTableSql(Long originSourceId, Long targetSourceId, String tableName, String partition, String sourceSchema, String targetSchema) {
    try {
        BatchDataSource originSource = getOne(originSourceId);
        JSONObject reader = JSON.parseObject(originSource.getDataJson());
        if (!ORIGIN_TABLE_ALLOW_TYPES.contains(originSource.getType())) {
            throw new RdosDefineException("One-click target table generation only supports relational databases, Hive and MaxCompute");
        }
        Map<String, Object> kerberosConfig = datasourceService.fillKerberosConfig(originSourceId);
        Map<String, Object> expandConfigPrepare = datasourceService.expandConfigPrepare(originSourceId);
        List<JSONObject> columnMetaData = new ArrayList<>();
        IClient iClient = ClientCache.getClient(originSource.getType());
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(reader, originSource.getType(), kerberosConfig, expandConfigPrepare);
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().schema(sourceSchema).tableName(tableName).build();
        List<ColumnMetaDTO> columnMeta = iClient.getColumnMetaData(sourceDTO, sqlQueryDTO);
        if (CollectionUtils.isNotEmpty(columnMeta)) {
            for (ColumnMetaDTO columnMetaDTO : columnMeta) {
                JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(columnMetaDTO));
                jsonObject.put("isPart", columnMetaDTO.getPart());
                columnMetaData.add(jsonObject);
            }
        }
        String comment = iClient.getTableMetaComment(sourceDTO, sqlQueryDTO);
        List<String> partList = null;
        if (StringUtils.isNotBlank(partition)) {
            String[] parts = partition.split("/");
            partList = new ArrayList<>();
            for (String part : parts) {
                String[] partDetail = part.split("=");
                String partCol = partDetail[0];
                if (!partCol.equals("pt")) {
                    partList.add(partCol);
                }
            }
        }
        List<JSONObject> columns = null;
        BatchDataSource targetDataSource = getOne(targetSourceId);
        String sql;
        // Table names like 'CHARNT.'CUSTMERS_10_MIN' need to be normalized first
        tableName = this.formatTableName(tableName);
        // Default to Hive DDL when no target data source is configured
        int sourceType = targetDataSource == null ? DataSourceType.HIVE.getVal() : targetDataSource.getType();
        if (CREATE_TABLE_TO_PG_TABLE.contains(sourceType)) {
            // Note: ADB for PG is not handled here; it gets its own branch below
            columns = convertWriterColumns(columnMetaData, new PostgreSqlWriterFormat());
            sql = generalLibraCreateSql(columns, tableName, targetSchema);
        } else if (sourceType == DataSourceType.TiDB.getVal() || sourceType == DataSourceType.MySQL.getVal()) {
            columns = convertTidbWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = generalTidbCreateSql(columns, tableName, comment);
        } else if (sourceType == DataSourceType.Oracle.getVal()) {
            columns = convertWriterColumns(columnMetaData, TYPE_FORMAT);
            // Oracle reuses the TiDB-style CREATE TABLE generator
            sql = this.generalTidbCreateSql(columns, tableName, comment);
        } else if (sourceType == DataSourceType.ADB_FOR_PG.getVal()) {
            columns = ADBForPGUtil.convertADBForPGWriterColumns(columnMetaData);
            sql = ADBForPGUtil.generalCreateSql(targetSchema, tableName, columns, comment);
        } else {
            // Default: generate a Hive CREATE TABLE statement
            columns = convertWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = generalCreateSql(columns, partList, tableName, comment);
        }
        return sqlFormat(sql);
    } catch (Exception e) {
        throw new RdosDefineException("Failed to generate the target table", e);
    }
}
Also used : BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) ArrayList(java.util.ArrayList) IClient(com.dtstack.dtcenter.loader.client.IClient) PostgreSqlWriterFormat(com.dtstack.taier.develop.utils.develop.sync.format.writer.PostgreSqlWriterFormat) SftpException(com.jcraft.jsch.SftpException) IOException(java.io.IOException) PubSvcDefineException(com.dtstack.taier.common.exception.PubSvcDefineException) DtCenterDefException(com.dtstack.taier.common.exception.DtCenterDefException) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) JSONObject(com.alibaba.fastjson.JSONObject) SqlQueryDTO(com.dtstack.dtcenter.loader.dto.SqlQueryDTO) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO)
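
A minimal caller sketch for the method above; the constructor wiring, the source/target ids and the table names are hypothetical placeholders, not values from the Taier project:

public class CreateTargetTableSketch {
    // Hypothetical wiring: in Taier the service would be injected by Spring
    private final DatasourceService datasourceService;

    public CreateTargetTableSketch(DatasourceService datasourceService) {
        this.datasourceService = datasourceService;
    }

    public String buildTargetDdl() {
        Long mysqlSourceId = 1L;  // hypothetical id of a registered MySQL source
        Long hiveTargetId = 2L;   // hypothetical id of a registered Hive target
        // "pt=20220101/region=cn" is split on "/"; the "pt" column itself is
        // skipped, so only "region" ends up in the partition column list
        return datasourceService.getCreateTargetTableSql(
                mysqlSourceId, hiveTargetId,
                "orders", "pt=20220101/region=cn",
                "sales_db", "dw");
    }
}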

Example 7 with ColumnMetaDTO

Use of com.dtstack.dtcenter.loader.dto.ColumnMetaDTO in project Taier by DTStack.

From class ImpalaUtils, method getImpalaHiveTableDetailInfo:

public static Map<String, Object> getImpalaHiveTableDetailInfo(ISourceDTO iSourceDTO, String tableName) {
    IClient client = ClientCache.getClient(DataSourceType.IMPALA.getVal());
    SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().tableName(tableName).build();
    com.dtstack.dtcenter.loader.dto.Table tableInfo = client.getTable(iSourceDTO, sqlQueryDTO);
    List<ColumnMetaDTO> columnMetaDTOList = tableInfo.getColumns();
    List<Column> columns = new ArrayList<>();
    List<Column> partitionColumns = new ArrayList<>();
    for (int i = 0; i < columnMetaDTOList.size(); i++) {
        ColumnMetaDTO columnMetaDTO = columnMetaDTOList.get(i);
        Column column = new Column();
        column.setName(columnMetaDTO.getKey());
        column.setType(columnMetaDTO.getType());
        column.setComment(columnMetaDTO.getComment());
        column.setIndex(i);
        columns.add(column);
        // getPart() returns a Boolean; guard against null before unboxing
        if (Boolean.TRUE.equals(columnMetaDTO.getPart())) {
            partitionColumns.add(column);
        }
    }
    Map<String, Object> map = new HashMap<>();
    map.put("allColumns", columns);
    map.put("partitionColumns", partitionColumns);
    map.put("path", tableInfo.getPath());
    map.put("fieldDelimiter", tableInfo.getDelim());
    return map;
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IClient(com.dtstack.dtcenter.loader.client.IClient) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) Column(com.dtstack.taier.pluginapi.pojo.Column) SqlQueryDTO(com.dtstack.dtcenter.loader.dto.SqlQueryDTO)
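
A sketch of invoking the utility above; building the source via ImpalaSourceDTO.builder() with url/username/password follows the loader's usual builder pattern but is an assumption here, as are the connection details:

import java.util.List;
import java.util.Map;
import com.dtstack.dtcenter.loader.dto.source.ISourceDTO;
import com.dtstack.dtcenter.loader.dto.source.ImpalaSourceDTO;
import com.dtstack.taier.pluginapi.pojo.Column;

public class ImpalaDetailSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // hypothetical connection details
        ISourceDTO impalaSource = ImpalaSourceDTO.builder()
                .url("jdbc:impala://localhost:21050/default")
                .username("user")
                .password("secret")
                .build();
        Map<String, Object> detail = ImpalaUtils.getImpalaHiveTableDetailInfo(impalaSource, "my_table");
        List<Column> allColumns = (List<Column>) detail.get("allColumns");
        List<Column> partitionColumns = (List<Column>) detail.get("partitionColumns");
        System.out.println(detail.get("path"));            // storage location of the table
        System.out.println(detail.get("fieldDelimiter"));  // field delimiter for text tables
        System.out.println(allColumns.size() + " columns, "
                + partitionColumns.size() + " partition columns");
    }
}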

Aggregations

IClient (com.dtstack.dtcenter.loader.client.IClient): 7
ColumnMetaDTO (com.dtstack.dtcenter.loader.dto.ColumnMetaDTO): 7
ISourceDTO (com.dtstack.dtcenter.loader.dto.source.ISourceDTO): 6
JSONObject (com.alibaba.fastjson.JSONObject): 5
ArrayList (java.util.ArrayList): 5
SqlQueryDTO (com.dtstack.dtcenter.loader.dto.SqlQueryDTO): 4
DtCenterDefException (com.dtstack.taier.common.exception.DtCenterDefException): 3
RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException): 3
IOException (java.io.IOException): 3
PubSvcDefineException (com.dtstack.taier.common.exception.PubSvcDefineException): 2
BatchDataSource (com.dtstack.taier.dao.domain.BatchDataSource): 2
SftpException (com.jcraft.jsch.SftpException): 2
Map (java.util.Map): 2
ITable (com.dtstack.dtcenter.loader.client.ITable): 1
Table (com.dtstack.dtcenter.loader.dto.Table): 1
DataSourceType (com.dtstack.dtcenter.loader.source.DataSourceType): 1
JdbcInfo (com.dtstack.taier.common.engine.JdbcInfo): 1
PostgreSqlWriterFormat (com.dtstack.taier.develop.utils.develop.sync.format.writer.PostgreSqlWriterFormat): 1
Column (com.dtstack.taier.pluginapi.pojo.Column): 1
Connection (java.sql.Connection): 1
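
The pattern these aggregated classes keep recombining into is the same in every example: resolve an IClient from ClientCache, describe the query with a SqlQueryDTO, then read the resulting ColumnMetaDTO list. A minimal standalone sketch, assuming a MySQL source built with MysqlSourceDTO.builder() and hypothetical connection details:

import java.util.List;
import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.ColumnMetaDTO;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.source.ISourceDTO;
import com.dtstack.dtcenter.loader.dto.source.MysqlSourceDTO;
import com.dtstack.dtcenter.loader.source.DataSourceType;

public class ColumnMetaSketch {
    public static void main(String[] args) {
        // hypothetical MySQL connection details
        ISourceDTO source = MysqlSourceDTO.builder()
                .url("jdbc:mysql://localhost:3306/demo")
                .username("root")
                .password("secret")
                .build();
        IClient client = ClientCache.getClient(DataSourceType.MySQL.getVal());
        SqlQueryDTO query = SqlQueryDTO.builder().tableName("orders").build();
        // Each ColumnMetaDTO carries the column name (key), type, comment,
        // and a flag marking partition columns
        List<ColumnMetaDTO> meta = client.getColumnMetaData(source, query);
        for (ColumnMetaDTO c : meta) {
            System.out.printf("%s %s part=%s%n", c.getKey(), c.getType(), c.getPart());
        }
    }
}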