Search in sources :

Example 1 with PostgreSqlWriterFormat

Use of com.dtstack.taier.develop.utils.develop.sync.format.writer.PostgreSqlWriterFormat in the Taier project by DTStack.

The following example shows the getCreateTargetTableSql method of the DatasourceService class.

/**
 * 一键生成目标表 — builds the CREATE TABLE DDL for a target data source by
 * mirroring the column metadata of an existing source table.
 *
 * @param originSourceId id of the origin data source whose table structure is read
 * @param targetSourceId id of the target data source the table will be created in;
 *                       if it cannot be resolved, hive-style DDL is generated
 * @param tableName      name of the source table to mirror (quoting is normalized)
 * @param partition      partition spec in {@code col1=v1/col2=v2} form; may be blank
 * @param sourceSchema   schema of the source table
 * @param targetSchema   schema in which the target table is created (PG / ADB for PG)
 * @return the formatted CREATE TABLE statement
 * @throws RdosDefineException if the origin type is unsupported or DDL generation fails
 */
public String getCreateTargetTableSql(Long originSourceId, Long targetSourceId, String tableName, String partition, String sourceSchema, String targetSchema) {
    try {
        BatchDataSource originSource = getOne(originSourceId);
        JSONObject reader = JSON.parseObject(originSource.getDataJson());
        if (!ORIGIN_TABLE_ALLOW_TYPES.contains(originSource.getType())) {
            // Only relational databases, hive and maxCompute are supported as origins.
            throw new RdosDefineException("一键生成目标表,只支持关系型数据库、hive和maxCompute类型");
        }
        Map<String, Object> kerberosConfig = datasourceService.fillKerberosConfig(originSourceId);
        Map<String, Object> expandConfigPrepare = datasourceService.expandConfigPrepare(originSourceId);
        IClient iClient = ClientCache.getClient(originSource.getType());
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(reader, originSource.getType(), kerberosConfig, expandConfigPrepare);
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().schema(sourceSchema).tableName(tableName).build();

        // Read source column metadata and carry the partition flag over explicitly,
        // because plain JSON serialization of ColumnMetaDTO does not expose it as "isPart".
        List<ColumnMetaDTO> columnMeta = iClient.getColumnMetaData(sourceDTO, sqlQueryDTO);
        List<JSONObject> columnMetaData = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(columnMeta)) {
            for (ColumnMetaDTO columnMetaDTO : columnMeta) {
                JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(columnMetaDTO));
                jsonObject.put("isPart", columnMetaDTO.getPart());
                columnMetaData.add(jsonObject);
            }
        }
        String comment = iClient.getTableMetaComment(sourceDTO, sqlQueryDTO);

        // Parse the "col=value/col=value" partition spec; the reserved "pt" column is skipped.
        List<String> partList = null;
        if (StringUtils.isNotBlank(partition)) {
            partList = new ArrayList<>();
            for (String part : partition.split("/")) {
                String partCol = part.split("=")[0];
                if (!partCol.equals("pt")) {
                    partList.add(partCol);
                }
            }
        }

        // e.g. 'CHARNT.'CUSTMERS_10_MIN' — quoting must be normalized before use in DDL.
        tableName = this.formatTableName(tableName);

        // BUGFIX: the previous code only assigned sourceType inside an
        // `if (targetDataSource != null)` guard, so its Objects.isNull(...) ? HIVE : ...
        // fallback was dead code and a missing target left sourceType == 0.
        // Default to HIVE explicitly, matching the original intent.
        BatchDataSource targetDataSource = getOne(targetSourceId);
        int sourceType = targetDataSource == null ? DataSourceType.HIVE.getVal() : targetDataSource.getType();

        List<JSONObject> columns;
        String sql;
        if (CREATE_TABLE_TO_PG_TABLE.contains(sourceType)) {
            // Note: ADB For PG is NOT handled here; it has a dedicated branch below.
            columns = convertWriterColumns(columnMetaData, new PostgreSqlWriterFormat());
            sql = generalLibraCreateSql(columns, tableName, targetSchema);
        } else if (sourceType == DataSourceType.TiDB.getVal() || sourceType == DataSourceType.MySQL.getVal()) {
            columns = convertTidbWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = generalTidbCreateSql(columns, tableName, comment);
        } else if (sourceType == DataSourceType.Oracle.getVal()) {
            columns = convertWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = this.generalTidbCreateSql(columns, tableName, comment);
        } else if (sourceType == DataSourceType.ADB_FOR_PG.getVal()) {
            columns = ADBForPGUtil.convertADBForPGWriterColumns(columnMetaData);
            sql = ADBForPGUtil.generalCreateSql(targetSchema, tableName, columns, comment);
        } else {
            // Default: generate hive-style CREATE TABLE (honoring the partition list).
            columns = convertWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = generalCreateSql(columns, partList, tableName, comment);
        }
        return sqlFormat(sql);
    } catch (Exception e) {
        // Preserve the cause so the failure reason is not lost.
        throw new RdosDefineException("一键生成目标表失败", e);
    }
}
Also used : BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) ArrayList(java.util.ArrayList) IClient(com.dtstack.dtcenter.loader.client.IClient) PostgreSqlWriterFormat(com.dtstack.taier.develop.utils.develop.sync.format.writer.PostgreSqlWriterFormat) SftpException(com.jcraft.jsch.SftpException) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) IOException(java.io.IOException) PubSvcDefineException(com.dtstack.taier.common.exception.PubSvcDefineException) DtCenterDefException(com.dtstack.taier.common.exception.DtCenterDefException) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) JSONObject(com.alibaba.fastjson.JSONObject) JSONObject(com.alibaba.fastjson.JSONObject) SqlQueryDTO(com.dtstack.dtcenter.loader.dto.SqlQueryDTO) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO)

Aggregations

JSONObject (com.alibaba.fastjson.JSONObject)1 IClient (com.dtstack.dtcenter.loader.client.IClient)1 ColumnMetaDTO (com.dtstack.dtcenter.loader.dto.ColumnMetaDTO)1 SqlQueryDTO (com.dtstack.dtcenter.loader.dto.SqlQueryDTO)1 ISourceDTO (com.dtstack.dtcenter.loader.dto.source.ISourceDTO)1 DtCenterDefException (com.dtstack.taier.common.exception.DtCenterDefException)1 PubSvcDefineException (com.dtstack.taier.common.exception.PubSvcDefineException)1 RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException)1 BatchDataSource (com.dtstack.taier.dao.domain.BatchDataSource)1 PostgreSqlWriterFormat (com.dtstack.taier.develop.utils.develop.sync.format.writer.PostgreSqlWriterFormat)1 SftpException (com.jcraft.jsch.SftpException)1 IOException (java.io.IOException)1 ArrayList (java.util.ArrayList)1