
Example 21 with IClient

use of com.dtstack.dtcenter.loader.client.IClient in project Taier by DTStack.

the class DatasourceService method getCreateTargetTableSql.

/**
 * Generate the target table's CREATE TABLE statement in one click.
 *
 * @param originSourceId id of the origin datasource
 * @param targetSourceId id of the target datasource whose dialect drives the generated DDL
 * @param tableName      name of the table on the origin source
 * @param partition      partition spec, "col=value" segments joined by "/"
 * @param sourceSchema   schema of the origin table
 * @param targetSchema   schema for the generated target table
 */
public String getCreateTargetTableSql(Long originSourceId, Long targetSourceId, String tableName, String partition, String sourceSchema, String targetSchema) {
    try {
        BatchDataSource originSource = getOne(originSourceId);
        JSONObject reader = JSON.parseObject(originSource.getDataJson());
        if (!ORIGIN_TABLE_ALLOW_TYPES.contains(originSource.getType())) {
            throw new RdosDefineException("一键生成目标表,只支持关系型数据库、hive和maxCompute类型");
        }
        Map<String, Object> kerberosConfig = datasourceService.fillKerberosConfig(originSourceId);
        Map<String, Object> expandConfigPrepare = datasourceService.expandConfigPrepare(originSourceId);
        List<JSONObject> columnMetaData = new ArrayList<>();
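        // Resolve the loader client for the origin source type and describe the origin table to query.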
        IClient iClient = ClientCache.getClient(originSource.getType());
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(reader, originSource.getType(), kerberosConfig, expandConfigPrepare);
        SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().schema(sourceSchema).tableName(tableName).build();
        List<ColumnMetaDTO> columnMeta = iClient.getColumnMetaData(sourceDTO, sqlQueryDTO);
        if (CollectionUtils.isNotEmpty(columnMeta)) {
            for (ColumnMetaDTO columnMetaDTO : columnMeta) {
                JSONObject jsonObject = JSON.parseObject(JSON.toJSONString(columnMetaDTO));
                jsonObject.put("isPart", columnMetaDTO.getPart());
                columnMetaData.add(jsonObject);
            }
        }
        String comment = iClient.getTableMetaComment(sourceDTO, sqlQueryDTO);
        List<String> partList = null;
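        // Parse the partition spec ("col=value" pairs joined by "/") into partition column names, skipping the default "pt" column.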
        if (StringUtils.isNotBlank(partition)) {
            String[] parts = partition.split("/");
            partList = new ArrayList<>();
            for (String part : parts) {
                String[] partDetail = part.split("=");
                String partCol = partDetail[0];
                if (!partCol.equals("pt")) {
                    partList.add(partCol);
                }
            }
        }
        List<JSONObject> columns = null;
        BatchDataSource targetDataSource = getOne(targetSourceId);
        String sql;
        // Table names like 'CHARNT.'CUSTMERS_10_MIN' need to be normalized before use
        tableName = this.formatTableName(tableName);
        int sourceType = 0;
        if (targetDataSource != null) {
            // targetDataSource is non-null here, so its type can be used directly;
            // otherwise sourceType stays 0 and the default Hive branch below applies.
            sourceType = targetDataSource.getType();
        }
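        // Generate the CREATE TABLE statement in the dialect matching the target datasource type.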
        if (CREATE_TABLE_TO_PG_TABLE.contains(sourceType)) {
            // Note: ADB for PG is not handled here; it is handled separately below
            columns = convertWriterColumns(columnMetaData, new PostgreSqlWriterFormat());
            sql = generalLibraCreateSql(columns, tableName, targetSchema);
        } else if (sourceType == DataSourceType.TiDB.getVal() || sourceType == DataSourceType.MySQL.getVal()) {
            columns = convertTidbWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = generalTidbCreateSql(columns, tableName, comment);
        } else if (sourceType == DataSourceType.Oracle.getVal()) {
            columns = convertWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = this.generalTidbCreateSql(columns, tableName, comment);
        } else if (sourceType == DataSourceType.ADB_FOR_PG.getVal()) {
            columns = ADBForPGUtil.convertADBForPGWriterColumns(columnMetaData);
            sql = ADBForPGUtil.generalCreateSql(targetSchema, tableName, columns, comment);
        } else {
            // Default: fall back to Hive table creation
            columns = convertWriterColumns(columnMetaData, TYPE_FORMAT);
            sql = generalCreateSql(columns, partList, tableName, comment);
        }
        return sqlFormat(sql);
    } catch (Exception e) {
        throw new RdosDefineException("一键生成目标表失败", e);
    }
}
Also used : BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) ArrayList(java.util.ArrayList) IClient(com.dtstack.dtcenter.loader.client.IClient) PostgreSqlWriterFormat(com.dtstack.taier.develop.utils.develop.sync.format.writer.PostgreSqlWriterFormat) SftpException(com.jcraft.jsch.SftpException) IOException(java.io.IOException) PubSvcDefineException(com.dtstack.taier.common.exception.PubSvcDefineException) DtCenterDefException(com.dtstack.taier.common.exception.DtCenterDefException) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) JSONObject(com.alibaba.fastjson.JSONObject) SqlQueryDTO(com.dtstack.dtcenter.loader.dto.SqlQueryDTO) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO)
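For context, a minimal caller sketch (not taken from the Taier sources): datasourceService stands for an instance of this DatasourceService, and the ids, table name, partition spec and schemas are placeholder values.

String createSql = datasourceService.getCreateTargetTableSql(
    1L,                      // originSourceId (placeholder)
    2L,                      // targetSourceId (placeholder)
    "ods_orders",            // table name on the origin source (placeholder)
    "pt=20220101/region=cn", // partition spec: "col=value" pairs joined by "/"; the "pt" column is skipped
    "dev",                   // sourceSchema (placeholder)
    "dwd");                  // targetSchema (placeholder)
// createSql holds a formatted CREATE TABLE statement in the dialect of the target
// datasource type (PostgreSQL-family, TiDB/MySQL, Oracle, ADB for PG, or Hive by default).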

Example 22 with IClient

use of com.dtstack.dtcenter.loader.client.IClient in project Taier by DTStack.

the class ImpalaUtils method getTableFileType.

public static String getTableFileType(ISourceDTO iSourceDTO, String tableName) {
    IClient iClient = ClientCache.getClient(DataSourceType.IMPALA.getVal());
    com.dtstack.dtcenter.loader.dto.Table iClientTable = iClient.getTable(iSourceDTO, SqlQueryDTO.builder().tableName(tableName).build());
    return iClientTable.getStoreType();
}
Also used : IClient(com.dtstack.dtcenter.loader.client.IClient)
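A short usage sketch, assuming impalaSource is an ISourceDTO for the Impala connection that has already been built elsewhere; the table name is a placeholder.

String storeType = ImpalaUtils.getTableFileType(impalaSource, "dwd_orders");
// storeType is the storage format reported by the loader client (for example a text or Parquet table).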

Example 23 with IClient

use of com.dtstack.dtcenter.loader.client.IClient in project Taier by DTStack.

the class ImpalaUtils method getImpalaHiveTableDetailInfo.

public static Map<String, Object> getImpalaHiveTableDetailInfo(ISourceDTO iSourceDTO, String tableName) {
    IClient client = ClientCache.getClient(DataSourceType.IMPALA.getVal());
    SqlQueryDTO sqlQueryDTO = SqlQueryDTO.builder().tableName(tableName).build();
    com.dtstack.dtcenter.loader.dto.Table tableInfo = client.getTable(iSourceDTO, sqlQueryDTO);
    List<ColumnMetaDTO> columnMetaDTOList = tableInfo.getColumns();
    List<Column> columns = new ArrayList<>();
    List<Column> partitionColumns = new ArrayList<>();
    ColumnMetaDTO columnMetaDTO = null;
    for (int i = 0; i < columnMetaDTOList.size(); i++) {
        columnMetaDTO = columnMetaDTOList.get(i);
        Column column = new Column();
        column.setName(columnMetaDTO.getKey());
        column.setType(columnMetaDTO.getType());
        column.setComment(columnMetaDTO.getComment());
        column.setIndex(i);
        columns.add(column);
        if (columnMetaDTO.getPart()) {
            partitionColumns.add(column);
        }
    }
    Map<String, Object> map = new HashMap<>();
    map.put("allColumns", columns);
    map.put("partitionColumns", partitionColumns);
    map.put("path", tableInfo.getPath());
    map.put("fieldDelimiter", tableInfo.getDelim());
    return map;
}
Also used : HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IClient(com.dtstack.dtcenter.loader.client.IClient) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) Column(com.dtstack.taier.pluginapi.pojo.Column) SqlQueryDTO(com.dtstack.dtcenter.loader.dto.SqlQueryDTO)
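A companion sketch for the detail map, under the same assumption that impalaSource is a pre-built ISourceDTO and the table name is a placeholder; the casts mirror the value types the method puts into the map (Column lists, the table path and the field delimiter).

Map<String, Object> detail = ImpalaUtils.getImpalaHiveTableDetailInfo(impalaSource, "dwd_orders");
List<Column> allColumns = (List<Column>) detail.get("allColumns");
List<Column> partitionColumns = (List<Column>) detail.get("partitionColumns");
Object hdfsPath = detail.get("path");                 // storage location of the table
Object fieldDelimiter = detail.get("fieldDelimiter"); // field delimiter of the underlying files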

Aggregations

IClient (com.dtstack.dtcenter.loader.client.IClient) 23
ISourceDTO (com.dtstack.dtcenter.loader.dto.source.ISourceDTO) 20
JSONObject (com.alibaba.fastjson.JSONObject) 9
ArrayList (java.util.ArrayList) 8
ColumnMetaDTO (com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) 7
SqlQueryDTO (com.dtstack.dtcenter.loader.dto.SqlQueryDTO) 7
BatchDataSource (com.dtstack.taier.dao.domain.BatchDataSource) 6
DtCenterDefException (com.dtstack.taier.common.exception.DtCenterDefException) 5
RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException) 5
IOException (java.io.IOException) 5
PubSvcDefineException (com.dtstack.taier.common.exception.PubSvcDefineException) 4
SftpException (com.jcraft.jsch.SftpException) 4
Map (java.util.Map) 4
DataSourceType (com.dtstack.dtcenter.loader.source.DataSourceType) 3
JdbcInfo (com.dtstack.taier.common.engine.JdbcInfo) 3
List (java.util.List) 3
Connection (java.sql.Connection) 2
ITable (com.dtstack.dtcenter.loader.client.ITable) 1
Table (com.dtstack.dtcenter.loader.dto.Table) 1
HdfsSourceDTO (com.dtstack.dtcenter.loader.dto.source.HdfsSourceDTO) 1