
Example 1 with IClient

Use of com.dtstack.dtcenter.loader.client.IClient in the project Taier by DTStack.

From the class HiveBase, method inferHdfsParams:

protected void inferHdfsParams() {
    if (inferred.compareAndSet(false, true) && StringUtils.isNotBlank(table)) {
        DataSourceType sourceType = DataSourceType.getSourceType(dataSourceType);
        JSONObject dataJson = new JSONObject();
        dataJson.put(SourceDTOType.JDBC_URL, jdbcUrl);
        dataJson.put(SourceDTOType.JDBC_USERNAME, username);
        dataJson.put(SourceDTOType.JDBC_PASSWORD, password);
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(dataJson, sourceType.getVal(), kerberosConfig, Maps.newHashMap());
        IClient client = ClientCache.getClient(sourceType.getVal());
        Table tableInfo = client.getTable(sourceDTO, SqlQueryDTO.builder().tableName(this.table).build());
        List<ColumnMetaDTO> columnMetaData = tableInfo.getColumns();
        for (ColumnMetaDTO dto : columnMetaData) {
            if (!dto.getPart()) {
                fullColumnNames.add(dto.getKey());
                fullColumnTypes.add(dto.getType());
            } else {
                isPartitioned = true;
                partitionedBy.add(dto.getKey());
            }
        }
        if (isPartitioned) {
            ITable tableClient = ClientCache.getTable(sourceType.getVal());
            List<String> partitions = tableClient.showPartitions(sourceDTO, table);
            partitions.forEach(bean -> {
                partitionList.add(bean);
            });
        }
        this.dbName = tableInfo.getDb();
        this.path = tableInfo.getPath();
        this.fileType = tableInfo.getStoreType();
        this.fieldDelimiter = tableInfo.getDelim();
        this.isTransaction = tableInfo.getIsTransTable();
    }
    for (int i = 0; i < fullColumnNames.size(); i++) {
        for (Object col : column) {
            if (fullColumnNames.get(i).equals(((Map<String, Object>) col).get("key"))) {
                ((Map<String, Object>) col).put("index", i);
                break;
            }
        }
    }
}
Also used : Table(com.dtstack.dtcenter.loader.dto.Table) ITable(com.dtstack.dtcenter.loader.client.ITable) IClient(com.dtstack.dtcenter.loader.client.IClient) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) JSONObject(com.alibaba.fastjson.JSONObject) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO) Map(java.util.Map)
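
The calls above reduce to a small reusable pattern: look up the IClient by source type, describe the table with getTable, split ordinary columns from partition columns, and, for partitioned tables, list partitions through ITable. The following is a minimal sketch of that pattern, not Taier code; the class and method names are invented for illustration, and it assumes the caller has already built an ISourceDTO (for example via SourceDTOType.getSourceDTO as in the excerpt above).

import java.util.ArrayList;
import java.util.List;

import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.client.ITable;
import com.dtstack.dtcenter.loader.dto.ColumnMetaDTO;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.Table;
import com.dtstack.dtcenter.loader.dto.source.ISourceDTO;

// Hypothetical helper mirroring the flow of HiveBase#inferHdfsParams.
public class TableDescribeSketch {

    public static void describe(int sourceTypeVal, ISourceDTO sourceDTO, String tableName) {
        // Resolve the client for this data source type from the cache.
        IClient client = ClientCache.getClient(sourceTypeVal);

        // Describe the table: columns, storage path, delimiter, store type, ...
        Table tableInfo = client.getTable(sourceDTO, SqlQueryDTO.builder().tableName(tableName).build());

        // Separate ordinary columns from partition columns.
        List<String> columns = new ArrayList<>();
        List<String> partitionColumns = new ArrayList<>();
        for (ColumnMetaDTO column : tableInfo.getColumns()) {
            if (Boolean.TRUE.equals(column.getPart())) {
                partitionColumns.add(column.getKey());
            } else {
                columns.add(column.getKey());
            }
        }

        // For partitioned tables, list the existing partitions through ITable.
        if (!partitionColumns.isEmpty()) {
            ITable tableClient = ClientCache.getTable(sourceTypeVal);
            List<String> partitions = tableClient.showPartitions(sourceDTO, tableName);
            System.out.println("partitions of " + tableName + ": " + partitions);
        }
        System.out.println("columns: " + columns + ", partitioned by: " + partitionColumns);
    }
}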

Example 2 with IClient

Use of com.dtstack.dtcenter.loader.client.IClient in the project Taier by DTStack.

From the class JdbcServiceImpl, method executeQueryWithoutResult:

/**
 * Execute a SQL statement without returning a result set.
 * @param tenantId tenant id
 * @param userId user id
 * @param eScheduleJobType schedule job type
 * @param schema database schema to execute against
 * @param sql SQL to execute
 * @param connection JDBC connection to reuse
 * @return Boolean.TRUE when the statement executed successfully
 */
@Override
public Boolean executeQueryWithoutResult(Long tenantId, Long userId, EScheduleJobType eScheduleJobType, String schema, String sql, Connection connection) {
    ISourceDTO iSourceDTO = Engine2DTOService.get(tenantId, userId, eScheduleJobType, schema);
    iSourceDTO.setConnection(connection);
    IClient client = ClientCache.getClient(iSourceDTO.getSourceType());
    LOGGER.info("Executing SQL on cluster, tenantId:{},userId:{},jobType:{},schema:{},sql:{}", tenantId, userId, eScheduleJobType.getType(), schema, sql);
    client.executeSqlWithoutResultSet(iSourceDTO, SqlQueryDTO.builder().sql(sql).build());
    return Boolean.TRUE;
}
Also used : IClient(com.dtstack.dtcenter.loader.client.IClient) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO)
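
The key point in this example is that an externally managed java.sql.Connection can be attached to the ISourceDTO with setConnection, so several statements can share one connection while the client still drives execution. Below is a minimal sketch of just that hand-off; the helper name is invented, and it assumes the caller owns both the ISourceDTO and the Connection.

import java.sql.Connection;

import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.source.ISourceDTO;

// Hypothetical helper: execute a statement that produces no result set over a shared connection.
public final class ExecuteWithoutResultSketch {

    public static void execute(ISourceDTO sourceDTO, Connection connection, String sql) {
        // Reuse the caller's connection instead of letting the client open a new one.
        sourceDTO.setConnection(connection);
        IClient client = ClientCache.getClient(sourceDTO.getSourceType());
        client.executeSqlWithoutResultSet(sourceDTO, SqlQueryDTO.builder().sql(sql).build());
    }
}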

Example 3 with IClient

Use of com.dtstack.dtcenter.loader.client.IClient in the project Taier by DTStack.

From the class JdbcServiceImpl, method executeQueryWithVariables:

public List<List<Object>> executeQueryWithVariables(Long tenantId, Long userId, EScheduleJobType eScheduleJobType, String schema, String sql, List<String> variables, Integer limit, String taskParam) {
    List<List<Object>> returnList = new ArrayList<>();
    JdbcInfo jdbcInfo = Engine2DTOService.getJdbcInfo(tenantId, userId, eScheduleJobType);
    Integer maxRows = limit == null || limit == 0 ? jdbcInfo.getMaxRows() : limit;
    ISourceDTO iSourceDTO = Engine2DTOService.get(tenantId, userId, Engine2DTOService.jobTypeTransitionDataSourceType(eScheduleJobType, jdbcInfo.getVersion()).getVal(), schema, jdbcInfo);
    IClient client = ClientCache.getClient(iSourceDTO.getSourceType());
    // Obtain the connection up front and reuse it; try-with-resources is not used here because the exception caught on close would be too broad
    Connection con = client.getCon(iSourceDTO, taskParam);
    // Execute the variable SQL statements first
    try {
        iSourceDTO.setConnection(con);
        List<Map<String, Object>> list;
        if (CollectionUtils.isNotEmpty(variables)) {
            variables.forEach(variable -> client.executeSqlWithoutResultSet(iSourceDTO, SqlQueryDTO.builder().sql(variable).limit(jdbcInfo.getMaxRows()).queryTimeout(jdbcInfo.getQueryTimeout()).build()));
            list = client.executeQuery(iSourceDTO, SqlQueryDTO.builder().sql(sql).limit(maxRows).queryTimeout(jdbcInfo.getQueryTimeout()).build());
        } else {
            list = client.executeQuery(iSourceDTO, SqlQueryDTO.builder().sql(sql).limit(maxRows).queryTimeout(jdbcInfo.getQueryTimeout()).build());
        }
        LOGGER.info("Executing SQL query on cluster, tenantId:{},userId:{},jobType:{},schema:{},sql:{}", tenantId, userId, eScheduleJobType.getType(), schema, sql);
        List<ColumnMetaDTO> columnMetaDataWithSql = client.getColumnMetaDataWithSql(iSourceDTO, SqlQueryDTO.builder().sql(sql).limit(0).queryTimeout(jdbcInfo.getQueryTimeout()).build());
        if (CollectionUtils.isNotEmpty(columnMetaDataWithSql)) {
            List<Object> column = new ArrayList<>();
            columnMetaDataWithSql.stream().forEach(bean -> {
                column.add(bean.getKey());
            });
            returnList.add(column);
        }
        // The pluggable data source may return values in a shape that does not match what is required, so convert them
        if (CollectionUtils.isNotEmpty(list)) {
            for (Map<String, Object> result : list) {
                List<Object> value = new ArrayList<>();
                result.values().forEach(bean -> {
                    value.add(bean);
                });
                returnList.add(value);
            }
        }
    } finally {
        iSourceDTO.setConnection(null);
        DBUtil.closeDBResources(null, null, con);
    }
    return returnList;
}
Also used : ArrayList(java.util.ArrayList) Connection(java.sql.Connection) IClient(com.dtstack.dtcenter.loader.client.IClient) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) List(java.util.List) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO) Map(java.util.Map) JdbcInfo(com.dtstack.taier.common.engine.JdbcInfo)
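
Stripped of the metadata handling, executeQueryWithVariables boils down to a connection lifecycle: open one connection with getCon, attach it to the DTO, run the session variables and the query on that same connection, then detach and close it in a finally block. A condensed sketch of that lifecycle with invented names follows; error handling and column metadata are omitted.

import java.sql.Connection;
import java.util.List;
import java.util.Map;

import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.SqlQueryDTO;
import com.dtstack.dtcenter.loader.dto.source.ISourceDTO;

// Hypothetical helper: run session variables and a query over one shared connection.
public final class QueryWithVariablesSketch {

    public static List<Map<String, Object>> query(ISourceDTO sourceDTO, List<String> variables,
                                                  String sql, Integer maxRows, Integer queryTimeout) {
        IClient client = ClientCache.getClient(sourceDTO.getSourceType());
        // Open the connection once and reuse it for every statement in this session.
        Connection con = client.getCon(sourceDTO);
        try {
            sourceDTO.setConnection(con);
            // Session variables (e.g. SET statements) must run on the same connection as the query.
            for (String variable : variables) {
                client.executeSqlWithoutResultSet(sourceDTO,
                        SqlQueryDTO.builder().sql(variable).queryTimeout(queryTimeout).build());
            }
            return client.executeQuery(sourceDTO,
                    SqlQueryDTO.builder().sql(sql).limit(maxRows).queryTimeout(queryTimeout).build());
        } finally {
            // Detach the connection before closing so the DTO does not hold a stale handle.
            sourceDTO.setConnection(null);
            try {
                con.close();
            } catch (Exception ignore) {
                // best-effort close
            }
        }
    }
}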

Example 4 with IClient

Use of com.dtstack.dtcenter.loader.client.IClient in the project Taier by DTStack.

From the class DatasourceService, method getHivePartitions:

public Set<String> getHivePartitions(Long sourceId, String tableName) {
    BatchDataSource source = getOne(sourceId);
    JSONObject json = JSON.parseObject(source.getDataJson());
    Map<String, Object> kerberosConfig = this.fillKerberosConfig(sourceId);
    ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(json, source.getType(), kerberosConfig, Maps.newHashMap());
    IClient iClient = ClientCache.getClient(source.getType());
    List<ColumnMetaDTO> partitionColumn = iClient.getPartitionColumn(sourceDTO, SqlQueryDTO.builder().tableName(tableName).build());
    Set<String> partitionNameSet = Sets.newHashSet();
    // Format the partition info to keep it consistent with Hive
    if (CollectionUtils.isNotEmpty(partitionColumn)) {
        StringJoiner tempJoiner = new StringJoiner("=/", "", "=");
        for (ColumnMetaDTO column : partitionColumn) {
            tempJoiner.add(column.getKey());
        }
        partitionNameSet.add(tempJoiner.toString());
    }
    return partitionNameSet;
}
Also used : BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) JSONObject(com.alibaba.fastjson.JSONObject) IClient(com.dtstack.dtcenter.loader.client.IClient) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO) StringJoiner(java.util.StringJoiner) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO)
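
The StringJoiner above turns the partition column names into a Hive-style partition path template: each column name followed by "=", separated by "/". A tiny standalone sketch of just that formatting step, with hypothetical column names:

import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;

public class PartitionTemplateSketch {
    public static void main(String[] args) {
        // Hypothetical partition columns, in the order returned by getPartitionColumn.
        List<String> partitionColumns = Arrays.asList("pt", "dt");

        // Delimiter "=/" between names, no prefix, suffix "=" after the last name.
        StringJoiner joiner = new StringJoiner("=/", "", "=");
        partitionColumns.forEach(joiner::add);

        // Prints "pt=/dt=", the same shape Hive uses for partition paths such as pt=20220101/dt=a.
        System.out.println(joiner.toString());
    }
}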

Example 5 with IClient

Use of com.dtstack.dtcenter.loader.client.IClient in the project Taier by DTStack.

From the class DatasourceService, method executeOnSpecifySourceWithOutResult:

/**
 * @param sourceId  data source id
 * @param sql  the assembled SQL to execute
 * @param targetSchema  target schema; only used as an input for Doris, other source types do not need to pass it
 */
private void executeOnSpecifySourceWithOutResult(Long sourceId, String sql, String targetSchema) {
    BatchDataSource source = getOne(sourceId);
    DataSourceType dataSourceType = DataSourceType.getSourceType(source.getType());
    if (!SUPPORT_CREATE_TABLE_DATASOURCES.contains(dataSourceType)) {
        throw new RdosDefineException(String.format("Only %s data source tables can be created", SUPPORT_CREATE_TABLE_DATASOURCES_NAMES));
    }
    JSONObject json = JSON.parseObject(source.getDataJson());
    try {
        Map<String, Object> kerberosConfig = fillKerberosConfig(sourceId);
        Map<String, Object> expandConfigPrepare = expandConfigPrepare(sourceId);
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(json, source.getType(), kerberosConfig, expandConfigPrepare);
        IClient iClient = ClientCache.getClient(dataSourceType.getVal());
        Connection con = iClient.getCon(sourceDTO);
        DBUtil.executeSqlWithoutResultSet(con, sql, false);
    } catch (Exception e) {
        throw new RdosDefineException(e.getMessage() + ". Executed sql = " + sql, e);
    }
}
Also used : BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) Connection(java.sql.Connection) IClient(com.dtstack.dtcenter.loader.client.IClient) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO) SftpException(com.jcraft.jsch.SftpException) IOException(java.io.IOException) PubSvcDefineException(com.dtstack.taier.common.exception.PubSvcDefineException) DtCenterDefException(com.dtstack.taier.common.exception.DtCenterDefException)
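
This example goes the other way around: instead of handing the SQL to the client, getCon returns a raw java.sql.Connection that is then driven with plain JDBC (via DBUtil in the excerpt). A minimal sketch of the same hand-off using a plain Statement instead of DBUtil, with explicit cleanup added; the helper name is invented.

import java.sql.Connection;
import java.sql.Statement;

import com.dtstack.dtcenter.loader.client.ClientCache;
import com.dtstack.dtcenter.loader.client.IClient;
import com.dtstack.dtcenter.loader.dto.source.ISourceDTO;

// Hypothetical helper: run a DDL statement over a raw JDBC connection obtained from IClient.
public final class RawConnectionSketch {

    public static void executeDdl(int sourceTypeVal, ISourceDTO sourceDTO, String sql) throws Exception {
        IClient client = ClientCache.getClient(sourceTypeVal);
        // getCon hands back a plain java.sql.Connection for the configured source.
        try (Connection con = client.getCon(sourceDTO);
             Statement stmt = con.createStatement()) {
            stmt.execute(sql);
        }
    }
}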

Aggregations

IClient (com.dtstack.dtcenter.loader.client.IClient) 23
ISourceDTO (com.dtstack.dtcenter.loader.dto.source.ISourceDTO) 20
JSONObject (com.alibaba.fastjson.JSONObject) 9
ArrayList (java.util.ArrayList) 8
ColumnMetaDTO (com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) 7
SqlQueryDTO (com.dtstack.dtcenter.loader.dto.SqlQueryDTO) 7
BatchDataSource (com.dtstack.taier.dao.domain.BatchDataSource) 6
DtCenterDefException (com.dtstack.taier.common.exception.DtCenterDefException) 5
RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException) 5
IOException (java.io.IOException) 5
PubSvcDefineException (com.dtstack.taier.common.exception.PubSvcDefineException) 4
SftpException (com.jcraft.jsch.SftpException) 4
Map (java.util.Map) 4
DataSourceType (com.dtstack.dtcenter.loader.source.DataSourceType) 3
JdbcInfo (com.dtstack.taier.common.engine.JdbcInfo) 3
List (java.util.List) 3
Connection (java.sql.Connection) 2
ITable (com.dtstack.dtcenter.loader.client.ITable) 1
Table (com.dtstack.dtcenter.loader.dto.Table) 1
HdfsSourceDTO (com.dtstack.dtcenter.loader.dto.source.HdfsSourceDTO) 1