
Example 1 with DataSourceType

use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.

the class HiveBase method inferHdfsParams.

protected void inferHdfsParams() {
    if (inferred.compareAndSet(false, true) && StringUtils.isNotBlank(table)) {
        DataSourceType sourceType = DataSourceType.getSourceType(dataSourceType);
        JSONObject dataJson = new JSONObject();
        dataJson.put(SourceDTOType.JDBC_URL, jdbcUrl);
        dataJson.put(SourceDTOType.JDBC_USERNAME, username);
        dataJson.put(SourceDTOType.JDBC_PASSWORD, password);
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(dataJson, sourceType.getVal(), kerberosConfig, Maps.newHashMap());
        IClient client = ClientCache.getClient(sourceType.getVal());
        Table tableInfo = client.getTable(sourceDTO, SqlQueryDTO.builder().tableName(this.table).build());
        List<ColumnMetaDTO> columnMetaData = tableInfo.getColumns();
        for (ColumnMetaDTO dto : columnMetaData) {
            if (!dto.getPart()) {
                fullColumnNames.add(dto.getKey());
                fullColumnTypes.add(dto.getType());
            } else {
                isPartitioned = true;
                partitionedBy.add(dto.getKey());
            }
        }
        if (isPartitioned) {
            ITable tableClient = ClientCache.getTable(sourceType.getVal());
            List<String> partitions = tableClient.showPartitions(sourceDTO, table);
            partitions.forEach(bean -> {
                partitionList.add(bean);
            });
        }
        this.dbName = tableInfo.getDb();
        this.path = tableInfo.getPath();
        this.fileType = tableInfo.getStoreType();
        this.fieldDelimiter = tableInfo.getDelim();
        this.isTransaction = tableInfo.getIsTransTable();
    }
    for (int i = 0; i < fullColumnNames.size(); i++) {
        for (Object col : column) {
            if (fullColumnNames.get(i).equals(((Map<String, Object>) col).get("key"))) {
                ((Map<String, Object>) col).put("index", i);
                break;
            }
        }
    }
}
Also used : Table(com.dtstack.dtcenter.loader.dto.Table) ITable(com.dtstack.dtcenter.loader.client.ITable) IClient(com.dtstack.dtcenter.loader.client.IClient) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) JSONObject(com.alibaba.fastjson.JSONObject) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO) Map(java.util.Map)
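
The final loop of inferHdfsParams stamps each user-selected column map with its position in the table's non-partition column list. Below is a minimal, self-contained sketch of just that index-mapping step in plain Java; the class and variable values are illustrative and not part of Taier.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ColumnIndexMappingSketch {
    public static void main(String[] args) {
        // Non-partition columns as they would come back from the table metadata.
        List<String> fullColumnNames = Arrays.asList("id", "name", "age");

        // User-selected columns, each a Map with a "key" entry, mirroring the
        // Map<String, Object> casts in inferHdfsParams above.
        List<Map<String, Object>> column = new ArrayList<>();
        Map<String, Object> c1 = new HashMap<>();
        c1.put("key", "age");
        Map<String, Object> c2 = new HashMap<>();
        c2.put("key", "id");
        column.add(c1);
        column.add(c2);

        // Stamp each selected column with its index in the full column list,
        // exactly as the final loop of inferHdfsParams does; break stops the
        // inner scan once a match is found.
        for (int i = 0; i < fullColumnNames.size(); i++) {
            for (Map<String, Object> col : column) {
                if (fullColumnNames.get(i).equals(col.get("key"))) {
                    col.put("index", i);
                    break;
                }
            }
        }
        // e.g. [{key=age, index=2}, {key=id, index=0}] (HashMap order may vary)
        System.out.println(column);
    }
}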

Example 2 with DataSourceType

use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.

the class RDBReader method toReaderJson.

@Override
public JSONObject toReaderJson() {
    JSONObject parameter = new JSONObject(true);
    parameter.put("username", this.getUsername());
    parameter.put("password", this.getPassword());
    parameter.put("splitPk", this.getSplitPK());
    parameter.put("where", this.getWhere());
    parameter.put("column", ColumnUtil.getColumns(this.getColumn(), PluginName.MySQLD_R));
    if (this.getJdbcUrl() != null) {
        JSONObject conn = new JSONObject(true);
        conn.put("jdbcUrl", Arrays.asList(this.getJdbcUrl()));
        conn.put("password", this.getPassword());
        conn.put("username", this.getUsername());
        conn.put("table", this.getTable());
        conn.put("sourceId", this.getSourceId());
        conn.put("schema", isNeedSchema());
        connections = new JSONArray();
        connections.add(conn);
    }
    if (connections != null && connections.size() > 0) {
        Map<String, Object> conn = (Map) connections.get(0);
        this.setJdbcUrl(((List<String>) conn.get("jdbcUrl")).get(0));
        String pass = Objects.isNull(conn.get("password")) ? "" : conn.get("password").toString();
        this.setPassword(pass);
        this.setUsername(MapUtils.getString(conn, "username"));
        if (conn.get("table") instanceof String) {
            this.setTable(Arrays.asList((String) conn.get("table")));
        } else {
            this.setTable((List<String>) conn.get("table"));
        }
    }
    boolean isMultiTable = (CollectionUtils.isNotEmpty(this.getConnections()) && this.getConnections().size() > 1) || (CollectionUtils.isNotEmpty(this.getTable()) && this.getTable().size() > 1);
    // Incremental sync configuration
    parameter.put("increColumn", Optional.ofNullable(this.getIncreColumn()).orElse(""));
    parameter.put("startLocation", "");
    parameter.put("connection", connections);
    JSONObject reader = new JSONObject(true);
    DataSourceType dataSourceType = DataSourceType.getSourceType(getType());
    switch(dataSourceType) {
        case MySQL:
        case MySQL8:
        case MySQLPXC:
        case TiDB:
            if (isMultiTable) {
                reader.put("name", PluginName.MySQLD_R);
            } else {
                reader.put("name", PluginName.MySQL_R);
            }
            break;
        case Clickhouse:
            reader.put("name", PluginName.Clickhouse_R);
            break;
        case Polardb_For_MySQL:
            reader.put("name", PluginName.Polardb_for_MySQL_R);
            break;
        case Oracle:
            reader.put("name", PluginName.Oracle_R);
            break;
        case SQLServer:
            reader.put("name", PluginName.SQLServer_R);
            break;
        case HIVE:
        case HIVE3X:
        case HIVE1X:
            reader.put("name", PluginName.Hive_R);
            break;
        case PostgreSQL:
        case LIBRA:
            reader.put("name", PluginName.PostgreSQL_R);
            break;
        case DB2:
            reader.put("name", PluginName.DB2_R);
            break;
        case GBase_8a:
            reader.put("name", PluginName.GBase_R);
            break;
        case Phoenix:
            reader.put("name", PluginName.Phoenix_R);
            break;
        case PHOENIX5:
            reader.put("name", PluginName.Phoenix5_R);
            break;
        case DMDB:
            reader.put("name", PluginName.DM_R);
            break;
        case GREENPLUM6:
            reader.put("name", PluginName.GREENPLUM_R);
            break;
        case KINGBASE8:
            reader.put("name", PluginName.KINGBASE_R);
            break;
        case ADB_FOR_PG:
            reader.put("name", PluginName.ADB_FOR_PG_R);
            break;
        default:
            throw new RdosDefineException(ErrorCode.CAN_NOT_FITABLE_SOURCE_TYPE);
    }
    parameter.put("customSql", Optional.ofNullable(getCustomSql()).orElse(""));
    parameter.put("sourceIds", getSourceIds());
    parameter.putAll(super.getExtralConfigMap());
    reader.put("parameter", parameter);
    return reader;
}
Also used : JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) JSONArray(com.alibaba.fastjson.JSONArray)
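
toReaderJson first normalizes the single jdbcUrl/table fields into a connection array, then selects the reader plugin from the DataSourceType switch. The following rough, self-contained sketch reproduces the connection assembly with fastjson; the JDBC URL, user and table names are placeholders, and the real plugin-name strings come from Taier's PluginName constants.

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class ReaderConnectionSketch {
    public static void main(String[] args) {
        // Ordered JSON object, matching new JSONObject(true) in toReaderJson.
        JSONObject conn = new JSONObject(true);
        conn.put("jdbcUrl", Collections.singletonList("jdbc:mysql://127.0.0.1:3306/demo"));
        conn.put("username", "demo_user");
        conn.put("table", Arrays.asList("orders"));

        JSONArray connections = new JSONArray();
        connections.add(conn);

        // One connection and one table: toReaderJson would pick the plain MySQL
        // reader; more than one of either switches to the distributed variant
        // (PluginName.MySQLD_R in the source above).
        boolean isMultiTable = connections.size() > 1
                || ((List<?>) conn.get("table")).size() > 1;
        System.out.println("multi-table: " + isMultiTable);
        System.out.println(connections.toJSONString());
    }
}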

Example 3 with DataSourceType

use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.

the class RDBWriter method toWriterJson.

@Override
public JSONObject toWriterJson() {
    JSONObject connection = new JSONObject(true);
    connection.put("jdbcUrl", this.getJdbcUrl());
    connection.put("table", this.getTable());
    connection.put("schema", isNeedSchema());
    JSONObject parameter = new JSONObject(true);
    parameter.put("username", this.getUsername());
    parameter.put("password", this.getPassword());
    parameter.put("connection", Lists.newArrayList(connection));
    parameter.put("session", StringUtils.isNotBlank(this.getSession()) ? Lists.newArrayList(this.getSession()) : Lists.newArrayList());
    parameter.put("preSql", StringUtils.isNotBlank(this.getPreSql()) ? Lists.newArrayList(this.getPreSql().trim().split(";")) : Lists.newArrayList());
    parameter.put("postSql", StringUtils.isNotBlank(this.getPostSql()) ? Lists.newArrayList(this.getPostSql().trim().split(";")) : Lists.newArrayList());
    parameter.put("writeMode", this.getWriteMode());
    parameter.put("column", ColumnUtil.getColumns(this.getColumn(), PluginName.MySQLD_R));
    parameter.put("sourceIds", getSourceIds());
    parameter.putAll(super.getExtralConfigMap());
    JSONObject writer = new JSONObject(true);
    DataSourceType dataSourceType = DataSourceType.getSourceType(getType());
    switch(dataSourceType) {
        case MySQL:
        case MySQL8:
        case MySQLPXC:
        case TiDB:
            writer.put("name", PluginName.MySQL_W);
            break;
        case Clickhouse:
            writer.put("name", PluginName.Clichhouse_W);
            break;
        case Polardb_For_MySQL:
            writer.put("name", PluginName.Polardb_for_MySQL_W);
            break;
        case Oracle:
            writer.put("name", PluginName.Oracle_W);
            break;
        case SQLServer:
            writer.put("name", PluginName.SQLServer_W);
            break;
        case HIVE:
        case HIVE1X:
            writer.put("name", PluginName.Hive_W);
            break;
        case PostgreSQL:
        case LIBRA:
            writer.put("name", PluginName.PostgreSQL_W);
            break;
        case DB2:
            writer.put("name", PluginName.DB2_W);
            break;
        case GBase_8a:
            writer.put("name", PluginName.GBase_W);
            break;
        case Phoenix:
            writer.put("name", PluginName.Phoenix_W);
            // Special handling of the write mode: 200302_3.10_beta2 only supports upsert
            parameter.put("writeMode", StringUtils.isBlank(this.getWriteMode()) ? UPSERT : this.getWriteMode());
            break;
        case PHOENIX5:
            writer.put("name", PluginName.Phoenix5_W);
            parameter.put("writeMode", StringUtils.isBlank(this.getWriteMode()) ? UPSERT : this.getWriteMode());
            break;
        case DMDB:
            writer.put("name", PluginName.DM_W);
            break;
        case GREENPLUM6:
            writer.put("name", PluginName.GREENPLUM_W);
            break;
        case KINGBASE8:
            writer.put("name", PluginName.KINGBASE_W);
            break;
        case INCEPTOR:
            writer.put("name", PluginName.INCEPTOR_W);
            break;
        case ADB_FOR_PG:
            writer.put("name", PluginName.ADB_FOR_PG_W);
            break;
        default:
            throw new RdosDefineException(ErrorCode.CAN_NOT_FITABLE_SOURCE_TYPE);
    }
    writer.put("parameter", parameter);
    return writer;
}
Also used : JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType)
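
Two details in toWriterJson are easy to miss: preSql/postSql are split on ';' into a list of statements, and the Phoenix branches fall back to UPSERT when no write mode is configured. A plain-Java illustration of the split behaviour follows; the table name is a placeholder.

import java.util.Arrays;
import java.util.List;

public class PreSqlSplitSketch {
    public static void main(String[] args) {
        // toWriterJson splits preSql/postSql on ';' so several statements can be
        // configured in one field; this mirrors that behaviour in plain Java.
        String preSql = "truncate table demo_target; analyze table demo_target";
        List<String> statements = Arrays.asList(preSql.trim().split(";"));
        statements.forEach(s -> System.out.println("[" + s + "]"));
        // Note: the quoted code trims the whole string, not each fragment, so a
        // statement after a ';' keeps its leading space, as the brackets show.
    }
}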

Example 4 with DataSourceType

use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.

the class DatasourceService method executeOnSpecifySourceWithOutResult.

/**
 * Execute SQL on the specified data source without returning a result set.
 *
 * @param sourceId  data source id
 * @param sql  the SQL statement to execute
 * @param targetSchema only needed for Doris; other source types need not pass it
 */
private void executeOnSpecifySourceWithOutResult(Long sourceId, String sql, String targetSchema) {
    BatchDataSource source = getOne(sourceId);
    DataSourceType dataSourceType = DataSourceType.getSourceType(source.getType());
    if (!SUPPORT_CREATE_TABLE_DATASOURCES.contains(dataSourceType)) {
        throw new RdosDefineException(String.format("只支持创建%s数据源表", SUPPORT_CREATE_TABLE_DATASOURCES_NAMES));
    }
    JSONObject json = JSON.parseObject(source.getDataJson());
    try {
        Map<String, Object> kerberosConfig = fillKerberosConfig(sourceId);
        Map<String, Object> expandConfigPrepare = expandConfigPrepare(sourceId);
        ISourceDTO sourceDTO = SourceDTOType.getSourceDTO(json, source.getType(), kerberosConfig, expandConfigPrepare);
        IClient iClient = ClientCache.getClient(dataSourceType.getVal());
        Connection con = iClient.getCon(sourceDTO);
        DBUtil.executeSqlWithoutResultSet(con, sql, false);
    } catch (Exception e) {
        throw new RdosDefineException(e.getMessage() + "。 执行sql = " + sql, e);
    }
}
Also used : BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) Connection(java.sql.Connection) IClient(com.dtstack.dtcenter.loader.client.IClient) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO) SftpException(com.jcraft.jsch.SftpException) IOException(java.io.IOException) PubSvcDefineException(com.dtstack.taier.common.exception.PubSvcDefineException) DtCenterDefException(com.dtstack.taier.common.exception.DtCenterDefException)
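
executeOnSpecifySourceWithOutResult obtains a Connection through the loader client and hands it to DBUtil.executeSqlWithoutResultSet. As a rough approximation using plain JDBC only: DriverManager plus placeholder URL and credentials stand in for the loader's ClientCache/ISourceDTO machinery, and try-with-resources handles cleanup here, whereas the original delegates that to DBUtil.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class ExecuteWithoutResultSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details; the real method obtains the Connection
        // via ClientCache.getClient(...).getCon(sourceDTO) instead of DriverManager.
        String jdbcUrl = "jdbc:mysql://127.0.0.1:3306/demo";
        String sql = "CREATE TABLE IF NOT EXISTS demo_table (id INT, name VARCHAR(64))";

        try (Connection con = DriverManager.getConnection(jdbcUrl, "demo_user", "demo_pass");
             Statement stmt = con.createStatement()) {
            // Statement#execute runs DDL/DML without expecting a ResultSet, which is
            // the same effect DBUtil.executeSqlWithoutResultSet has in the original.
            stmt.execute(sql);
        }
    }
}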

Example 5 with DataSourceType

use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.

the class HadoopDataDownloadService method queryDataFromTempTable.

/**
 * Query table data from a temporary table.
 *
 * @param tenantId  tenant id
 * @param tableName temporary table name
 * @param db        database the table lives in
 * @return the column names as the first element, followed by one row per element
 * @throws Exception
 */
public List<Object> queryDataFromTempTable(Long tenantId, String tableName, String db) throws Exception {
    DataSourceType dataSourceType = datasourceService.getHadoopDefaultDataSourceByTenantId(tenantId);
    IDownload downloader = getDownloader(tenantId, tableName, db, dataSourceType.getVal());
    List<Object> result = new ArrayList<>();
    List<String> alias = downloader.getMetaInfo();
    result.add(alias);
    JdbcInfo jdbcInfo = Engine2DTOService.getJdbcInfo(tenantId, null, DataSourceTypeJobTypeMapping.getTaskTypeByDataSourceType(dataSourceType.getVal()));
    int readCounter = 0;
    while (!downloader.reachedEnd() && readCounter < jdbcInfo.getMaxRows()) {
        List<String> row = (List<String>) downloader.readNext();
        result.add(row);
        readCounter++;
    }
    return result;
}
Also used : DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) ArrayList(java.util.ArrayList) IDownload(com.dtstack.taier.develop.utils.develop.common.IDownload) JSONObject(com.alibaba.fastjson.JSONObject) List(java.util.List) JdbcInfo(com.dtstack.taier.common.engine.JdbcInfo)
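
queryDataFromTempTable prepends the column names from IDownload#getMetaInfo and then reads rows until either the downloader is exhausted or the tenant's maxRows limit is reached. Below is a self-contained sketch of that capped read loop; RowDownload is a hypothetical stand-in, since IDownload itself belongs to the Taier codebase.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class BoundedDownloadSketch {
    // Hypothetical stand-in for IDownload: header row plus row-by-row reads.
    interface RowDownload {
        List<String> getMetaInfo();
        boolean reachedEnd();
        List<String> readNext();
    }

    static List<Object> readCapped(RowDownload downloader, int maxRows) {
        List<Object> result = new ArrayList<>();
        result.add(downloader.getMetaInfo()); // first element carries the column names
        int readCounter = 0;
        // Stop at end-of-data or at the configured row cap, whichever comes first,
        // mirroring the while loop in queryDataFromTempTable.
        while (!downloader.reachedEnd() && readCounter < maxRows) {
            result.add(downloader.readNext());
            readCounter++;
        }
        return result;
    }

    public static void main(String[] args) {
        Iterator<List<String>> rows = Arrays.asList(
                Arrays.asList("1", "a"), Arrays.asList("2", "b"), Arrays.asList("3", "c")).iterator();
        RowDownload fake = new RowDownload() {
            public List<String> getMetaInfo() { return Arrays.asList("id", "name"); }
            public boolean reachedEnd() { return !rows.hasNext(); }
            public List<String> readNext() { return rows.next(); }
        };
        System.out.println(readCapped(fake, 2)); // [[id, name], [1, a], [2, b]]
    }
}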

Aggregations

DataSourceType (com.dtstack.dtcenter.loader.source.DataSourceType) 8
JSONObject (com.alibaba.fastjson.JSONObject) 7
RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException) 5
IClient (com.dtstack.dtcenter.loader.client.IClient) 4
ISourceDTO (com.dtstack.dtcenter.loader.dto.source.ISourceDTO) 4
JdbcInfo (com.dtstack.taier.common.engine.JdbcInfo) 3
IOException (java.io.IOException) 3
ArrayList (java.util.ArrayList) 3
List (java.util.List) 3
Map (java.util.Map) 3
JSONArray (com.alibaba.fastjson.JSONArray) 2
ColumnMetaDTO (com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) 2
DtCenterDefException (com.dtstack.taier.common.exception.DtCenterDefException) 2
PubSvcDefineException (com.dtstack.taier.common.exception.PubSvcDefineException) 2
BatchDataSource (com.dtstack.taier.dao.domain.BatchDataSource) 2
JSON (com.alibaba.fastjson.JSON) 1
ClientCache (com.dtstack.dtcenter.loader.client.ClientCache) 1
IKerberos (com.dtstack.dtcenter.loader.client.IKerberos) 1
ITable (com.dtstack.dtcenter.loader.client.ITable) 1
SqlQueryDTO (com.dtstack.dtcenter.loader.dto.SqlQueryDTO) 1