
Example 6 with DataSourceType

use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.

the class JdbcServiceImpl method executeQueryWithVariables.

/**
 * Execute a query.
 *
 * @param tenantId tenant id
 * @param userId user id
 * @param eScheduleJobType schedule job type, used to resolve the JDBC info and data source type
 * @param schema database schema to query against
 * @param sql SQL statement to execute
 * @param variables variable/session SQL statements executed before the query
 * @param connection existing connection to reuse
 * @return result rows; the first row holds the column names
 */
@Override
public List<List<Object>> executeQueryWithVariables(Long tenantId, Long userId, EScheduleJobType eScheduleJobType, String schema, String sql, List<String> variables, Connection connection) {
    List<List<Object>> returnList = new ArrayList<>();
    JdbcInfo jdbcInfo = Engine2DTOService.getJdbcInfo(tenantId, userId, eScheduleJobType);
    DataSourceType dataSourceType = Engine2DTOService.jobTypeTransitionDataSourceType(eScheduleJobType, jdbcInfo.getVersion());
    ISourceDTO iSourceDTO = Engine2DTOService.get(tenantId, userId, dataSourceType.getVal(), schema, jdbcInfo);
    IClient client = ClientCache.getClient(iSourceDTO.getSourceType());
    List<Map<String, Object>> list = null;
    iSourceDTO.setConnection(connection);
    // Execute any variable (session) SQL statements first, then run the query
    if (CollectionUtils.isNotEmpty(variables)) {
        variables.forEach(variable -> client.executeSqlWithoutResultSet(iSourceDTO, SqlQueryDTO.builder().sql(variable).limit(jdbcInfo.getMaxRows()).queryTimeout(jdbcInfo.getQueryTimeout()).build()));
    }
    list = client.executeQuery(iSourceDTO, SqlQueryDTO.builder().sql(sql).limit(jdbcInfo.getMaxRows()).queryTimeout(jdbcInfo.getQueryTimeout()).build());
    LOGGER.info("集群执行SQL查询,tenantId:{},userId:{},jobType:{},schema:{},sql:{}", tenantId, userId, eScheduleJobType.getType(), schema, sql);
    // The data source plugin returns rows as List<Map>; convert to the expected List<List> layout with the column names as the first row
    if (CollectionUtils.isNotEmpty(list)) {
        returnList.add(new ArrayList<>(list.get(0).keySet()));
        for (Map<String, Object> result : list) {
            returnList.add(new ArrayList<>(result.values()));
        }
    }
    return returnList;
}
Also used : ArrayList(java.util.ArrayList) IClient(com.dtstack.dtcenter.loader.client.IClient) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) ArrayList(java.util.ArrayList) List(java.util.List) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO) Map(java.util.Map) JdbcInfo(com.dtstack.taier.common.engine.JdbcInfo)
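
For reference, the method returns the result set as a list of rows whose first row holds the column names. A minimal, hypothetical consumer of that shape (not part of the Taier sources) could look like this:

import java.util.List;

public class QueryResultPrinter {

    /**
     * Prints a result produced by executeQueryWithVariables:
     * row 0 carries the column names, the remaining rows carry the values.
     */
    public static void print(List<List<Object>> result) {
        if (result == null || result.isEmpty()) {
            System.out.println("empty result");
            return;
        }
        // header row
        System.out.println(join(result.get(0)));
        // data rows
        for (List<Object> row : result.subList(1, result.size())) {
            System.out.println(join(row));
        }
    }

    private static String join(List<Object> row) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < row.size(); i++) {
            if (i > 0) {
                sb.append(" | ");
            }
            sb.append(row.get(i));
        }
        return sb.toString();
    }
}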

Example 7 with DataSourceType

use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.

the class DatasourceService method setPluginDataSourceInfo.

private void setPluginDataSourceInfo(JSONObject plugin, Long tenantId, Integer createModel) {
    String pluginName = plugin.getString("name");
    JSONObject param = plugin.getJSONObject("parameter");
    if (PluginName.MySQLD_R.equals(pluginName)) {
        JSONArray connections = param.getJSONArray("connection");
        for (int i = 0; i < connections.size(); i++) {
            JSONObject conn = connections.getJSONObject(i);
            if (!conn.containsKey("sourceId")) {
                continue;
            }
            BatchDataSource source = getOne(conn.getLong("sourceId"));
            JSONObject json = JSONObject.parseObject(source.getDataJson());
            replaceDataSourceInfoByCreateModel(conn, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
            replaceDataSourceInfoByCreateModel(conn, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
            replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", Collections.singletonList(JsonUtils.getStrFromJson(json, JDBC_URL)), createModel);
        }
    } else {
        if (!param.containsKey("sourceIds")) {
            return;
        }
        List<Long> sourceIds = param.getJSONArray("sourceIds").toJavaList(Long.class);
        if (CollectionUtils.isEmpty(sourceIds)) {
            return;
        }
        BatchDataSource source = getOne(sourceIds.get(0));
        JSONObject json = JSON.parseObject(source.getDataJson());
        Integer sourceType = source.getType();
        if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType)) && !DataSourceType.HIVE.getVal().equals(sourceType) && !DataSourceType.HIVE3X.getVal().equals(sourceType) && !DataSourceType.HIVE1X.getVal().equals(sourceType) && !DataSourceType.IMPALA.getVal().equals(sourceType) && !DataSourceType.SparkThrift2_1.getVal().equals(sourceType) && !DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
            JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
            if (conn.get("jdbcUrl") instanceof String) {
                replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
            } else {
                replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", Arrays.asList(JsonUtils.getStrFromJson(json, JDBC_URL)), createModel);
            }
        } else if (DataSourceType.HIVE.getVal().equals(sourceType) || DataSourceType.HDFS.getVal().equals(sourceType) || DataSourceType.HIVE1X.getVal().equals(sourceType) || DataSourceType.HIVE3X.getVal().equals(sourceType) || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
            if (DataSourceType.HIVE.getVal().equals(sourceType) || DataSourceType.HIVE3X.getVal().equals(sourceType) || DataSourceType.HIVE1X.getVal().equals(sourceType) || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
                if (param.containsKey("connection")) {
                    JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                    replaceDataSourceInfoByCreateModel(conn, JDBC_URL, JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
                }
            }
            // Non-meta data sources: take hadoopConf from the high-availability configuration
            if (0 == source.getIsDefault()) {
                replaceDataSourceInfoByCreateModel(param, "defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS).trim(), createModel);
                String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
                if (StringUtils.isNotBlank(hadoopConfig)) {
                    replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
                }
                setSftpConfig(source.getId(), json, tenantId, param, HADOOP_CONFIG, false);
            } else {
                // Meta data sources: take the configuration from the console,
                // always fetching the latest configuration
                String consoleHadoopConfig = this.getConsoleHadoopConfig(tenantId);
                if (StringUtils.isNotBlank(consoleHadoopConfig)) {
                    // Replace with the new path (fix for running from the page)
                    JSONArray connections = param.getJSONArray("connection");
                    if ((DataSourceType.HIVE.getVal().equals(sourceType) || DataSourceType.HIVE1X.getVal().equals(sourceType) || DataSourceType.HIVE3X.getVal().equals(sourceType) || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) && Objects.nonNull(connections)) {
                        JSONObject conn = connections.getJSONObject(0);
                        String hiveTable = conn.getJSONArray("table").get(0).toString();
                        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
                        String hiveTablePath = getHiveTablePath(sourceType, hiveTable, json, kerberosConfig);
                        if (StringUtils.isNotEmpty(hiveTablePath)) {
                            replaceDataSourceInfoByCreateModel(param, "path", hiveTablePath.trim(), createModel);
                        }
                    }
                    replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(consoleHadoopConfig), createModel);
                    JSONObject hadoopConfJson = JSONObject.parseObject(consoleHadoopConfig);
                    String defaultFs = JsonUtils.getStrFromJson(hadoopConfJson, "fs.defaultFS");
                    // Replace defaultFS
                    replaceDataSourceInfoByCreateModel(param, "defaultFS", defaultFs.trim(), createModel);
                } else {
                    String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
                    if (StringUtils.isNotBlank(hadoopConfig)) {
                        replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
                    }
                }
                setDefaultHadoopSftpConfig(json, tenantId, param);
            }
        } else if (DataSourceType.HBASE.getVal().equals(sourceType)) {
            String jsonStr = json.getString(HBASE_CONFIG);
            Map jsonMap = new HashMap();
            if (StringUtils.isNotEmpty(jsonStr)) {
                try {
                    jsonMap = objectMapper.readValue(jsonStr, Map.class);
                } catch (IOException e) {
                    LOGGER.error("", e);
                }
            }
            replaceDataSourceInfoByCreateModel(param, HBASE_CONFIG, jsonMap, createModel);
            if (TaskCreateModelType.GUIDE.getType().equals(createModel)) {
                setSftpConfig(source.getId(), json, tenantId, param, HBASE_CONFIG, false);
            }
        } else if (DataSourceType.FTP.getVal().equals(sourceType)) {
            if (json != null) {
                json.entrySet().forEach(bean -> {
                    replaceDataSourceInfoByCreateModel(param, bean.getKey(), bean.getValue(), createModel);
                });
            }
        } else if (DataSourceType.MAXCOMPUTE.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "accessId", json.get("accessId"), createModel);
            replaceDataSourceInfoByCreateModel(param, "accessKey", json.get("accessKey"), createModel);
            replaceDataSourceInfoByCreateModel(param, "project", json.get("project"), createModel);
            replaceDataSourceInfoByCreateModel(param, "endPoint", json.get("endPoint"), createModel);
        } else if ((DataSourceType.ES.getVal().equals(sourceType))) {
            replaceDataSourceInfoByCreateModel(param, "address", json.get("address"), createModel);
        } else if (DataSourceType.REDIS.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "hostPort", JsonUtils.getStrFromJson(json, "hostPort"), createModel);
            replaceDataSourceInfoByCreateModel(param, "database", json.getIntValue("database"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
        } else if (DataSourceType.MONGODB.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, JDBC_HOSTPORTS, JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS), createModel);
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, "username"), createModel);
            replaceDataSourceInfoByCreateModel(param, "database", JsonUtils.getStrFromJson(json, "database"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
        } else if (DataSourceType.Kudu.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "masterAddresses", JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS), createModel);
            replaceDataSourceInfoByCreateModel(param, "others", JsonUtils.getStrFromJson(json, "others"), createModel);
        } else if (DataSourceType.IMPALA.getVal().equals(sourceType)) {
            String tableLocation = param.getString(TableLocationType.key());
            replaceDataSourceInfoByCreateModel(param, "dataSourceType", DataSourceType.IMPALA.getVal(), createModel);
            String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
            if (StringUtils.isNotBlank(hadoopConfig)) {
                replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
            }
            if (TableLocationType.HIVE.getValue().equals(tableLocation)) {
                replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
                replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
                replaceDataSourceInfoByCreateModel(param, "defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS), createModel);
                if (param.containsKey("connection")) {
                    JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                    replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
                }
            }
        } else if (DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
            replaceInceptorDataSource(param, json, createModel, source, tenantId);
        } else if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, "username"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
            if (param.containsKey("connection")) {
                JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                String url = JsonUtils.getStrFromJson(json, "url");
                replaceDataSourceInfoByCreateModel(conn, "url", Lists.newArrayList(url), createModel);
                replaceDataSourceInfoByCreateModel(conn, "measurement", conn.getJSONArray("table"), createModel);
                replaceDataSourceInfoByCreateModel(conn, "database", conn.getString("schema"), createModel);
            }
        }
    }
}
Also used : BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) HadoopConf(com.dtstack.taier.develop.utils.develop.common.HadoopConf) DefaultSetting(com.dtstack.taier.develop.utils.develop.sync.template.DefaultSetting) StringUtils(org.apache.commons.lang.StringUtils) Arrays(java.util.Arrays) SourceDTOType(com.dtstack.taier.develop.enums.develop.SourceDTOType) ISourceDTO(com.dtstack.dtcenter.loader.dto.source.ISourceDTO) Autowired(org.springframework.beans.factory.annotation.Autowired) DtStringUtil(com.dtstack.taier.pluginapi.util.DtStringUtil) Matcher(java.util.regex.Matcher) HBaseReader(com.dtstack.taier.develop.utils.develop.sync.template.HBaseReader) Pair(org.apache.commons.lang3.tuple.Pair) FormNames(com.dtstack.taier.common.constant.FormNames) SftpException(com.jcraft.jsch.SftpException) Map(java.util.Map) EDataSourcePermission(com.dtstack.taier.develop.enums.develop.EDataSourcePermission) AwsS3Reader(com.dtstack.taier.develop.utils.develop.sync.template.AwsS3Reader) MongoDbReader(com.dtstack.taier.develop.utils.develop.sync.template.MongoDbReader) HDFSWriter(com.dtstack.taier.develop.utils.develop.sync.template.HDFSWriter) CarbonDataWriter(com.dtstack.taier.develop.utils.develop.sync.template.CarbonDataWriter) OdpsReader(com.dtstack.taier.develop.utils.develop.sync.template.OdpsReader) Strings(com.dtstack.taier.common.util.Strings) ColumnType(com.dtstack.taier.develop.utils.develop.sync.format.ColumnType) DsFormField(com.dtstack.taier.dao.domain.DsFormField) RDBMSSourceType(com.dtstack.taier.develop.enums.develop.RDBMSSourceType) Set(java.util.Set) DataBaseType(com.dtstack.dtcenter.loader.source.DataBaseType) FtpReader(com.dtstack.taier.develop.utils.develop.sync.template.FtpReader) SqlQueryDTO(com.dtstack.dtcenter.loader.dto.SqlQueryDTO) Engine2DTOService(com.dtstack.taier.develop.utils.develop.service.impl.Engine2DTOService) Setting(com.dtstack.taier.develop.common.template.Setting) DBUtil(com.dtstack.dtcenter.loader.utils.DBUtil) JdbcInfo(com.dtstack.taier.common.engine.JdbcInfo) EsReader(com.dtstack.taier.develop.utils.develop.sync.template.EsReader) EnvironmentContext(com.dtstack.taier.common.env.EnvironmentContext) IClient(com.dtstack.dtcenter.loader.client.IClient) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) BatchTaskParamService(com.dtstack.taier.develop.service.develop.impl.BatchTaskParamService) SqlFormatter(com.dtstack.taier.develop.sql.formate.SqlFormatter) ArrayList(java.util.ArrayList) DataSourceUtils(com.dtstack.taier.common.util.DataSourceUtils) LinkedHashMap(java.util.LinkedHashMap) JSONArray(com.alibaba.fastjson.JSONArray) Lists(com.google.common.collect.Lists) HadoopConfTool(com.dtstack.dtcenter.loader.kerberos.HadoopConfTool) EsWriter(com.dtstack.taier.develop.utils.develop.sync.template.EsWriter) Service(org.springframework.stereotype.Service) OdpsWriter(com.dtstack.taier.develop.utils.develop.sync.template.OdpsWriter) ClusterService(com.dtstack.taier.scheduler.service.ClusterService) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) ErrorCode(com.dtstack.taier.common.exception.ErrorCode) HiveWriter(com.dtstack.taier.develop.utils.develop.sync.template.HiveWriter) CreateTableSqlParseUtil(com.dtstack.taier.develop.utils.develop.sync.util.CreateTableSqlParseUtil) FileUtils(org.apache.commons.io.FileUtils) IOException(java.io.IOException) SqlFormatUtil(com.dtstack.taier.develop.utils.develop.common.util.SqlFormatUtil) File(java.io.File) JSON(com.alibaba.fastjson.JSON) 
RedisWriter(com.dtstack.taier.develop.utils.develop.sync.template.RedisWriter) ADBForPGUtil(com.dtstack.taier.develop.utils.develop.sync.util.ADBForPGUtil) PublicUtil(com.dtstack.taier.common.util.PublicUtil) BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) HiveReader(com.dtstack.taier.develop.utils.develop.sync.template.HiveReader) AwsS3Writer(com.dtstack.taier.develop.utils.develop.sync.template.AwsS3Writer) PostgreSqlWriterFormat(com.dtstack.taier.develop.utils.develop.sync.format.writer.PostgreSqlWriterFormat) OdpsBase(com.dtstack.taier.develop.utils.develop.sync.template.OdpsBase) Connection(java.sql.Connection) Date(java.util.Date) HBaseWriter(com.dtstack.taier.develop.utils.develop.sync.template.HBaseWriter) DataSourceVO(com.dtstack.taier.develop.dto.devlop.DataSourceVO) LoggerFactory(org.slf4j.LoggerFactory) HDFSReader(com.dtstack.taier.develop.utils.develop.sync.template.HDFSReader) CarbonDataReader(com.dtstack.taier.develop.utils.develop.sync.template.CarbonDataReader) RDBWriter(com.dtstack.taier.develop.utils.develop.sync.template.RDBWriter) ColumnMetaDTO(com.dtstack.dtcenter.loader.dto.ColumnMetaDTO) Locale(java.util.Locale) SyncBuilderFactory(com.dtstack.taier.develop.utils.develop.sync.handler.SyncBuilderFactory) Asserts(com.dtstack.taier.develop.utils.Asserts) InceptorWriter(com.dtstack.taier.develop.utils.develop.sync.template.InceptorWriter) IKerberos(com.dtstack.dtcenter.loader.client.IKerberos) MongoDbWriter(com.dtstack.taier.develop.utils.develop.sync.template.MongoDbWriter) PubSvcDefineException(com.dtstack.taier.common.exception.PubSvcDefineException) Reader(com.dtstack.taier.develop.common.template.Reader) ClientCache(com.dtstack.dtcenter.loader.client.ClientCache) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) TaskResourceParam(com.dtstack.taier.develop.dto.devlop.TaskResourceParam) Objects(java.util.Objects) List(java.util.List) Optional(java.util.Optional) JSONObject(com.alibaba.fastjson.JSONObject) PluginName(com.dtstack.taier.develop.utils.develop.sync.job.PluginName) DataSourceTypeEnum(com.dtstack.taier.common.enums.DataSourceTypeEnum) FtpWriter(com.dtstack.taier.develop.utils.develop.sync.template.FtpWriter) Pattern(java.util.regex.Pattern) BeanUtils(org.springframework.beans.BeanUtils) TypeFormat(com.dtstack.taier.develop.utils.develop.sync.format.TypeFormat) HashMap(java.util.HashMap) HashSet(java.util.HashSet) ComponentTypeDataSourceTypeMapping(com.dtstack.taier.develop.utils.develop.mapping.ComponentTypeDataSourceTypeMapping) TaskCreateModelType(com.dtstack.taier.develop.enums.develop.TaskCreateModelType) CollectionUtils(org.apache.commons.collections.CollectionUtils) Writer(com.dtstack.taier.develop.common.template.Writer) HiveWriterFormat(com.dtstack.taier.develop.utils.develop.sync.format.writer.HiveWriterFormat) DtCenterDefException(com.dtstack.taier.common.exception.DtCenterDefException) RDBBase(com.dtstack.taier.develop.utils.develop.sync.template.RDBBase) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) MapUtils(org.apache.commons.collections.MapUtils) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) DsInfo(com.dtstack.taier.dao.domain.DsInfo) Maps(com.google.common.collect.Maps) TableLocationType(com.dtstack.taier.develop.enums.develop.TableLocationType) EComponentType(com.dtstack.taier.common.enums.EComponentType) InfluxDBReader(com.dtstack.taier.develop.utils.develop.sync.template.InfluxDBReader) JobTemplate(com.dtstack.taier.develop.utils.develop.sync.job.JobTemplate) 
StringJoiner(java.util.StringJoiner) RDBReader(com.dtstack.taier.develop.utils.develop.sync.template.RDBReader) DataSourceDataBaseType(com.dtstack.taier.develop.enums.develop.DataSourceDataBaseType) Collections(java.util.Collections) JsonUtils(com.dtstack.taier.common.util.JsonUtils) Transactional(org.springframework.transaction.annotation.Transactional) Assert(org.springframework.util.Assert) JSONObject(com.alibaba.fastjson.JSONObject) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) JSONArray(com.alibaba.fastjson.JSONArray) IOException(java.io.IOException) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap)
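
The repeated chains of DataSourceType.X.getVal().equals(sourceType) checks above are easy to misread. Below is a small sketch (illustrative only, not taken from the Taier sources) of expressing the same membership tests with a precomputed set of type values:

import com.dtstack.dtcenter.loader.source.DataSourceType;

import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;

public final class HiveLikeSourceTypes {

    // The Hive-family types that share the jdbcUrl/hadoopConfig handling in setPluginDataSourceInfo
    private static final Set<Integer> HIVE_LIKE = Arrays.stream(new DataSourceType[] {
            DataSourceType.HIVE, DataSourceType.HIVE1X, DataSourceType.HIVE3X, DataSourceType.SparkThrift2_1 })
            .map(DataSourceType::getVal)
            .collect(Collectors.toSet());

    private HiveLikeSourceTypes() {
    }

    /** True when the integer type code belongs to the Hive-like family. */
    public static boolean isHiveLike(Integer sourceType) {
        return sourceType != null && HIVE_LIKE.contains(sourceType);
    }
}

With such a helper the long else-if conditions collapse to calls like isHiveLike(sourceType), which keeps the branching readable as further Hive-compatible types are added.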

Example 8 with DataSourceType

use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.

the class BatchHadoopJobExeService method readyForSyncImmediatelyJob.

@Override
public Map<String, Object> readyForSyncImmediatelyJob(BatchTask batchTask, Long tenantId, Boolean isRoot) {
    if (!batchTask.getTaskType().equals(EScheduleJobType.SYNC.getVal())) {
        throw new RdosDefineException("只支持同步任务直接运行");
    }
    Map<String, Object> actionParam = Maps.newHashMap();
    try {
        String taskParams = batchTask.getTaskParams();
        List<BatchTaskParam> taskParamsToReplace = batchTaskParamService.getTaskParam(batchTask.getId());
        JSONObject syncJob = JSON.parseObject(Base64Util.baseDecode(batchTask.getSqlText()));
        taskParams = replaceSyncParll(taskParams, parseSyncChannel(syncJob));
        String job = syncJob.getString("job");
        // In guide (wizard) mode, fill in the data source info based on the sourceId in the job so that every run picks up the latest connection info
        job = datasourceService.setJobDataSourceInfo(job, tenantId, syncJob.getIntValue("createModel"));
        batchTaskParamService.checkParams(batchTaskParamService.checkSyncJobParams(job), taskParamsToReplace);
        String name = "run_sync_task_" + batchTask.getName() + "_" + System.currentTimeMillis();
        String taskExeArgs = String.format(JOB_ARGS_TEMPLATE, name, job);
        actionParam.put("taskSourceId", batchTask.getId());
        actionParam.put("taskType", EScheduleJobType.SYNC.getVal());
        actionParam.put("name", name);
        actionParam.put("computeType", batchTask.getComputeType());
        actionParam.put("sqlText", "");
        actionParam.put("taskParams", taskParams);
        actionParam.put("tenantId", tenantId);
        actionParam.put("sourceType", SourceType.TEMP_QUERY.getType());
        actionParam.put("isFailRetry", false);
        actionParam.put("maxRetryNum", 0);
        actionParam.put("job", job);
        actionParam.put("taskParamsToReplace", JSON.toJSONString(taskParamsToReplace));
        DataSourceType writerDataSourceType = getSyncJobWriterDataSourceType(job);
        if (Objects.nonNull(writerDataSourceType)) {
            actionParam.put("dataSourceType", writerDataSourceType.getVal());
        }
        if (Objects.nonNull(taskExeArgs)) {
            actionParam.put("exeArgs", taskExeArgs);
        }
    } catch (Exception e) {
        throw new RdosDefineException(String.format("创建数据同步job失败: %s", e.getMessage()), e);
    }
    return actionParam;
}
Also used : JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) BatchTaskParam(com.dtstack.taier.dao.domain.BatchTaskParam) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) IOException(java.io.IOException)
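
getSyncJobWriterDataSourceType(job) is referenced above but not shown. The following is only a rough sketch of how a writer type might be derived from a sync job JSON; the job layout ({"job":{"content":[{"writer":{"name":...}}]}}) and the name-to-type mapping are assumptions, not the actual Taier implementation:

import com.alibaba.fastjson.JSONObject;
import com.dtstack.dtcenter.loader.source.DataSourceType;

public class SyncJobWriterTypeSketch {

    /**
     * Illustrative only: reads the first writer plugin name from a DataX-style
     * sync job JSON and maps two well-known plugin names to a DataSourceType.
     */
    public static DataSourceType guessWriterType(String jobJson) {
        JSONObject writer = JSONObject.parseObject(jobJson)
                .getJSONObject("job")
                .getJSONArray("content")
                .getJSONObject(0)
                .getJSONObject("writer");
        String name = writer.getString("name");
        if ("hivewriter".equalsIgnoreCase(name)) {
            return DataSourceType.HIVE;
        }
        if ("hdfswriter".equalsIgnoreCase(name)) {
            return DataSourceType.HDFS;
        }
        // unknown writer plugin: let the caller skip the dataSourceType field
        return null;
    }
}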

Aggregations

DataSourceType (com.dtstack.dtcenter.loader.source.DataSourceType): 8 uses
JSONObject (com.alibaba.fastjson.JSONObject): 7 uses
RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException): 5 uses
IClient (com.dtstack.dtcenter.loader.client.IClient): 4 uses
ISourceDTO (com.dtstack.dtcenter.loader.dto.source.ISourceDTO): 4 uses
JdbcInfo (com.dtstack.taier.common.engine.JdbcInfo): 3 uses
IOException (java.io.IOException): 3 uses
ArrayList (java.util.ArrayList): 3 uses
List (java.util.List): 3 uses
Map (java.util.Map): 3 uses
JSONArray (com.alibaba.fastjson.JSONArray): 2 uses
ColumnMetaDTO (com.dtstack.dtcenter.loader.dto.ColumnMetaDTO): 2 uses
DtCenterDefException (com.dtstack.taier.common.exception.DtCenterDefException): 2 uses
PubSvcDefineException (com.dtstack.taier.common.exception.PubSvcDefineException): 2 uses
BatchDataSource (com.dtstack.taier.dao.domain.BatchDataSource): 2 uses
JSON (com.alibaba.fastjson.JSON): 1 use
ClientCache (com.dtstack.dtcenter.loader.client.ClientCache): 1 use
IKerberos (com.dtstack.dtcenter.loader.client.IKerberos): 1 use
ITable (com.dtstack.dtcenter.loader.client.ITable): 1 use
SqlQueryDTO (com.dtstack.dtcenter.loader.dto.SqlQueryDTO): 1 use