
Example 1 with HadoopConf

Use of com.dtstack.taier.develop.utils.develop.common.HadoopConf in project Taier by DTStack, from the class DatasourceService, method setPluginDataSourceInfo. The method looks up the data source(s) referenced by a sync-job plugin definition and injects their connection details (credentials, JDBC URL, hadoop/HBase configuration, and so on) into the plugin's parameter JSON, with a dedicated branch per data source type.

private void setPluginDataSourceInfo(JSONObject plugin, Long tenantId, Integer createModel) {
    String pluginName = plugin.getString("name");
    JSONObject param = plugin.getJSONObject("parameter");
    if (PluginName.MySQLD_R.equals(pluginName)) {
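        // Distributed MySQL reader (MySQLD_R): each shard has its own connection entry, resolved by sourceId.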
        JSONArray connections = param.getJSONArray("connection");
        for (int i = 0; i < connections.size(); i++) {
            JSONObject conn = connections.getJSONObject(i);
            if (!conn.containsKey("sourceId")) {
                continue;
            }
            BatchDataSource source = getOne(conn.getLong("sourceId"));
            JSONObject json = JSONObject.parseObject(source.getDataJson());
            replaceDataSourceInfoByCreateModel(conn, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
            replaceDataSourceInfoByCreateModel(conn, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
            replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", Collections.singletonList(JsonUtils.getStrFromJson(json, JDBC_URL)), createModel);
        }
    } else {
        if (!param.containsKey("sourceIds")) {
            return;
        }
        List<Long> sourceIds = param.getJSONArray("sourceIds").toJavaList(Long.class);
        if (CollectionUtils.isEmpty(sourceIds)) {
            return;
        }
        BatchDataSource source = getOne(sourceIds.get(0));
        JSONObject json = JSON.parseObject(source.getDataJson());
        Integer sourceType = source.getType();
        if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType))
                && !DataSourceType.HIVE.getVal().equals(sourceType)
                && !DataSourceType.HIVE3X.getVal().equals(sourceType)
                && !DataSourceType.HIVE1X.getVal().equals(sourceType)
                && !DataSourceType.IMPALA.getVal().equals(sourceType)
                && !DataSourceType.SparkThrift2_1.getVal().equals(sourceType)
                && !DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
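            // Plain RDBMS source: inject credentials and the JDBC url from the stored dataJson.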
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
            JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
            if (conn.get("jdbcUrl") instanceof String) {
                replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
            } else {
                replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", Arrays.asList(JsonUtils.getStrFromJson(json, JDBC_URL)), createModel);
            }
        } else if (DataSourceType.HIVE.getVal().equals(sourceType)
                || DataSourceType.HDFS.getVal().equals(sourceType)
                || DataSourceType.HIVE1X.getVal().equals(sourceType)
                || DataSourceType.HIVE3X.getVal().equals(sourceType)
                || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
            if (DataSourceType.HIVE.getVal().equals(sourceType)
                    || DataSourceType.HIVE3X.getVal().equals(sourceType)
                    || DataSourceType.HIVE1X.getVal().equals(sourceType)
                    || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
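                // Hive-like sources also need the jdbcUrl of the first connection replaced.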
                if (param.containsKey("connection")) {
                    JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                    replaceDataSourceInfoByCreateModel(conn, JDBC_URL, JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
                }
            }
            // Non-meta data sources: take hadoopConf from the high-availability configuration
            if (0 == source.getIsDefault()) {
                replaceDataSourceInfoByCreateModel(param, "defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS).trim(), createModel);
                String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
                if (StringUtils.isNotBlank(hadoopConfig)) {
                    replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
                }
                setSftpConfig(source.getId(), json, tenantId, param, HADOOP_CONFIG, false);
            } else {
                // Meta data sources: fetch the latest configuration from the console
                String consoleHadoopConfig = this.getConsoleHadoopConfig(tenantId);
                if (StringUtils.isNotBlank(consoleHadoopConfig)) {
                    // Replace with the new path (fix for running from the page)
                    JSONArray connections = param.getJSONArray("connection");
                    if ((DataSourceType.HIVE.getVal().equals(sourceType)
                            || DataSourceType.HIVE1X.getVal().equals(sourceType)
                            || DataSourceType.HIVE3X.getVal().equals(sourceType)
                            || DataSourceType.SparkThrift2_1.getVal().equals(sourceType))
                            && Objects.nonNull(connections)) {
                        JSONObject conn = connections.getJSONObject(0);
                        String hiveTable = conn.getJSONArray("table").get(0).toString();
                        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
                        String hiveTablePath = getHiveTablePath(sourceType, hiveTable, json, kerberosConfig);
                        if (StringUtils.isNotEmpty(hiveTablePath)) {
                            replaceDataSourceInfoByCreateModel(param, "path", hiveTablePath.trim(), createModel);
                        }
                    }
                    replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(consoleHadoopConfig), createModel);
                    JSONObject hadoopConfJson = JSONObject.parseObject(consoleHadoopConfig);
                    String defaultFs = JsonUtils.getStrFromJson(hadoopConfJson, "fs.defaultFS");
                    // Replace defaultFS
                    replaceDataSourceInfoByCreateModel(param, "defaultFS", defaultFs.trim(), createModel);
                } else {
                    String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
                    if (StringUtils.isNotBlank(hadoopConfig)) {
                        replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
                    }
                }
                setDefaultHadoopSftpConfig(json, tenantId, param);
            }
        } else if (DataSourceType.HBASE.getVal().equals(sourceType)) {
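            // HBase: hbaseConfig is stored as a JSON string; deserialize it into a Map before injecting.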
            String jsonStr = json.getString(HBASE_CONFIG);
            Map<String, Object> jsonMap = new HashMap<>();
            if (StringUtils.isNotEmpty(jsonStr)) {
                try {
                    jsonMap = objectMapper.readValue(jsonStr, Map.class);
                } catch (IOException e) {
                    LOGGER.error("failed to parse hbaseConfig json", e);
                }
            }
            replaceDataSourceInfoByCreateModel(param, HBASE_CONFIG, jsonMap, createModel);
            if (TaskCreateModelType.GUIDE.getType().equals(createModel)) {
                setSftpConfig(source.getId(), json, tenantId, param, HBASE_CONFIG, false);
            }
        } else if (DataSourceType.FTP.getVal().equals(sourceType)) {
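            // FTP: copy every field of the stored dataJson straight into the parameter object.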
            if (json != null) {
                json.entrySet().forEach(bean -> {
                    replaceDataSourceInfoByCreateModel(param, bean.getKey(), bean.getValue(), createModel);
                });
            }
        } else if (DataSourceType.MAXCOMPUTE.getVal().equals(sourceType)) {
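            // MaxCompute (ODPS): inject access credentials, project, and endpoint.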
            replaceDataSourceInfoByCreateModel(param, "accessId", json.get("accessId"), createModel);
            replaceDataSourceInfoByCreateModel(param, "accessKey", json.get("accessKey"), createModel);
            replaceDataSourceInfoByCreateModel(param, "project", json.get("project"), createModel);
            replaceDataSourceInfoByCreateModel(param, "endPoint", json.get("endPoint"), createModel);
        } else if (DataSourceType.ES.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "address", json.get("address"), createModel);
        } else if (DataSourceType.REDIS.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "hostPort", JsonUtils.getStrFromJson(json, "hostPort"), createModel);
            replaceDataSourceInfoByCreateModel(param, "database", json.getIntValue("database"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
        } else if (DataSourceType.MONGODB.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, JDBC_HOSTPORTS, JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS), createModel);
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, "username"), createModel);
            replaceDataSourceInfoByCreateModel(param, "database", JsonUtils.getStrFromJson(json, "database"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
        } else if (DataSourceType.Kudu.getVal().equals(sourceType)) {
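            // Kudu: the stored hostPorts field doubles as the masterAddresses list.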
            replaceDataSourceInfoByCreateModel(param, "masterAddresses", JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS), createModel);
            replaceDataSourceInfoByCreateModel(param, "others", JsonUtils.getStrFromJson(json, "others"), createModel);
        } else if (DataSourceType.IMPALA.getVal().equals(sourceType)) {
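            // Impala: hadoop config is always injected; jdbc credentials and defaultFS only for HIVE-backed tables.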
            String tableLocation = param.getString(TableLocationType.key());
            replaceDataSourceInfoByCreateModel(param, "dataSourceType", DataSourceType.IMPALA.getVal(), createModel);
            String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
            if (StringUtils.isNotBlank(hadoopConfig)) {
                replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
            }
            if (TableLocationType.HIVE.getValue().equals(tableLocation)) {
                replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
                replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
                replaceDataSourceInfoByCreateModel(param, "defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS), createModel);
                if (param.containsKey("connection")) {
                    JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                    replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
                }
            }
        } else if (DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
            replaceInceptorDataSource(param, json, createModel, source, tenantId);
        } else if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
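            // InfluxDB: map the generic connection fields onto InfluxDB's url/measurement/database.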
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, "username"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
            if (param.containsKey("connection")) {
                JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                String url = JsonUtils.getStrFromJson(json, "url");
                replaceDataSourceInfoByCreateModel(conn, "url", Lists.newArrayList(url), createModel);
                replaceDataSourceInfoByCreateModel(conn, "measurement", conn.getJSONArray("table"), createModel);
                replaceDataSourceInfoByCreateModel(conn, "database", conn.getString("schema"), createModel);
            }
        }
    }
}
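
The helper replaceDataSourceInfoByCreateModel is called throughout but not shown on this page. Below is a minimal sketch of its plausible contract, assuming TaskCreateModelType.GUIDE (seen in the imports and in the HBase branch above) means "the wizard drives the config, always overwrite" while the other mode preserves values the user already set; the body is an assumption for illustration, not Taier's actual implementation.

private void replaceDataSourceInfoByCreateModel(JSONObject param, String key, Object value, Integer createModel) {
    // Hypothetical sketch -- the real Taier implementation may differ.
    // In guide mode the linked data source is authoritative, so its value always wins;
    // otherwise a value the user already supplied is left untouched.
    boolean guideMode = TaskCreateModelType.GUIDE.getType().equals(createModel);
    if (!guideMode && param.containsKey(key)) {
        return;
    }
    param.put(key, value);
}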
Also used : JSON(com.alibaba.fastjson.JSON) JSONArray(com.alibaba.fastjson.JSONArray) JSONObject(com.alibaba.fastjson.JSONObject) HadoopConf(com.dtstack.taier.develop.utils.develop.common.HadoopConf) BatchDataSource(com.dtstack.taier.dao.domain.BatchDataSource) JsonUtils(com.dtstack.taier.common.util.JsonUtils) PluginName(com.dtstack.taier.develop.utils.develop.sync.job.PluginName) RDBMSSourceType(com.dtstack.taier.develop.enums.develop.RDBMSSourceType) DataSourceType(com.dtstack.dtcenter.loader.source.DataSourceType) TaskCreateModelType(com.dtstack.taier.develop.enums.develop.TaskCreateModelType) TableLocationType(com.dtstack.taier.develop.enums.develop.TableLocationType) StringUtils(org.apache.commons.lang.StringUtils) CollectionUtils(org.apache.commons.collections.CollectionUtils) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) Lists(com.google.common.collect.Lists) Arrays(java.util.Arrays) Collections(java.util.Collections) HashMap(java.util.HashMap) List(java.util.List) Map(java.util.Map) Objects(java.util.Objects) IOException(java.io.IOException)
