Use of com.dtstack.taier.develop.utils.develop.sync.job.PluginName in project Taier by DTStack.
The example below is the DatasourceService method setPluginDataSourceInfo, which fills a data-sync plugin's "parameter" JSON with credentials and connection details resolved from the referenced data sources, branching on the plugin name and the data source type.
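For orientation, a minimal sketch of the plugin JSON the method inspects. The field names (name, parameter, sourceIds, connection) are the ones the method reads below; the concrete values are hypothetical:

// Hypothetical input shape, values made up for illustration
JSONObject plugin = JSON.parseObject("{"
        + "\"name\": \"mysqlreader\","
        + "\"parameter\": {"
        + "  \"sourceIds\": [42],"
        + "  \"connection\": [{\"jdbcUrl\": [\"jdbc:mysql://host:3306/db\"], \"table\": [\"t\"]}]"
        + "}}");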
private void setPluginDataSourceInfo(JSONObject plugin, Long tenantId, Integer createModel) {
    String pluginName = plugin.getString("name");
    JSONObject param = plugin.getJSONObject("parameter");
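    // MySQLD_R readers declare a sourceId on each connection entry, so every
    // connection is resolved against its own data source.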
    if (PluginName.MySQLD_R.equals(pluginName)) {
        JSONArray connections = param.getJSONArray("connection");
        for (int i = 0; i < connections.size(); i++) {
            JSONObject conn = connections.getJSONObject(i);
            if (!conn.containsKey("sourceId")) {
                continue;
            }
            BatchDataSource source = getOne(conn.getLong("sourceId"));
            JSONObject json = JSONObject.parseObject(source.getDataJson());
            replaceDataSourceInfoByCreateModel(conn, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
            replaceDataSourceInfoByCreateModel(conn, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
            replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", Collections.singletonList(JsonUtils.getStrFromJson(json, JDBC_URL)), createModel);
        }
    } else {
        if (!param.containsKey("sourceIds")) {
            return;
        }
        List<Long> sourceIds = param.getJSONArray("sourceIds").toJavaList(Long.class);
        if (CollectionUtils.isEmpty(sourceIds)) {
            return;
        }
        BatchDataSource source = getOne(sourceIds.get(0));
        JSONObject json = JSON.parseObject(source.getDataJson());
        Integer sourceType = source.getType();
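        // Plain RDBMS sources (anything in RDBMSSourceType except the Hive,
        // Impala, SparkThrift and Inceptor families handled below) only need
        // username, password and jdbcUrl.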
        if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType))
                && !DataSourceType.HIVE.getVal().equals(sourceType)
                && !DataSourceType.HIVE3X.getVal().equals(sourceType)
                && !DataSourceType.HIVE1X.getVal().equals(sourceType)
                && !DataSourceType.IMPALA.getVal().equals(sourceType)
                && !DataSourceType.SparkThrift2_1.getVal().equals(sourceType)
                && !DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
            JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
            if (conn.get("jdbcUrl") instanceof String) {
                replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
            } else {
                replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", Arrays.asList(JsonUtils.getStrFromJson(json, JDBC_URL)), createModel);
            }
        } else if (DataSourceType.HIVE.getVal().equals(sourceType)
                || DataSourceType.HDFS.getVal().equals(sourceType)
                || DataSourceType.HIVE1X.getVal().equals(sourceType)
                || DataSourceType.HIVE3X.getVal().equals(sourceType)
                || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
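            // Hive-family, HDFS and SparkThrift sources: patch the jdbcUrl
            // (Hive-family only), then resolve hadoopConfig/defaultFS either from
            // the source's own JSON (non-meta) or from the console (meta source).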
            if (DataSourceType.HIVE.getVal().equals(sourceType)
                    || DataSourceType.HIVE3X.getVal().equals(sourceType)
                    || DataSourceType.HIVE1X.getVal().equals(sourceType)
                    || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
                if (param.containsKey("connection")) {
                    JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                    replaceDataSourceInfoByCreateModel(conn, JDBC_URL, JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
                }
            }
            // Non-meta data sources take hadoopConf from the HA configuration
            if (0 == source.getIsDefault()) {
                replaceDataSourceInfoByCreateModel(param, "defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS).trim(), createModel);
                String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
                if (StringUtils.isNotBlank(hadoopConfig)) {
                    replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
                }
                setSftpConfig(source.getId(), json, tenantId, param, HADOOP_CONFIG, false);
            } else {
                // Meta data sources take their configuration from the console,
                // so fetch the latest configuration
                String consoleHadoopConfig = this.getConsoleHadoopConfig(tenantId);
                if (StringUtils.isNotBlank(consoleHadoopConfig)) {
                    // Replace with the fresh table path (fix for running from the page)
                    JSONArray connections = param.getJSONArray("connection");
                    if ((DataSourceType.HIVE.getVal().equals(sourceType)
                            || DataSourceType.HIVE1X.getVal().equals(sourceType)
                            || DataSourceType.HIVE3X.getVal().equals(sourceType)
                            || DataSourceType.SparkThrift2_1.getVal().equals(sourceType))
                            && Objects.nonNull(connections)) {
                        JSONObject conn = connections.getJSONObject(0);
                        String hiveTable = conn.getJSONArray("table").get(0).toString();
                        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
                        String hiveTablePath = getHiveTablePath(sourceType, hiveTable, json, kerberosConfig);
                        if (StringUtils.isNotEmpty(hiveTablePath)) {
                            replaceDataSourceInfoByCreateModel(param, "path", hiveTablePath.trim(), createModel);
                        }
                    }
                    replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(consoleHadoopConfig), createModel);
                    JSONObject hadoopConfJson = JSONObject.parseObject(consoleHadoopConfig);
                    String defaultFs = JsonUtils.getStrFromJson(hadoopConfJson, "fs.defaultFS");
                    // Replace defaultFS as well
                    replaceDataSourceInfoByCreateModel(param, "defaultFS", defaultFs.trim(), createModel);
                } else {
                    String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
                    if (StringUtils.isNotBlank(hadoopConfig)) {
                        replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
                    }
                }
                setDefaultHadoopSftpConfig(json, tenantId, param);
            }
        } else if (DataSourceType.HBASE.getVal().equals(sourceType)) {
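            // HBase: hbaseConfig is stored as a JSON string, so deserialize it
            // into a Map before substituting it into the parameter.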
            String jsonStr = json.getString(HBASE_CONFIG);
            Map<String, Object> jsonMap = new HashMap<>();
            if (StringUtils.isNotEmpty(jsonStr)) {
                try {
                    jsonMap = objectMapper.readValue(jsonStr, Map.class);
                } catch (IOException e) {
                    LOGGER.error("failed to parse hbaseConfig", e);
                }
            }
            replaceDataSourceInfoByCreateModel(param, HBASE_CONFIG, jsonMap, createModel);
            if (TaskCreateModelType.GUIDE.getType().equals(createModel)) {
                setSftpConfig(source.getId(), json, tenantId, param, HBASE_CONFIG, false);
            }
        } else if (DataSourceType.FTP.getVal().equals(sourceType)) {
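            // FTP: every field of the stored data JSON is copied into the parameter as-is.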
            if (json != null) {
                json.entrySet().forEach(bean -> {
                    replaceDataSourceInfoByCreateModel(param, bean.getKey(), bean.getValue(), createModel);
                });
            }
        } else if (DataSourceType.MAXCOMPUTE.getVal().equals(sourceType)) {
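            // MaxCompute: copy access credentials, project and endpoint straight from the source JSON.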
replaceDataSourceInfoByCreateModel(param, "accessId", json.get("accessId"), createModel);
replaceDataSourceInfoByCreateModel(param, "accessKey", json.get("accessKey"), createModel);
replaceDataSourceInfoByCreateModel(param, "project", json.get("project"), createModel);
replaceDataSourceInfoByCreateModel(param, "endPoint", json.get("endPoint"), createModel);
} else if ((DataSourceType.ES.getVal().equals(sourceType))) {
replaceDataSourceInfoByCreateModel(param, "address", json.get("address"), createModel);
} else if (DataSourceType.REDIS.getVal().equals(sourceType)) {
replaceDataSourceInfoByCreateModel(param, "hostPort", JsonUtils.getStrFromJson(json, "hostPort"), createModel);
replaceDataSourceInfoByCreateModel(param, "database", json.getIntValue("database"), createModel);
replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
} else if (DataSourceType.MONGODB.getVal().equals(sourceType)) {
replaceDataSourceInfoByCreateModel(param, JDBC_HOSTPORTS, JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS), createModel);
replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, "username"), createModel);
replaceDataSourceInfoByCreateModel(param, "database", JsonUtils.getStrFromJson(json, "database"), createModel);
replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
} else if (DataSourceType.Kudu.getVal().equals(sourceType)) {
replaceDataSourceInfoByCreateModel(param, "masterAddresses", JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS), createModel);
replaceDataSourceInfoByCreateModel(param, "others", JsonUtils.getStrFromJson(json, "others"), createModel);
} else if (DataSourceType.IMPALA.getVal().equals(sourceType)) {
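            // Impala: hadoopConfig is always applied; the JDBC credentials, defaultFS
            // and jdbcUrl are only needed when the table is Hive-backed.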
            String tableLocation = param.getString(TableLocationType.key());
            replaceDataSourceInfoByCreateModel(param, "dataSourceType", DataSourceType.IMPALA.getVal(), createModel);
            String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
            if (StringUtils.isNotBlank(hadoopConfig)) {
                replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
            }
            if (TableLocationType.HIVE.getValue().equals(tableLocation)) {
                replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
                replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
                replaceDataSourceInfoByCreateModel(param, "defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS), createModel);
                if (param.containsKey("connection")) {
                    JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                    replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
                }
            }
        } else if (DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
            replaceInceptorDataSource(param, json, createModel, source, tenantId);
        } else if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
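            // InfluxDB: the connection block reuses the sync job's table/schema
            // fields as the InfluxDB measurement/database.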
replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, "username"), createModel);
replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
if (param.containsKey("connection")) {
JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
String url = JsonUtils.getStrFromJson(json, "url");
replaceDataSourceInfoByCreateModel(conn, "url", Lists.newArrayList(url), createModel);
replaceDataSourceInfoByCreateModel(conn, "measurement", conn.getJSONArray("table"), createModel);
replaceDataSourceInfoByCreateModel(conn, "database", conn.getString("schema"), createModel);
}
}
}
}
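For context, a sketch of a plausible call site: the sync-job builder walks the job's content array and resolves reader and writer separately. The method name setJobDataSourceInfo and the surrounding structure here are illustrative, not copied from the project:

private void setJobDataSourceInfo(String jobStr, Long tenantId, Integer createModel) {
    JSONObject job = JSONObject.parseObject(jobStr);
    JSONArray content = job.getJSONObject("job").getJSONArray("content");
    for (int i = 0; i < content.size(); i++) {
        JSONObject step = content.getJSONObject(i);
        // Resolve data source placeholders for both sides of the sync job.
        setPluginDataSourceInfo(step.getJSONObject("reader"), tenantId, createModel);
        setPluginDataSourceInfo(step.getJSONObject("writer"), tenantId, createModel);
    }
}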