use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.
the class JdbcServiceImpl method executeQueryWithVariables.
/**
 * Execute a query.
 *
 * @param tenantId         tenant id
 * @param userId           user id
 * @param eScheduleJobType job type, used to resolve the JDBC/plugin configuration
 * @param schema           database or schema to query against
 * @param sql              the SQL statement to execute
 * @param variables        variable statements (e.g. SET statements) executed before the query
 * @param connection       an existing connection to reuse
 * @return the result set: the first row holds the column names, the following rows hold the values
 */
@Override
public List<List<Object>> executeQueryWithVariables(Long tenantId, Long userId, EScheduleJobType eScheduleJobType, String schema, String sql, List<String> variables, Connection connection) {
    List<List<Object>> returnList = new ArrayList<>();
    JdbcInfo jdbcInfo = Engine2DTOService.getJdbcInfo(tenantId, userId, eScheduleJobType);
    DataSourceType dataSourceType = Engine2DTOService.jobTypeTransitionDataSourceType(eScheduleJobType, jdbcInfo.getVersion());
    ISourceDTO iSourceDTO = Engine2DTOService.get(tenantId, userId, dataSourceType.getVal(), schema, jdbcInfo);
    IClient client = ClientCache.getClient(iSourceDTO.getSourceType());
    iSourceDTO.setConnection(connection);
    // Execute the variable SQL statements (e.g. SET statements) before the main query
    if (CollectionUtils.isNotEmpty(variables)) {
        variables.forEach(variable -> client.executeSqlWithoutResultSet(iSourceDTO,
                SqlQueryDTO.builder().sql(variable).limit(jdbcInfo.getMaxRows()).queryTimeout(jdbcInfo.getQueryTimeout()).build()));
    }
    List<Map<String, Object>> list = client.executeQuery(iSourceDTO,
            SqlQueryDTO.builder().sql(sql).limit(jdbcInfo.getMaxRows()).queryTimeout(jdbcInfo.getQueryTimeout()).build());
    LOGGER.info("execute SQL query on cluster, tenantId:{}, userId:{}, jobType:{}, schema:{}, sql:{}",
            tenantId, userId, eScheduleJobType.getType(), schema, sql);
    // The pluginized data source returns rows as maps; convert them into the expected
    // list-of-lists shape, with the column names as the first row
    if (CollectionUtils.isNotEmpty(list)) {
        returnList.add(new ArrayList<>(list.get(0).keySet()));
        for (Map<String, Object> result : list) {
            returnList.add(new ArrayList<>(result.values()));
        }
    }
    return returnList;
}
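A minimal caller sketch, not taken from Taier itself: the tenant and user ids, the job type, the SET statement, and the injected jdbcServiceImpl reference are illustrative assumptions, and the Connection is assumed to be obtained elsewhere. It shows how the header-row-first result shape returned above can be consumed.

// Illustrative usage only: ids, job type, SQL, and the injected service reference are assumptions.
List<String> variables = Collections.singletonList("SET hive.execution.engine=spark");
List<List<Object>> rows = jdbcServiceImpl.executeQueryWithVariables(
        1L,                              // tenantId (example value)
        1L,                              // userId (example value)
        EScheduleJobType.SPARK_SQL,      // job type, resolved to a DataSourceType internally
        "dev_db",                        // schema
        "SELECT id, name FROM user_info LIMIT 10",
        variables,
        connection);                     // an already-opened Connection
List<Object> header = rows.get(0);       // first row holds the column names
for (List<Object> row : rows.subList(1, rows.size())) {
    System.out.println(header + " -> " + row);
}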
use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.
the class DatasourceService method setPluginDataSourceInfo.
private void setPluginDataSourceInfo(JSONObject plugin, Long tenantId, Integer createModel) {
    String pluginName = plugin.getString("name");
    JSONObject param = plugin.getJSONObject("parameter");
    if (PluginName.MySQLD_R.equals(pluginName)) {
        JSONArray connections = param.getJSONArray("connection");
        for (int i = 0; i < connections.size(); i++) {
            JSONObject conn = connections.getJSONObject(i);
            if (!conn.containsKey("sourceId")) {
                continue;
            }
            BatchDataSource source = getOne(conn.getLong("sourceId"));
            JSONObject json = JSONObject.parseObject(source.getDataJson());
            replaceDataSourceInfoByCreateModel(conn, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
            replaceDataSourceInfoByCreateModel(conn, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
            replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", Collections.singletonList(JsonUtils.getStrFromJson(json, JDBC_URL)), createModel);
        }
    } else {
        if (!param.containsKey("sourceIds")) {
            return;
        }
        List<Long> sourceIds = param.getJSONArray("sourceIds").toJavaList(Long.class);
        if (CollectionUtils.isEmpty(sourceIds)) {
            return;
        }
        BatchDataSource source = getOne(sourceIds.get(0));
        JSONObject json = JSON.parseObject(source.getDataJson());
        Integer sourceType = source.getType();
        if (Objects.nonNull(RDBMSSourceType.getByDataSourceType(sourceType))
                && !DataSourceType.HIVE.getVal().equals(sourceType)
                && !DataSourceType.HIVE3X.getVal().equals(sourceType)
                && !DataSourceType.HIVE1X.getVal().equals(sourceType)
                && !DataSourceType.IMPALA.getVal().equals(sourceType)
                && !DataSourceType.SparkThrift2_1.getVal().equals(sourceType)
                && !DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
            JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
            if (conn.get("jdbcUrl") instanceof String) {
                replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
            } else {
                replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", Arrays.asList(JsonUtils.getStrFromJson(json, JDBC_URL)), createModel);
            }
        } else if (DataSourceType.HIVE.getVal().equals(sourceType) || DataSourceType.HDFS.getVal().equals(sourceType)
                || DataSourceType.HIVE1X.getVal().equals(sourceType) || DataSourceType.HIVE3X.getVal().equals(sourceType)
                || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
            if (DataSourceType.HIVE.getVal().equals(sourceType) || DataSourceType.HIVE3X.getVal().equals(sourceType)
                    || DataSourceType.HIVE1X.getVal().equals(sourceType) || DataSourceType.SparkThrift2_1.getVal().equals(sourceType)) {
                if (param.containsKey("connection")) {
                    JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                    replaceDataSourceInfoByCreateModel(conn, JDBC_URL, JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
                }
            }
            // Non-meta data sources take hadoopConf from the high-availability configuration
            if (0 == source.getIsDefault()) {
                replaceDataSourceInfoByCreateModel(param, "defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS).trim(), createModel);
                String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
                if (StringUtils.isNotBlank(hadoopConfig)) {
                    replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
                }
                setSftpConfig(source.getId(), json, tenantId, param, HADOOP_CONFIG, false);
            } else {
                // Meta data sources read their configuration from the console
                // Fetch the latest configuration
                String consoleHadoopConfig = this.getConsoleHadoopConfig(tenantId);
                if (StringUtils.isNotBlank(consoleHadoopConfig)) {
                    // Replace with the new path (fix for running from the page)
                    JSONArray connections = param.getJSONArray("connection");
                    if ((DataSourceType.HIVE.getVal().equals(sourceType) || DataSourceType.HIVE1X.getVal().equals(sourceType)
                            || DataSourceType.HIVE3X.getVal().equals(sourceType) || DataSourceType.SparkThrift2_1.getVal().equals(sourceType))
                            && Objects.nonNull(connections)) {
                        JSONObject conn = connections.getJSONObject(0);
                        String hiveTable = conn.getJSONArray("table").get(0).toString();
                        Map<String, Object> kerberosConfig = fillKerberosConfig(source.getId());
                        String hiveTablePath = getHiveTablePath(sourceType, hiveTable, json, kerberosConfig);
                        if (StringUtils.isNotEmpty(hiveTablePath)) {
                            replaceDataSourceInfoByCreateModel(param, "path", hiveTablePath.trim(), createModel);
                        }
                    }
                    replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(consoleHadoopConfig), createModel);
                    JSONObject hadoopConfJson = JSONObject.parseObject(consoleHadoopConfig);
                    String defaultFs = JsonUtils.getStrFromJson(hadoopConfJson, "fs.defaultFS");
                    // Replace defaultFS
                    replaceDataSourceInfoByCreateModel(param, "defaultFS", defaultFs.trim(), createModel);
                } else {
                    String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
                    if (StringUtils.isNotBlank(hadoopConfig)) {
                        replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
                    }
                }
                setDefaultHadoopSftpConfig(json, tenantId, param);
            }
        } else if (DataSourceType.HBASE.getVal().equals(sourceType)) {
            String jsonStr = json.getString(HBASE_CONFIG);
            Map jsonMap = new HashMap();
            if (StringUtils.isNotEmpty(jsonStr)) {
                try {
                    jsonMap = objectMapper.readValue(jsonStr, Map.class);
                } catch (IOException e) {
                    LOGGER.error("", e);
                }
            }
            replaceDataSourceInfoByCreateModel(param, HBASE_CONFIG, jsonMap, createModel);
            if (TaskCreateModelType.GUIDE.getType().equals(createModel)) {
                setSftpConfig(source.getId(), json, tenantId, param, HBASE_CONFIG, false);
            }
        } else if (DataSourceType.FTP.getVal().equals(sourceType)) {
            if (json != null) {
                json.entrySet().forEach(bean -> replaceDataSourceInfoByCreateModel(param, bean.getKey(), bean.getValue(), createModel));
            }
        } else if (DataSourceType.MAXCOMPUTE.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "accessId", json.get("accessId"), createModel);
            replaceDataSourceInfoByCreateModel(param, "accessKey", json.get("accessKey"), createModel);
            replaceDataSourceInfoByCreateModel(param, "project", json.get("project"), createModel);
            replaceDataSourceInfoByCreateModel(param, "endPoint", json.get("endPoint"), createModel);
        } else if (DataSourceType.ES.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "address", json.get("address"), createModel);
        } else if (DataSourceType.REDIS.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "hostPort", JsonUtils.getStrFromJson(json, "hostPort"), createModel);
            replaceDataSourceInfoByCreateModel(param, "database", json.getIntValue("database"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
        } else if (DataSourceType.MONGODB.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, JDBC_HOSTPORTS, JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS), createModel);
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, "username"), createModel);
            replaceDataSourceInfoByCreateModel(param, "database", JsonUtils.getStrFromJson(json, "database"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
        } else if (DataSourceType.Kudu.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "masterAddresses", JsonUtils.getStrFromJson(json, JDBC_HOSTPORTS), createModel);
            replaceDataSourceInfoByCreateModel(param, "others", JsonUtils.getStrFromJson(json, "others"), createModel);
        } else if (DataSourceType.IMPALA.getVal().equals(sourceType)) {
            String tableLocation = param.getString(TableLocationType.key());
            replaceDataSourceInfoByCreateModel(param, "dataSourceType", DataSourceType.IMPALA.getVal(), createModel);
            String hadoopConfig = JsonUtils.getStrFromJson(json, HADOOP_CONFIG);
            if (StringUtils.isNotBlank(hadoopConfig)) {
                replaceDataSourceInfoByCreateModel(param, HADOOP_CONFIG, JSONObject.parse(hadoopConfig), createModel);
            }
            if (TableLocationType.HIVE.getValue().equals(tableLocation)) {
                replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, JDBC_USERNAME), createModel);
                replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, JDBC_PASSWORD), createModel);
                replaceDataSourceInfoByCreateModel(param, "defaultFS", JsonUtils.getStrFromJson(json, HDFS_DEFAULTFS), createModel);
                if (param.containsKey("connection")) {
                    JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                    replaceDataSourceInfoByCreateModel(conn, "jdbcUrl", JsonUtils.getStrFromJson(json, JDBC_URL), createModel);
                }
            }
        } else if (DataSourceType.INCEPTOR.getVal().equals(sourceType)) {
            replaceInceptorDataSource(param, json, createModel, source, tenantId);
        } else if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
            replaceDataSourceInfoByCreateModel(param, "username", JsonUtils.getStrFromJson(json, "username"), createModel);
            replaceDataSourceInfoByCreateModel(param, "password", JsonUtils.getStrFromJson(json, "password"), createModel);
            if (param.containsKey("connection")) {
                JSONObject conn = param.getJSONArray("connection").getJSONObject(0);
                String url = JsonUtils.getStrFromJson(json, "url");
                replaceDataSourceInfoByCreateModel(conn, "url", Lists.newArrayList(url), createModel);
                replaceDataSourceInfoByCreateModel(conn, "measurement", conn.getJSONArray("table"), createModel);
                replaceDataSourceInfoByCreateModel(conn, "database", conn.getString("schema"), createModel);
            }
        }
    }
}
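For reference, a hand-built sketch of the plugin JSON this method expects for a generic relational reader. The shape is inferred from the branches above; the plugin name, source id, and table name are made up.

// Shape inferred from the code above; ids and names are illustrative.
JSONArray tables = new JSONArray();
tables.add("user_info");

JSONObject conn = new JSONObject();
conn.put("table", tables);                 // jdbcUrl is filled into this connection entry

JSONArray connections = new JSONArray();
connections.add(conn);

JSONArray sourceIds = new JSONArray();
sourceIds.add(42L);                        // id of a stored BatchDataSource

JSONObject parameter = new JSONObject();
parameter.put("sourceIds", sourceIds);
parameter.put("connection", connections);

JSONObject plugin = new JSONObject();
plugin.put("name", "mysqlreader");         // any name other than PluginName.MySQLD_R takes the generic branch
plugin.put("parameter", parameter);

// setPluginDataSourceInfo(plugin, tenantId, createModel) would then look up data source 42,
// parse its dataJson, put username/password on "parameter" and jdbcUrl on the first connection entry.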
use of com.dtstack.dtcenter.loader.source.DataSourceType in project Taier by DTStack.
the class BatchHadoopJobExeService method readyForSyncImmediatelyJob.
@Override
public Map<String, Object> readyForSyncImmediatelyJob(BatchTask batchTask, Long tenantId, Boolean isRoot) {
    if (!batchTask.getTaskType().equals(EScheduleJobType.SYNC.getVal())) {
        throw new RdosDefineException("Only sync tasks can be run directly");
    }
    Map<String, Object> actionParam = Maps.newHashMap();
    try {
        String taskParams = batchTask.getTaskParams();
        List<BatchTaskParam> taskParamsToReplace = batchTaskParamService.getTaskParam(batchTask.getId());
        JSONObject syncJob = JSON.parseObject(Base64Util.baseDecode(batchTask.getSqlText()));
        taskParams = replaceSyncParll(taskParams, parseSyncChannel(syncJob));
        String job = syncJob.getString("job");
        // In wizard mode, fill in the data source info based on the sourceId in the job,
        // so that every run picks up the latest connection information
        job = datasourceService.setJobDataSourceInfo(job, tenantId, syncJob.getIntValue("createModel"));
        batchTaskParamService.checkParams(batchTaskParamService.checkSyncJobParams(job), taskParamsToReplace);
        String name = "run_sync_task_" + batchTask.getName() + "_" + System.currentTimeMillis();
        String taskExeArgs = String.format(JOB_ARGS_TEMPLATE, name, job);
        actionParam.put("taskSourceId", batchTask.getId());
        actionParam.put("taskType", EScheduleJobType.SYNC.getVal());
        actionParam.put("name", name);
        actionParam.put("computeType", batchTask.getComputeType());
        actionParam.put("sqlText", "");
        actionParam.put("taskParams", taskParams);
        actionParam.put("tenantId", tenantId);
        actionParam.put("sourceType", SourceType.TEMP_QUERY.getType());
        actionParam.put("isFailRetry", false);
        actionParam.put("maxRetryNum", 0);
        actionParam.put("job", job);
        actionParam.put("taskParamsToReplace", JSON.toJSONString(taskParamsToReplace));
        DataSourceType writerDataSourceType = getSyncJobWriterDataSourceType(job);
        if (Objects.nonNull(writerDataSourceType)) {
            actionParam.put("dataSourceType", writerDataSourceType.getVal());
        }
        if (Objects.nonNull(taskExeArgs)) {
            actionParam.put("exeArgs", taskExeArgs);
        }
    } catch (Exception e) {
        throw new RdosDefineException(String.format("Failed to create data sync job: %s", e.getMessage()), e);
    }
    return actionParam;
}
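A sketch of how the returned map might be handed off. The surrounding submit step is an assumption for illustration, not Taier's actual scheduling API.

// Illustrative only: how a caller might use the action parameters built above.
Map<String, Object> actionParam = batchHadoopJobExeService.readyForSyncImmediatelyJob(batchTask, tenantId, false);
String actionJson = JSON.toJSONString(actionParam);   // carries job, taskParams, exeArgs, dataSourceType, ...
LOGGER.info("submit immediate sync job, name:{}, dataSourceType:{}",
        actionParam.get("name"), actionParam.get("dataSourceType"));
// actionJson would then be handed to the scheduler/engine for execution (hypothetical step).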