Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack — class DatasourceService, method columnForSyncopate.
/**
 * Returns the columns eligible to serve as a split key (切分键) for data sync.
 * <p>
 * Only integer-like column types of relational databases
 * (mysql / oracle / sqlserver / postgresql, etc.) are supported; other
 * databases are rejected. If an unsupported type is chosen the split-key
 * feature is ignored and the sync falls back to a single channel.
 *
 * @param userId    id of the requesting user (not used in this method)
 * @param sourceId  id of the data source to inspect
 * @param tableName table whose columns are examined; may be empty
 * @param schema    schema the table belongs to
 * @return set of column descriptors (JSON with key/type/comment) usable as split keys
 */
public Set<JSONObject> columnForSyncopate(Long userId, Long sourceId, String tableName, String schema) {
    BatchDataSource source = getOne(sourceId);
    if (Objects.isNull(RDBMSSourceType.getByDataSourceType(source.getType()))
            && !DataSourceType.INFLUXDB.getVal().equals(source.getType())) {
        // FIX: log message previously had a typo ("切分键只支关系型数据库"),
        // inconsistent with the exception message below.
        LOGGER.error("切分键只支持关系型数据库");
        throw new RdosDefineException("切分键只支持关系型数据库");
    }
    if (StringUtils.isEmpty(tableName)) {
        return new HashSet<>();
    }
    // SQLServer: bracket-quote each part of "schema.table" unless already quoted.
    if (DataSourceType.SQLServer.getVal().equals(source.getType()) && tableName.indexOf("[") == -1) {
        // StringBuilder instead of StringBuffer: purely local, no synchronization needed.
        final StringBuilder quoted = new StringBuilder();
        for (final String part : tableName.split("\\.")) {
            quoted.append("[").append(part).append("].");
        }
        tableName = quoted.substring(0, quoted.length() - 1);
    }
    final List<JSONObject> tableColumns = this.getTableColumn(source, tableName, schema);
    if (CollectionUtils.isEmpty(tableColumns)) {
        return Sets.newHashSet();
    }
    final List<String> numbers = getNumericTypesForSource(source.getType());
    // Preserve column order; +1 leaves room for Oracle's extra pseudo key.
    Map<JSONObject, String> candidates = new LinkedHashMap<>(tableColumns.size() + 1);
    for (JSONObject column : tableColumns) {
        candidates.put(column, column.getString(TYPE));
    }
    // Keep only columns whose simplified type is numeric for this database.
    Iterator<Map.Entry<JSONObject, String>> iterator = candidates.entrySet().iterator();
    while (iterator.hasNext()) {
        String type = getSimpleType(iterator.next().getValue());
        if (numbers.contains(type.toUpperCase())) {
            continue;
        }
        if (source.getType().equals(DataSourceType.Oracle.getVal())) {
            // Oracle NUMBER / NUMBER(p) / NUMBER(p,s): integer-like unless scale > 0.
            if ("number".equalsIgnoreCase(type)) {
                continue;
            }
            Matcher numberMatcher1 = NUMBER_PATTERN.matcher(type);
            Matcher numberMatcher2 = NUMBER_PATTERN2.matcher(type);
            if (numberMatcher1.matches()) {
                continue;
            } else if (numberMatcher2.matches()) {
                // group(2) is the scale; a scale <= 0 still denotes an integer type.
                int scale = Integer.parseInt(numberMatcher2.group(2));
                if (scale <= 0) {
                    continue;
                }
            }
        }
        iterator.remove();
    }
    // Oracle always gets a default split key based on ROW_NUMBER().
    if (source.getType().equals(DataSourceType.Oracle.getVal())) {
        JSONObject rowNumberKey = new JSONObject();
        rowNumberKey.put("type", "NUMBER(38,0)");
        rowNumberKey.put("key", "ROW_NUMBER()");
        rowNumberKey.put("comment", "");
        candidates.put(rowNumberKey, "NUMBER(38,0)");
    }
    return candidates.keySet();
}

/**
 * Maps a data source type to the list of column type names considered
 * numeric (and therefore valid as split keys) for that database.
 *
 * @param sourceType value of {@code DataSourceType#getVal()}
 * @return the numeric type-name list for the database
 * @throws RdosDefineException when the type is not a supported relational database
 */
private List<String> getNumericTypesForSource(Integer sourceType) {
    if (DataSourceType.MySQL.getVal().equals(sourceType)
            || DataSourceType.Polardb_For_MySQL.getVal().equals(sourceType)
            || DataSourceType.TiDB.getVal().equals(sourceType)) {
        return MYSQL_NUMBERS;
    } else if (DataSourceType.Oracle.getVal().equals(sourceType)) {
        return ORACLE_NUMBERS;
    } else if (DataSourceType.SQLServer.getVal().equals(sourceType)) {
        return SQLSERVER_NUMBERS;
    } else if (DataSourceType.PostgreSQL.getVal().equals(sourceType)
            || DataSourceType.ADB_FOR_PG.getVal().equals(sourceType)) {
        return POSTGRESQL_NUMBERS;
    } else if (DataSourceType.DB2.getVal().equals(sourceType)) {
        return DB2_NUMBERS;
    } else if (DataSourceType.GBase_8a.getVal().equals(sourceType)) {
        return GBASE_NUMBERS;
    } else if (DataSourceType.Clickhouse.getVal().equals(sourceType)) {
        return CLICKHOUSE_NUMBERS;
    } else if (DataSourceType.DMDB.getVal().equals(sourceType)) {
        return DMDB_NUMBERS;
    } else if (DataSourceType.GREENPLUM6.getVal().equals(sourceType)) {
        return GREENPLUM_NUMBERS;
    } else if (DataSourceType.KINGBASE8.getVal().equals(sourceType)) {
        return KINGBASE_NUMBERS;
    } else if (DataSourceType.INFLUXDB.getVal().equals(sourceType)) {
        return INFLUXDB_NUMBERS;
    }
    throw new RdosDefineException("切分键只支持关系型数据库");
}
Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack — class DatasourceService, method buildSparkThriftDataSourceDataJSON.
/**
 * Builds the connection JSON for a Spark Thrift data source.
 *
 * @param clusterId      id of the cluster whose console config is read
 * @param jdbcInfo       console-side JDBC config (url template, credentials, kerberos)
 * @param dataSourceName database name substituted into the url's %s placeholder
 * @return JSON holding jdbcUrl, credentials, defaultFS, hadoop and kerberos config
 * @throws RdosDefineException if the url lacks the %s placeholder or defaultFS is missing
 */
public JSONObject buildSparkThriftDataSourceDataJSON(Long clusterId, JdbcInfo jdbcInfo, String dataSourceName) {
    String jdbcUrl = jdbcInfo.getJdbcUrl();
    JSONObject dataJson = new JSONObject();
    dataJson.put("username", jdbcInfo.getUsername());
    dataJson.put("password", jdbcInfo.getPassword());
    // The console url must contain a %s placeholder for the database name.
    if (!jdbcUrl.contains("%s")) {
        throw new RdosDefineException("控制台 " + EComponentType.SPARK_THRIFT.getName() + " URL中 不包含占位符 %s");
    }
    jdbcUrl = String.format(jdbcUrl, dataSourceName);
    dataJson.put("jdbcUrl", jdbcUrl);
    String defaultFs = HadoopConf.getDefaultFsByClusterId(clusterId);
    if (StringUtils.isNotBlank(defaultFs)) {
        dataJson.put("defaultFS", defaultFs);
    } else {
        throw new RdosDefineException("默认数据源的defaultFs未找到");
    }
    JSONObject hdpConfig = createHadoopConfigObject(clusterId);
    if (!hdpConfig.isEmpty()) {
        dataJson.put("hadoopConfig", hdpConfig.toJSONString());
    }
    dataJson.put("hasHdfsConfig", true);
    JSONObject kerberosConfig = jdbcInfo.getKerberosConfig();
    if (Objects.nonNull(kerberosConfig)) {
        Map<String, String> sftpMap = getSftpMapByClusterId(clusterId);
        String remotePath = kerberosConfig.getString("remotePath");
        String sftpPath = sftpMap.get("path");
        // FIX: use literal replace, not replaceAll - the sftp path is a filesystem
        // path, not a regex; regex metacharacters in it would corrupt the result.
        // Also guard against nulls instead of throwing NPE.
        if (remotePath != null && sftpPath != null) {
            kerberosConfig.put("remotePath", remotePath.replace(sftpPath, ""));
        }
        kerberosConfig.put("hive.server2.authentication", "KERBEROS");
        dataJson.put("kerberosConfig", kerberosConfig);
    }
    return dataJson;
}
Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack — class DatasourceService, method getSyncSql.
/**
 * Builds (or rebuilds) the JSON script of an offline data-sync task.
 *
 * @param param    task configuration holding source/target/setting maps
 * @param isFilter whether to strip username/password when producing the script
 * @return the sync-task JSON (job + parser + createModel), or the raw job text
 *         for template-mode tasks
 * @throws RdosDefineException if the source/target cannot be used in that
 *         direction, the writer config is invalid, or parsing fails
 */
public String getSyncSql(final TaskResourceParam param, boolean isFilter) {
    // Reader-side (source) configuration
    final Map<String, Object> sourceMap = param.getSourceMap();
    // Writer-side (target) configuration
    final Map<String, Object> targetMap = param.getTargetMap();
    // Throttling / error-limit configuration
    final Map<String, Object> settingMap = param.getSettingMap();
    try {
        this.setReaderJson(sourceMap, param.getId(), param.getTenantId(), isFilter);
        this.setWriterJson(targetMap, param.getId(), param.getTenantId(), isFilter);
        final Integer sourceType = Integer.parseInt(sourceMap.get("dataSourceType").toString());
        final Integer targetType = Integer.parseInt(targetMap.get("dataSourceType").toString());
        if (!this.checkDataSourcePermission(sourceType, EDataSourcePermission.READ.getType())) {
            throw new RdosDefineException(ErrorCode.SOURCE_CAN_NOT_AS_INPUT);
        }
        if (!this.checkDataSourcePermission(targetType, EDataSourcePermission.WRITE.getType())) {
            throw new RdosDefineException(ErrorCode.SOURCE_CAN_NOT_AS_OUTPUT);
        }
        final List<Long> sourceIds = (List<Long>) sourceMap.get("sourceIds");
        final List<Long> targetIds = (List<Long>) targetMap.get("sourceIds");
        Reader reader = this.syncReaderBuild(sourceType, sourceMap, sourceIds);
        Writer writer = this.syncWriterBuild(targetType, targetIds, targetMap, reader);
        Setting setting = PublicUtil.objectToObject(settingMap, DefaultSetting.class);
        // Validate the writer configuration before emitting anything.
        if (writer instanceof HiveWriter) {
            final HiveWriter hiveWriter = (HiveWriter) writer;
            if (!hiveWriter.isValid()) {
                throw new RdosDefineException(hiveWriter.getErrMsg());
            }
        }
        if (param.getCreateModel() == TaskCreateModelType.TEMPLATE.getType()) {
            // Template (script) mode: return the job text directly.
            return this.getJobText(this.putDefaultEmptyValueForReader(sourceType, reader), this.putDefaultEmptyValueForWriter(targetType, writer), this.putDefaultEmptyValueForSetting(setting));
        }
        // Guide mode: produce the job.xml configuration plus the parser config.
        final String jobXml = this.getJobText(reader, writer, setting);
        final String parserXml = this.getParserText(sourceMap, targetMap, settingMap);
        final JSONObject sql = new JSONObject(3);
        sql.put("job", jobXml);
        sql.put("parser", parserXml);
        sql.put("createModel", TaskCreateModelType.GUIDE.getType());
        this.batchTaskParamService.checkParams(this.batchTaskParamService.checkSyncJobParams(sql.toJSONString()), param.getTaskVariables());
        return sql.toJSONString();
    } catch (final RdosDefineException e) {
        // FIX: rethrow our own exceptions unchanged so specific error codes
        // (e.g. SOURCE_CAN_NOT_AS_INPUT) are not swallowed by the generic wrap below.
        LOGGER.error("", e);
        throw e;
    } catch (final Exception e) {
        LOGGER.error("", e);
        throw new RdosDefineException("解析同步任务失败: " + e.getMessage(), ErrorCode.SERVER_EXCEPTION);
    }
}
Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack — class BatchTaskParamService, method checkParams.
/**
 * Validates the system and custom parameters referenced by a task.
 * <p>
 * Ensures every supplied parameter carries a non-empty value, and that any
 * parameter placeholder found in the SQL has a matching parameter entry.
 *
 * @param jobContent   SQL content of the task (may be blank)
 * @param parameterSet task parameters; raw List preserved for caller compatibility
 * @throws RdosDefineException if a parameter value is empty, or the SQL uses
 *         placeholders while the parameter set is empty
 */
public void checkParams(final String jobContent, final List parameterSet) {
    // Every supplied custom parameter must carry a non-empty value.
    if (CollectionUtils.isNotEmpty(parameterSet)) {
        for (Object paramObj : parameterSet) {
            BatchTaskParam batchTaskParam = PublicUtil.objectToObject(paramObj, BatchTaskParam.class);
            if (batchTaskParam != null) {
                if (StringUtils.isBlank(batchTaskParam.getParamCommand()) || "$[]".equalsIgnoreCase(batchTaskParam.getParamCommand())) {
                    throw new RdosDefineException("自定义参数赋值不能为空");
                }
            }
        }
    }
    if (StringUtils.isBlank(jobContent)) {
        return;
    }
    // Strip comments before scanning for parameter placeholders.
    String sqlWithoutComments = this.batchSqlExeService.removeComment(jobContent);
    // FIX: removeComment may yield null/blank - bail out instead of NPE
    // on the contains() calls below.
    if (StringUtils.isBlank(sqlWithoutComments)) {
        return;
    }
    sqlWithoutComments = sqlWithoutComments.replaceAll("\\s*", "");
    // Data-sync config blobs may contain placeholder-like text - drop them first.
    if (sqlWithoutComments.contains(FormNames.HBASE_CONFIG) || sqlWithoutComments.contains(FormNames.HADOOP_CONFIG) || sqlWithoutComments.contains(FormNames.KERBEROS_CONFIG)) {
        sqlWithoutComments = removeConfig(sqlWithoutComments);
    }
    // A placeholder in the SQL with no parameters supplied is an error.
    Matcher matcher = PARAM_REGEX_PATTERN.matcher(sqlWithoutComments);
    if (matcher.find() && CollectionUtils.isEmpty(parameterSet)) {
        LOGGER.error("jobContent:{}", jobContent);
        throw new RdosDefineException(ErrorCode.TASK_PARAM_CONTENT_NOT_NULL);
    }
}
Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack — class BatchTaskResourceService, method save.
/**
 * Binds the given resources to a task, creating or updating one
 * BatchTaskResource row per resource id.
 *
 * @param batchTask   task the resources belong to
 * @param resourceIds ids of the resources to bind; null/empty yields an empty list
 * @param refType     reference type stored on each binding
 * @return the persisted bindings in input order
 * @throws RdosDefineException if any resource id cannot be found
 */
public List<BatchTaskResource> save(BatchTask batchTask, List<Long> resourceIds, Integer refType) {
    // FIX: guard against null/empty input instead of NPE on resourceIds.size().
    if (resourceIds == null || resourceIds.isEmpty()) {
        return new ArrayList<>();
    }
    List<BatchTaskResource> taskResources = new ArrayList<>(resourceIds.size());
    // Single timestamp so all rows of one save() share identical times
    // (previously two now() calls could differ by a few millis).
    Timestamp now = Timestamp.valueOf(LocalDateTime.now());
    for (Long resourceId : resourceIds) {
        // The resource must exist before it can be bound.
        if (batchResourceService.getResource(resourceId) == null) {
            logger.warn("can't find resource from BatchResource table by id:{}", resourceId);
            throw new RdosDefineException(ErrorCode.CAN_NOT_FIND_RESOURCE);
        }
        // Reuse the existing binding row if present, otherwise create one.
        BatchTaskResource resource = developTaskResourceDao.getByTaskIdAndResourceId(batchTask.getId(), resourceId, refType);
        if (resource == null) {
            resource = new BatchTaskResource();
        }
        resource.setTaskId(batchTask.getId());
        resource.setTenantId(batchTask.getTenantId());
        // NOTE(review): gmtCreate is reset even when updating an existing row —
        // confirm that overwriting the original creation time is intended.
        resource.setGmtCreate(now);
        resource.setGmtModified(now);
        resource.setResourceId(resourceId);
        resource.setResourceType(refType);
        taskResources.add(addOrUpdate(resource));
    }
    return taskResources;
}
Aggregations