Example 66 with RdosDefineException

Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.

The class ScheduleActionService, method dealActionParam.

private void dealActionParam(Map<String, Object> actionParam, ScheduleTaskShade batchTask, ScheduleJob scheduleJob) throws Exception {
    IPipeline pipeline = null;
    String pipelineConfig = null;
    // pick a pipeline: an explicit pipeline config wins, otherwise fall back by task type
    if (actionParam.containsKey(PipelineBuilder.pipelineKey)) {
        pipelineConfig = (String) actionParam.get(PipelineBuilder.pipelineKey);
        pipeline = PipelineBuilder.buildPipeline(pipelineConfig);
    } else if (EScheduleJobType.SPARK_SQL.getType().equals(batchTask.getTaskType())) {
        pipeline = PipelineBuilder.buildDefaultSqlPipeline();
    } else if (EScheduleJobType.SYNC.getType().equals(batchTask.getTaskType())) {
        pipeline = syncOperatorPipeline;
    }
    if (pipeline == null) {
        throw new RdosDefineException(ErrorCode.CONFIG_ERROR);
    }
    List<ScheduleTaskParamShade> taskParamsToReplace = JSONObject.parseArray((String) actionParam.get("taskParamsToReplace"), ScheduleTaskParamShade.class);
    Map<String, Object> pipelineInitMap = PipelineBuilder.getPipelineInitMap(pipelineConfig, scheduleJob, batchTask, taskParamsToReplace, (uploadPipelineMap) -> {
        // fill in the file-upload information
        JSONObject pluginInfo = clusterService.pluginInfoJSON(batchTask.getTenantId(), batchTask.getTaskType(), null, null);
        String hdfsTypeName = componentService.buildHdfsTypeName(batchTask.getTenantId(), null);
        pluginInfo.put(ConfigConstant.TYPE_NAME_KEY, hdfsTypeName);
        uploadPipelineMap.put(UploadParamPipeline.pluginInfoKey, pluginInfo);
        uploadPipelineMap.put(UploadParamPipeline.fileUploadPathKey, environmentContext.getHdfsTaskPath());
    });
    pipeline.execute(actionParam, pipelineInitMap);
}
Also used : JSONObject(com.alibaba.fastjson.JSONObject) ScheduleTaskParamShade(com.dtstack.taier.dao.dto.ScheduleTaskParamShade) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) IPipeline(com.dtstack.taier.scheduler.server.pipeline.IPipeline)
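
Below, a minimal, self-contained sketch of the fastjson parseArray pattern used for taskParamsToReplace above. The Param POJO and its fields are hypothetical stand-ins for ScheduleTaskParamShade, whose real fields are not shown here.

import com.alibaba.fastjson.JSONObject;
import java.util.List;

public class ParseArrayDemo {

    // hypothetical stand-in for ScheduleTaskParamShade
    public static class Param {
        private String paramName;
        private String paramValue;
        public String getParamName() { return paramName; }
        public void setParamName(String paramName) { this.paramName = paramName; }
        public String getParamValue() { return paramValue; }
        public void setParamValue(String paramValue) { this.paramValue = paramValue; }
    }

    public static void main(String[] args) {
        // parseArray maps a JSON array string onto a typed list, as dealActionParam does
        String json = "[{\"paramName\":\"cyctime\",\"paramValue\":\"${cyctime}\"}]";
        List<Param> params = JSONObject.parseArray(json, Param.class);
        System.out.println(params.get(0).getParamName());
    }
}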

Example 67 with RdosDefineException

Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.

The class SyncOperatorPipeline, method buildDataSourcePluginInfo.

/**
 * Assemble the connection info of the data source.
 * For hive, check whether kerberos is enabled.
 *
 * @param hadoopConfig hadoop configuration of the cluster
 * @param sourceType   data source type
 * @param username     jdbc username
 * @param password     jdbc password
 * @param jdbcUrl      jdbc url of the data source
 * @return plugin info used to submit the query
 */
private JSONObject buildDataSourcePluginInfo(JSONObject hadoopConfig, Integer sourceType, String username, String password, String jdbcUrl) {
    JSONObject pluginInfo = new JSONObject();
    // parse the database name out of jdbcUrl and replace it with "default", so a missing database does not trigger NoSuchDatabaseException
    try {
        String jdbcUrlStr = jdbcUrl;
        if (jdbcUrl.contains(";")) {
            // the url carries kerberos parameters; keep only the part before them
            jdbcUrlStr = jdbcUrl.substring(0, jdbcUrl.indexOf(";"));
        }
        String realDataBase = jdbcUrlStr.substring(jdbcUrlStr.lastIndexOf("/") + 1);
        String newJdbcUrl = jdbcUrl.replaceFirst(realDataBase, "default");
        pluginInfo.put("realDataBase", realDataBase);
        pluginInfo.put(ConfigConstant.JDBCURL, newJdbcUrl);
    } catch (Exception e) {
        // if replacing the database fails, fall back to the original url
        pluginInfo.put(ConfigConstant.JDBCURL, jdbcUrl);
    }
    pluginInfo.put(ConfigConstant.USERNAME, username);
    pluginInfo.put(ConfigConstant.PASSWORD, password);
    pluginInfo.put(ConfigConstant.TYPE_NAME_KEY, getHiveTypeName(DataSourceType.getSourceType(sourceType)));
    if (null == hadoopConfig) {
        return pluginInfo;
    }
    boolean isOpenKerberos = ConfigConstant.KERBEROS.equalsIgnoreCase(hadoopConfig.getString("hadoop.security.authentication")) || ConfigConstant.KERBEROS.equalsIgnoreCase(hadoopConfig.getString("hive.server2.authentication")) || ConfigConstant.KERBEROS.equalsIgnoreCase(hadoopConfig.getString("hive.server.authentication"));
    if (isOpenKerberos) {
        // kerberos is enabled: take the config items from the data-sync job
        pluginInfo.put(ConfigConstant.OPEN_KERBEROS, Boolean.TRUE.toString());
        String remoteDir = hadoopConfig.getString(ConfigConstant.REMOTE_DIR);
        if (StringUtils.isBlank(remoteDir)) {
            throw new RdosDefineException(" data synchronization task hadoopConfig remoteDir field cannot be empty");
        }
        pluginInfo.put(ConfigConstant.REMOTE_DIR, remoteDir);
        String principalFile = hadoopConfig.getString(ConfigConstant.PRINCIPAL_FILE);
        if (StringUtils.isBlank(principalFile)) {
            throw new RdosDefineException(" data synchronization hadoopConfig principalFile field cannot be empty");
        }
        pluginInfo.put(ConfigConstant.PRINCIPAL_FILE, principalFile);
        pluginInfo.putIfAbsent(ConfigConstant.PRINCIPAL, hadoopConfig.getString(ConfigConstant.PRINCIPAL));
        JSONObject sftpConf = hadoopConfig.getJSONObject(EComponentType.SFTP.getConfName());
        if (null == sftpConf || sftpConf.size() <= 0) {
            throw new RdosDefineException(" data synchronization hadoopConfig sftpConf field cannot be empty");
        }
        pluginInfo.put(EComponentType.SFTP.getConfName(), sftpConf);
        // file name of krb5.conf
        String krb5Conf = hadoopConfig.getString(ConfigConstant.JAVA_SECURITY_KRB5_CONF);
        if (StringUtils.isBlank(krb5Conf)) {
            // not passed by the platform; fall back to the default for now
            krb5Conf = ConfigConstant.KRB5_CONF;
        }
        pluginInfo.put(ConfigConstant.KRB_NAME, krb5Conf);
        pluginInfo.put(EComponentType.YARN.getConfName(), hadoopConfig);
    }
    return pluginInfo;
}
Also used : JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException)
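
A minimal, runnable sketch of just the database-rewriting step above, assuming a typical kerberized hive jdbc url (host, port, and principal are made up):

public class JdbcUrlRewriteDemo {
    public static void main(String[] args) {
        String jdbcUrl = "jdbc:hive2://hive-host:10000/mydb;principal=hive/_HOST@EXAMPLE.COM";
        // strip the kerberos suffix before locating the database segment
        String bare = jdbcUrl.contains(";") ? jdbcUrl.substring(0, jdbcUrl.indexOf(";")) : jdbcUrl;
        String realDataBase = bare.substring(bare.lastIndexOf("/") + 1);
        // swap the real database for "default", leaving the rest of the url intact
        String newJdbcUrl = jdbcUrl.replaceFirst(realDataBase, "default");
        System.out.println(realDataBase); // mydb
        System.out.println(newJdbcUrl);   // jdbc:hive2://hive-host:10000/default;principal=hive/_HOST@EXAMPLE.COM
    }
}

Note that replaceFirst treats its argument as a regex, so a database name containing regex metacharacters would misbehave; the original method tolerates this by catching any exception and falling back to the unmodified url.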

Example 68 with RdosDefineException

Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.

The class SyncOperatorPipeline, method createPartition.

/**
 * Create the target partition if it does not exist yet.
 */
public String createPartition(Long tenantId, String job, Integer sourceType) {
    JSONObject jobJSON = JSONObject.parseObject(job);
    JSONObject jobObj = jobJSON.getJSONObject("job");
    JSONObject parameter = jobObj.getJSONArray("content").getJSONObject(0).getJSONObject("writer").getJSONObject("parameter");
    if (parameter.containsKey("partition") && parameter.containsKey("connection")) {
        JSONObject connection = parameter.getJSONArray("connection").getJSONObject(0);
        String username = parameter.containsKey(ConfigConstant.USERNAME) ? parameter.getString(ConfigConstant.USERNAME) : "";
        String password = parameter.containsKey(ConfigConstant.PASSWORD) ? parameter.getString(ConfigConstant.PASSWORD) : "";
        String jdbcUrl = connection.getString(ConfigConstant.JDBCURL);
        String table = connection.getJSONArray("table").getString(0);
        String partition = parameter.getString("partition");
        Map<String, String> split = new HashMap<>();
        // raw partition spec, e.g. (etl_date='2020-09-17'/etl_hour='23')
        if (StringUtils.countMatches(partition, "/") == 1 && StringUtils.countMatches(partition, "=") == 1) {
            // single-level partition whose value contains '/', e.g. pt=2020/04
            String[] splits = partition.split("=");
            split.put(splits[0], splits[1]);
        } else {
            // two-level partition, e.g. pt='asdfasd'/ds='1231231'
            split = Splitter.on("/").withKeyValueSeparator("=").split(partition);
        }
        Map<String, String> formattedMap = new HashMap<>();
        for (Map.Entry<String, String> entry : split.entrySet()) {
            String value = entry.getValue();
            String key = entry.getKey();
            if (value.startsWith("'") || value.startsWith("\"")) {
                value = value.substring(1);
            }
            if (value.endsWith("'") || value.endsWith("\"")) {
                value = value.substring(0, value.length() - 1);
            }
            formattedMap.put(key, value);
        }
        // fileName: the surrounding quotes still need to be handled
        parameter.put("fileName", partition);
        String join = Joiner.on("',").withKeyValueSeparator("='").join(formattedMap);
        partition = join + "'";
        String sql = String.format("alter table %s add if not exists partition (%s)", table, partition);
        try {
            RetryUtil.executeWithRetry(() -> {
                LOGGER.info("create partition tenantId {} {}", tenantId, sql);
                JSONObject pluginInfo = buildDataSourcePluginInfo(parameter.getJSONObject("hadoopConfig"), sourceType, username, password, jdbcUrl);
                String realDataBase = pluginInfo.getString("realDataBase");
                pluginInfo.put(ConfigConstant.TYPE_NAME_KEY, getHiveTypeName(DataSourceType.getSourceType(sourceType)));
                workerOperator.executeQuery(pluginInfo.toJSONString(), sql, null != realDataBase ? realDataBase : "");
                cleanFileName(parameter);
                return null;
            }, environmentContext.getRetryFrequency(), environmentContext.getRetryInterval(), false, null);
        } catch (Exception e) {
            LOGGER.error("create partition error:", e);
            throw new RdosDefineException("create partition error:" + ExceptionUtil.getErrorMessage(e));
        }
    }
    return jobJSON.toJSONString();
}
Also used : JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException)
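
A minimal, self-contained sketch of the quote-stripping and re-quoting above, using the same Guava Splitter/Joiner calls (the table name is made up):

import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import java.util.HashMap;
import java.util.Map;

public class PartitionSpecDemo {
    public static void main(String[] args) {
        String partition = "pt='asdfasd'/ds='1231231'";
        // split the two-level spec into key=value pairs
        Map<String, String> split = Splitter.on("/").withKeyValueSeparator("=").split(partition);
        Map<String, String> formatted = new HashMap<>();
        for (Map.Entry<String, String> entry : split.entrySet()) {
            // strip surrounding single or double quotes so values can be re-quoted uniformly
            formatted.put(entry.getKey(), entry.getValue().replaceAll("^['\"]|['\"]$", ""));
        }
        // re-quote and join into a hive partition spec: pt='asdfasd',ds='1231231'
        String spec = Joiner.on("',").withKeyValueSeparator("='").join(formatted) + "'";
        System.out.println(String.format("alter table %s add if not exists partition (%s)", "some_table", spec));
    }
}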

Example 69 with RdosDefineException

Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.

The class UploadParamPipeline, method pipeline.

@Override
public void pipeline(Map<String, Object> actionParam, Map<String, Object> pipelineParam) throws RdosDefineException {
    if (pipelineParam.containsKey(pipelineKey)) {
        return;
    }
    ScheduleTaskShade taskShade = (ScheduleTaskShade) pipelineParam.get(taskShadeKey);
    if (null == taskShade) {
        throw new RdosDefineException("upload param pipeline task shade can not be null");
    }
    ScheduleJob scheduleJob = (ScheduleJob) pipelineParam.get(scheduleJobKey);
    if (null == scheduleJob) {
        throw new RdosDefineException("upload param pipeline schedule job can not be null");
    }
    String fileUploadPath = (String) pipelineParam.get(fileUploadPathKey);
    if (StringUtils.isBlank(fileUploadPath)) {
        throw new RdosDefineException("upload param pipeline fileUploadPath can not be null");
    }
    WorkerOperator workerOperator = (WorkerOperator) pipelineParam.get(workOperatorKey);
    if (null == workerOperator) {
        throw new RdosDefineException("upload param pipeline workerOperator can not be null");
    }
    JSONObject pluginInfo = (JSONObject) pipelineParam.get(pluginInfoKey);
    if (null == pluginInfo) {
        throw new RdosDefineException("upload param pipeline pluginInfo can not be null");
    }
    @SuppressWarnings("unchecked") List<ScheduleTaskParamShade> taskParamShades = (List) pipelineParam.get(taskParamsToReplaceKey);
    String uploadPath = this.uploadSqlTextToHdfs((String) actionParam.get("sqlText"), taskShade.getTaskType(), taskShade.getName(), taskShade.getTenantId(), 0L, taskParamShades, scheduleJob.getCycTime(), fileUploadPath, pluginInfo, workerOperator, scheduleJob.getJobId());
    pipelineParam.put(pipelineKey, uploadPath);
}
Also used : ScheduleJob(com.dtstack.taier.dao.domain.ScheduleJob) JSONObject(com.alibaba.fastjson.JSONObject) ScheduleTaskParamShade(com.dtstack.taier.dao.dto.ScheduleTaskParamShade) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) List(java.util.List) WorkerOperator(com.dtstack.taier.scheduler.WorkerOperator) ScheduleTaskShade(com.dtstack.taier.dao.domain.ScheduleTaskShade)
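
The five null checks above follow one pattern; a hypothetical helper could collapse them into single lookups (requireParam is an assumption for illustration, not part of Taier):

import com.dtstack.taier.common.exception.RdosDefineException;
import java.util.Map;

public final class PipelineParams {

    private PipelineParams() {
    }

    // hypothetical helper: fetch a required pipeline parameter or throw the
    // same kind of RdosDefineException the stage above throws
    @SuppressWarnings("unchecked")
    public static <T> T requireParam(Map<String, Object> params, String key) {
        Object value = params.get(key);
        if (value == null) {
            throw new RdosDefineException("upload param pipeline " + key + " can not be null");
        }
        return (T) value;
    }
}

With it, each lookup in pipeline() would read like: ScheduleJob scheduleJob = PipelineParams.requireParam(pipelineParam, scheduleJobKey);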

Example 70 with RdosDefineException

Use of com.dtstack.taier.common.exception.RdosDefineException in project Taier by DTStack.

The class ClusterService, method buildDeployMode.

private JSONObject buildDeployMode(JSONObject clusterConfigJson, EComponentType componentType, Long clusterId, Integer deployMode) {
    JSONObject pluginInfo;
    // defaults to session for flink, perjob for everything else
    EDeployMode deploy = EComponentType.FLINK.equals(componentType) ? EDeployMode.SESSION : EDeployMode.PERJOB;
    // spark is always perjob for now
    if (Objects.nonNull(deployMode) && !EComponentType.SPARK.equals(componentType)) {
        deploy = EDeployMode.getByType(deployMode);
    }
    if (EComponentType.FLINK.equals(componentType) && EDeployMode.STANDALONE.getType().equals(deployMode)) {
        // flink standalone has no per-mode sub-config; return the component config as-is
        return clusterConfigJson.getJSONObject(EComponentType.FLINK.getConfName());
    }
    JSONObject confConfig = clusterConfigJson.getJSONObject(componentType.getConfName());
    pluginInfo = confConfig.getJSONObject(deploy.getMode());
    if (Objects.isNull(pluginInfo)) {
        throw new RdosDefineException(String.format("Corresponding mode [%s] no information is configured", deploy.name()));
    }
    String typeName = confConfig.getString(TYPE_NAME);
    if (!StringUtils.isBlank(typeName)) {
        pluginInfo.put(TYPE_NAME_KEY, typeName);
    }
    if (EComponentType.SPARK.equals(componentType)) {
        JSONObject sftpConfig = clusterConfigJson.getJSONObject(EComponentType.SFTP.getConfName());
        if (Objects.nonNull(sftpConfig)) {
            String confHdfsPath = sftpConfig.getString("path") + File.separator + componentService.buildConfRemoteDir(clusterId);
            pluginInfo.put("confHdfsPath", confHdfsPath);
        }
    }
    return pluginInfo;
}
Also used : JSONObject(com.alibaba.fastjson.JSONObject) RdosDefineException(com.dtstack.taier.common.exception.RdosDefineException) EDeployMode(com.dtstack.taier.pluginapi.enums.EDeployMode)
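
To illustrate the cluster-config shape buildDeployMode expects, here is a made-up fastjson fragment with one sub-object per deploy mode; the key names ("session", "perjob", "typeName") and all values are assumptions for the sketch:

import com.alibaba.fastjson.JSONObject;

public class DeployModeConfigDemo {
    public static void main(String[] args) {
        // hypothetical flink component config: one block per deploy mode plus a shared typeName
        JSONObject flinkConf = new JSONObject()
                .fluentPut("session", new JSONObject().fluentPut("sessionStartAuto", "true"))
                .fluentPut("perjob", new JSONObject())
                .fluentPut("typeName", "flink112-hadoop2");

        // mirroring buildDeployMode: pick the block for the resolved mode
        // and stamp the shared typeName onto it
        JSONObject pluginInfo = flinkConf.getJSONObject("session");
        pluginInfo.put("typeName", flinkConf.getString("typeName"));
        System.out.println(pluginInfo.toJSONString());
    }
}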

Aggregations

RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException) 176
JSONObject (com.alibaba.fastjson.JSONObject) 80
IOException (java.io.IOException) 24
ArrayList (java.util.ArrayList) 20
EComponentType (com.dtstack.taier.common.enums.EComponentType) 18
List (java.util.List) 18
JSONArray (com.alibaba.fastjson.JSONArray) 17
File (java.io.File) 16
DtCenterDefException (com.dtstack.taier.common.exception.DtCenterDefException) 15
Transactional (org.springframework.transaction.annotation.Transactional) 15
BatchTask (com.dtstack.taier.dao.domain.BatchTask) 14
ScheduleJob (com.dtstack.taier.dao.domain.ScheduleJob) 13
Map (java.util.Map) 13
ISourceDTO (com.dtstack.dtcenter.loader.dto.source.ISourceDTO) 10
Component (com.dtstack.taier.dao.domain.Component) 10
Resource (com.dtstack.taier.dao.dto.Resource) 10
HashMap (java.util.HashMap) 10
CollectionUtils (org.apache.commons.collections.CollectionUtils) 10
ErrorCode (com.dtstack.taier.common.exception.ErrorCode) 9
BatchCatalogue (com.dtstack.taier.dao.domain.BatchCatalogue) 9