
Example 6 with ParamAction

use of com.dtstack.taier.pluginapi.pojo.ParamAction in project Taier by DTStack.

In class GroupPriorityQueue, the method emitJob2PriorityQueue:

/**
 * Pull jobs cached in the DB stage into the in-memory priority queue.
 *
 * @return false: still blocked (queue is full or jobs remain in the DB stage) | true: unblocked (DB stage drained)
 */
private boolean emitJob2PriorityQueue() {
    boolean empty = false;
    String localAddress = "";
    try {
        // nothing to do while the in-memory priority queue is already at capacity
        if (priorityQueueSize() >= getQueueSizeLimited()) {
            return false;
        }
        localAddress = environmentContext.getLocalAddress();
        long startId = 0L;
        // page through this node's DB-stage job cache, advancing startId past each processed row
        outLoop: while (true) {
            List<ScheduleEngineJobCache> jobCaches = engineJobCacheService.listByStage(startId, localAddress, EJobCacheStage.DB.getStage(), jobResource);
            if (CollectionUtils.isEmpty(jobCaches)) {
                empty = true;
                break;
            }
            for (ScheduleEngineJobCache jobCache : jobCaches) {
                try {
                    ParamAction paramAction = PublicUtil.jsonStrToObject(jobCache.getJobInfo(), ParamAction.class);
                    JobClient jobClient = new JobClient(paramAction);
                    jobClient.setCallBack((jobStatus) -> {
                        jobDealer.updateJobStatus(jobClient.getJobId(), jobStatus);
                    });
                    boolean addInner = this.addInner(jobClient, false);
                    LOGGER.info("jobId:{} load from db, {} emit job to queue.", jobClient.getJobId(), addInner ? "success" : "failed");
                    if (!addInner) {
                        // the queue refused the job (capacity reached); stop and leave the rest in the DB stage
                        empty = false;
                        break outLoop;
                    }
                    startId = jobCache.getId();
                } catch (Exception e) {
                    LOGGER.error("", e);
                    // data conversion failed -- log it and mark the job as a failed submission
                    jobDealer.dealSubmitFailJob(jobCache.getJobId(), "This task stores information exception and cannot be converted." + e.toString());
                }
            }
        }
    } catch (Exception e) {
        LOGGER.error("emitJob2PriorityQueue localAddress:{} error:", localAddress, e);
    }
    if (empty) {
        blocked.set(false);
    }
    return empty;
}
Also used : Logger(org.slf4j.Logger) java.util.concurrent(java.util.concurrent) LoggerFactory(org.slf4j.LoggerFactory) EnvironmentContext(com.dtstack.taier.common.env.EnvironmentContext) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) StringUtils(org.apache.commons.lang3.StringUtils) ApplicationContext(org.springframework.context.ApplicationContext) EngineJobCacheService(com.dtstack.taier.scheduler.service.EngineJobCacheService) EJobCacheStage(com.dtstack.taier.common.enums.EJobCacheStage) CustomThreadFactory(com.dtstack.taier.pluginapi.CustomThreadFactory) ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) JobClient(com.dtstack.taier.pluginapi.JobClient) PublicUtil(com.dtstack.taier.pluginapi.util.PublicUtil) List(java.util.List) JobClientComparator(com.dtstack.taier.common.queue.comparator.JobClientComparator) JobDealer(com.dtstack.taier.scheduler.jobdealer.JobDealer) CollectionUtils(org.apache.commons.collections.CollectionUtils) ScheduleEngineJobCache(com.dtstack.taier.dao.domain.ScheduleEngineJobCache) WorkerOperator(com.dtstack.taier.scheduler.WorkerOperator) JobSubmitDealer(com.dtstack.taier.scheduler.jobdealer.JobSubmitDealer) JobPartitioner(com.dtstack.taier.scheduler.server.JobPartitioner)
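
The conversion each cache row goes through here can be exercised on its own. The following is a minimal, self-contained sketch (not Taier's actual scheduler code) of the same JSON -> ParamAction -> JobClient step with a status callback wired in; the JSON literal and the printed messages are placeholders, whereas real rows are written by the scheduler and the real callback forwards to JobDealer#updateJobStatus as shown above.

import com.dtstack.taier.pluginapi.JobClient;
import com.dtstack.taier.pluginapi.pojo.ParamAction;
import com.dtstack.taier.pluginapi.util.PublicUtil;

public class CacheRowToJobClientDemo {
    public static void main(String[] args) throws Exception {
        // placeholder for the job_info JSON stored in the engine job cache table
        String jobInfo = "{\"jobId\":\"9999\",\"taskType\":0,\"computeType\":0,\"tenantId\":0,\"sqlText\":\"select 1\"}";
        ParamAction paramAction = PublicUtil.jsonStrToObject(jobInfo, ParamAction.class);
        JobClient jobClient = new JobClient(paramAction);
        // in GroupPriorityQueue this callback forwards the new status to JobDealer#updateJobStatus
        jobClient.setCallBack(jobStatus -> System.out.println("job " + jobClient.getJobId() + " -> status " + jobStatus));
        System.out.println("converted job " + jobClient.getJobId());
    }
}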

Example 7 with ParamAction

use of com.dtstack.taier.pluginapi.pojo.ParamAction in project Taier by DTStack.

In class NodeRecoverService, the method recoverJobCaches:

public void recoverJobCaches() {
    String localAddress = environmentContext.getLocalAddress();
    try {
        long startId = 0L;
        while (true) {
            // reload this node's jobs that were already in the SUBMITTED stage, paging by id
            List<ScheduleEngineJobCache> jobCaches = engineJobCacheService.listByStage(startId, localAddress, EJobCacheStage.SUBMITTED.getStage(), null);
            if (CollectionUtils.isEmpty(jobCaches)) {
                break;
            }
            List<JobClient> afterJobClients = new ArrayList<>(jobCaches.size());
            for (ScheduleEngineJobCache jobCache : jobCaches) {
                try {
                    ParamAction paramAction = PublicUtil.jsonStrToObject(jobCache.getJobInfo(), ParamAction.class);
                    JobClient jobClient = new JobClient(paramAction);
                    afterJobClients.add(jobClient);
                    startId = jobCache.getId();
                } catch (Exception e) {
                    LOGGER.error("", e);
                    // data conversion failed -- log it and mark the job as a failed submission
                    jobDealer.dealSubmitFailJob(jobCache.getJobId(), "This task stores information exception and cannot be converted." + ExceptionUtil.getErrorMessage(e));
                }
            }
            if (CollectionUtils.isNotEmpty(afterJobClients)) {
                // hand the recovered jobs back to the job dealer in one batch
                jobDealer.afterSubmitJobVast(afterJobClients);
            }
        }
    } catch (Exception e) {
        LOGGER.error("----broker:{} RecoverDealer error:", localAddress, e);
    }
}
Also used : ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) ArrayList(java.util.ArrayList) JobClient(com.dtstack.taier.pluginapi.JobClient) ScheduleEngineJobCache(com.dtstack.taier.dao.domain.ScheduleEngineJobCache)

Example 8 with ParamAction

use of com.dtstack.taier.pluginapi.pojo.ParamAction in project Taier by DTStack.

In class HadoopClient, the method main:

public static void main(String[] args) throws Exception {
    FileInputStream fileInputStream = null;
    InputStreamReader inputStreamReader = null;
    BufferedReader reader = null;
    try {
        System.setProperty("HADOOP_USER_NAME", "admin");
        // input params json file path
        String filePath = args[0];
        File paramsFile = new File(filePath);
        fileInputStream = new FileInputStream(paramsFile);
        inputStreamReader = new InputStreamReader(fileInputStream);
        reader = new BufferedReader(inputStreamReader);
        String request = reader.readLine();
        Map params = PublicUtil.jsonStrToObject(request, Map.class);
        ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
        JobClient jobClient = new JobClient(paramAction);
        String pluginInfo = jobClient.getPluginInfo();
        Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
        String md5plugin = MD5Util.getMd5String(pluginInfo);
        properties.setProperty("md5sum", md5plugin);
        HadoopClient client = new HadoopClient();
        client.init(properties);
        ClusterResource clusterResource = client.getClusterResource();
        LOG.info("submit success!");
        LOG.info(clusterResource.toString());
        // note: System.exit() terminates the JVM, so the finally block below does not run on this path
        System.exit(0);
    } catch (Exception e) {
        LOG.error("submit error!", e);
    } finally {
        if (reader != null) {
            reader.close();
            inputStreamReader.close();
            fileInputStream.close();
        }
    }
}
Also used : ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) JobClient(com.dtstack.taier.pluginapi.JobClient) YarnException(org.apache.hadoop.yarn.exceptions.YarnException) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource)
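
For a quick local run of this main method, a single-line params file is enough. The sketch below is hypothetical and not part of Taier: it writes such a file and hands its path to HadoopClient.main (assumed to be on the classpath). The field names mirror the ParamAction setters shown in Example 10, and every value, including the empty yarnConf, is a placeholder that a real setup would replace with actual cluster configuration.

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class HadoopClientMainDemo {
    public static void main(String[] args) throws Exception {
        // placeholder request: jobId, taskType, computeType, tenantId, sqlText, taskParams, pluginInfo
        String request = "{\"jobId\":\"9999\",\"taskType\":0,\"computeType\":0,\"tenantId\":0,"
                + "\"sqlText\":\"select 1\",\"taskParams\":\"{}\",\"pluginInfo\":{\"yarnConf\":{}}}";
        Path paramsFile = Files.createTempFile("hadoop-client-params", ".json");
        Files.write(paramsFile, request.getBytes(StandardCharsets.UTF_8));
        // HadoopClient.main expects the params file path as its single argument
        HadoopClient.main(new String[] { paramsFile.toString() });
    }
}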

Example 9 with ParamAction

use of com.dtstack.taier.pluginapi.pojo.ParamAction in project Taier by DTStack.

In class DtYarnClient, the method main:

public static void main(String[] args) throws Exception {
    System.setProperty("HADOOP_USER_NAME", "admin");
    // input params json file path
    String filePath = args[0];
    File paramsFile = new File(filePath);
    BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(paramsFile)));
    String request = reader.readLine();
    Map params = PublicUtil.jsonStrToObject(request, Map.class);
    ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
    JobClient jobClient = new JobClient(paramAction);
    String pluginInfo = jobClient.getPluginInfo();
    Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
    String md5plugin = MD5Util.getMd5String(pluginInfo);
    properties.setProperty("md5sum", md5plugin);
    DtYarnClient client = new DtYarnClient();
    client.init(properties);
    ClusterResource clusterResource = client.getClusterResource();
    LOG.info("submit success!");
    LOG.info(clusterResource.toString());
    System.exit(0);
}
Also used : ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) Properties(java.util.Properties) File(java.io.File) Map(java.util.Map) JobClient(com.dtstack.taier.pluginapi.JobClient) FileInputStream(java.io.FileInputStream) ClusterResource(com.dtstack.taier.pluginapi.pojo.ClusterResource)
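
The params file sketched after Example 8 drives this entry point the same way; only the client class changes from HadoopClient to DtYarnClient.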

Example 10 with ParamAction

use of com.dtstack.taier.pluginapi.pojo.ParamAction in project Taier by DTStack.

In class YarnMockUtil, the method mockJobClient:

public static JobClient mockJobClient(String jobType, String sqlNewText, String jarPath) throws Exception {
    String taskId = "9999";
    String sqlText = "ADD JAR WITH /data/sftp/21_window_WindowJoin.jar AS dtstack.WindowJoin";
    if (StringUtils.isNotEmpty(sqlNewText)) {
        sqlText = sqlNewText;
    }
    ParamAction paramAction = new ParamAction();
    if ("perJob".equalsIgnoreCase(jobType)) {
        paramAction.setTaskType(0);
        paramAction.setComputeType(0);
    } else {
        paramAction.setTaskType(1);
        paramAction.setComputeType(1);
    }
    paramAction.setJobId(taskId);
    paramAction.setSqlText(sqlText);
    paramAction.setTenantId(0L);
    paramAction.setTaskParams("{\"test\":\"test\"}");
    paramAction.setExternalPath("/tmp/savepoint");
    Map<String, Object> map = new HashMap<>();
    map.put("yarnConf", new HashMap<>());
    paramAction.setPluginInfo(map);
    JobClient jobClient = new JobClient(paramAction);
    JarFileInfo jarFileInfo = new JarFileInfo();
    jarFileInfo.setJarPath(jarPath);
    jarFileInfo.setMainClass("dtstack.WindowJoin");
    jobClient.setCoreJarInfo(jarFileInfo);
    return jobClient;
}
Also used : ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) JarFileInfo(com.dtstack.taier.pluginapi.JarFileInfo) HashMap(java.util.HashMap) JobClient(com.dtstack.taier.pluginapi.JobClient)
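
A hypothetical caller of this utility might look like the following; the method name and jar path are placeholders, and the second call illustrates that any jobType other than "perJob" takes the else branch.

// hypothetical usage sketch; the jar path is a placeholder
public static void demoMockJobClient() throws Exception {
    // "perJob" selects taskType 0 / computeType 0; passing null keeps the default ADD JAR statement
    JobClient perJob = YarnMockUtil.mockJobClient("perJob", null, "/tmp/WindowJoin.jar");
    // any other jobType falls into the else branch (taskType 1 / computeType 1),
    // and a non-empty sqlNewText overrides the default statement
    JobClient session = YarnMockUtil.mockJobClient("session",
            "ADD JAR WITH /tmp/WindowJoin.jar AS dtstack.WindowJoin", "/tmp/WindowJoin.jar");
    System.out.println(perJob.getJobId() + " / " + session.getJobId());
}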

Aggregations

ParamAction (com.dtstack.taier.pluginapi.pojo.ParamAction): 16
JobClient (com.dtstack.taier.pluginapi.JobClient): 11
ScheduleEngineJobCache (com.dtstack.taier.dao.domain.ScheduleEngineJobCache): 6
ScheduleJob (com.dtstack.taier.dao.domain.ScheduleJob): 3
JobIdentifier (com.dtstack.taier.pluginapi.JobIdentifier): 3
ClusterResource (com.dtstack.taier.pluginapi.pojo.ClusterResource): 3
IOException (java.io.IOException): 3
Map (java.util.Map): 3
Properties (java.util.Properties): 3
JSONObject (com.alibaba.fastjson.JSONObject): 2
RdosDefineException (com.dtstack.taier.common.exception.RdosDefineException): 2
PluginDefineException (com.dtstack.taier.pluginapi.exception.PluginDefineException): 2
JobResult (com.dtstack.taier.pluginapi.pojo.JobResult): 2
EJobCacheStage (com.dtstack.taier.common.enums.EJobCacheStage): 1
EnvironmentContext (com.dtstack.taier.common.env.EnvironmentContext): 1
JobClientComparator (com.dtstack.taier.common.queue.comparator.JobClientComparator): 1
ConsoleJobInfoVO (com.dtstack.taier.develop.vo.console.ConsoleJobInfoVO): 1
ConsoleJobVO (com.dtstack.taier.develop.vo.console.ConsoleJobVO): 1
CustomThreadFactory (com.dtstack.taier.pluginapi.CustomThreadFactory): 1
JarFileInfo (com.dtstack.taier.pluginapi.JarFileInfo): 1