Example usage of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project by DTStack.
From class JobSubmitDealer, method submitJob:
private void submitJob(JobClient jobClient) {
JobResult jobResult = null;
try {
// 判断资源
JudgeResult judgeResult = workerOperator.judgeSlots(jobClient);
if (JudgeResult.JudgeType.OK == judgeResult.getResult()) {
LOGGER.info("jobId:{} taskType:{} submit to engine start.", jobClient.getJobId(), jobClient.getTaskType());
jobClient.doStatusCallBack(TaskStatus.COMPUTING.getStatus());
// 提交任务
jobResult = workerOperator.submitJob(jobClient);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("jobId:{} taskType:{} submit jobResult:{}.", jobClient.getJobId(), jobClient.getTaskType(), jobResult);
}
jobClient.setEngineTaskId(jobResult.getData(JobResult.EXT_ID_KEY));
jobClient.setApplicationId(jobResult.getData(JobResult.JOB_ID_KEY));
addToTaskListener(jobClient, jobResult);
LOGGER.info("jobId:{} taskType:{} submit to engine end.", jobClient.getJobId(), jobClient.getTaskType());
} else if (JudgeResult.JudgeType.LIMIT_ERROR == judgeResult.getResult()) {
LOGGER.info("jobId:{} taskType:{} submitJob happens system limitError:{}", jobClient.getJobId(), jobClient.getTaskType(), judgeResult.getReason());
jobClient.setEngineTaskId(null);
jobResult = JobResult.createErrorResult(false, judgeResult.getReason());
addToTaskListener(jobClient, jobResult);
} else if (JudgeResult.JudgeType.EXCEPTION == judgeResult.getResult()) {
LOGGER.info("jobId:{} taskType:{} judgeSlots result is exception {}", jobClient.getJobId(), jobClient.getTaskType(), judgeResult.getReason());
handlerFailedWithRetry(jobClient, true, new Exception(judgeResult.getReason()));
} else {
LOGGER.info("jobId:{} taskType:{} judgeSlots result is false.", jobClient.getJobId(), jobClient.getTaskType());
handlerNoResource(jobClient, judgeResult);
}
} catch (WorkerAccessException e) {
LOGGER.info(" jobId:{} taskType:{} worker not find.", jobClient.getJobId(), jobClient.getTaskType());
handlerNoResource(jobClient, workerNotFindResult);
} catch (ClientAccessException | ClientArgumentException e) {
handlerFailedWithRetry(jobClient, false, e);
} catch (Throwable e) {
handlerFailedWithRetry(jobClient, true, e);
}
}
Example usage of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project by DTStack.
From class SparkYarnClient, method processSubmitJobWithType:
@Override
protected JobResult processSubmitJobWithType(JobClient jobClient) {
try {
return KerberosUtils.login(sparkYarnConfig, () -> {
EJobType jobType = jobClient.getJobType();
JobResult jobResult = null;
if (EJobType.MR.equals(jobType)) {
jobResult = submitJobWithJar(jobClient);
} else if (EJobType.SQL.equals(jobType)) {
jobResult = submitSqlJob(jobClient);
} else if (EJobType.PYTHON.equals(jobType)) {
jobResult = submitPythonJob(jobClient);
}
return jobResult;
}, yarnConf, true);
} catch (Exception e) {
logger.info("", e);
return JobResult.createErrorResult("submit job get unknown error\n" + ExceptionUtil.getErrorMessage(e));
}
}
Example usage of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project by DTStack.
From class FlinkClientTest, method testCancelJob:
/*@Test
public void testBeforeSubmitFunc() throws Exception {
String absolutePath = temporaryFolder.newFile("21_window_WindowJoin.jar").getAbsolutePath();
JobClient jobClient = YarnMockUtil.mockJobClient("session", absolutePath);
FlinkConfig flinkConfig = new FlinkConfig();
Map<String, String> map = new HashMap<>();
map.put("test", "test");
// flinkConfig.setSftpConf();
MemberModifier.field(FlinkClient.class, "flinkConfig")
.set(flinkClient, flinkConfig);
MemberModifier.field(FlinkClient.class, "cacheFile")
.set(flinkClient, Maps.newConcurrentMap());
MemberModifier.field(FlinkClient.class, "hadoopConf")
.set(flinkClient, new HadoopConf());
flinkClient.beforeSubmitFunc(jobClient);
}*/
@Test
public void testCancelJob() throws Exception {
String jobId = "40c01cd0c53928fff6a55e8d8b8b022c";
String appId = "application_1594003499276_1278";
String taskId = "taskId";
JobIdentifier jobIdentifier = JobIdentifier.createInstance(jobId, appId, taskId);
ClusterClient clusterClient = YarnMockUtil.mockClusterClient();
when(flinkClusterClientManager.getClusterClient(null)).thenReturn(clusterClient);
JobResult jobResult = flinkClient.cancelJob(jobIdentifier);
Assert.assertNotNull(jobResult);
}
Example usage of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project by DTStack.
From class HadoopClient, method cancelJob:
/**
 * Cancels a running job by killing its YARN application under a Kerberos login.
 *
 * @param jobIdentifier identifies the job; its engine job id maps to the YARN application id
 * @return a success JobResult carrying the job id, or an error result if the kill
 *         or the Kerberos login fails
 */
@Override
public JobResult cancelJob(JobIdentifier jobIdentifier) {
    try {
        return KerberosUtils.login(config, () -> {
            String engineJobId = jobIdentifier.getEngineJobId();
            try {
                // Ask YARN to kill the application backing this job.
                getYarnClient().killApplication(generateApplicationId(engineJobId));
            } catch (YarnException | IOException killError) {
                return JobResult.createErrorResult(killError);
            }
            JobResult cancelled = JobResult.newInstance(false);
            cancelled.setData("jobid", engineJobId);
            return cancelled;
        }, conf);
    } catch (Exception loginError) {
        LOG.error("cancelJob error:", loginError);
        return JobResult.createErrorResult(loginError);
    }
}
Example usage of com.dtstack.taier.pluginapi.pojo.JobResult in the Taier project by DTStack.
From class AbstractRdbsClientTest, method testCancelJob:
/**
 * Verifies that cancelJob returns a non-null JobResult when the underlying
 * execution queue reports a successful cancellation.
 */
@Test
public void testCancelJob() throws Exception {
    // Stub the execution queue so any cancel request reports success.
    RdbsExeQueue queueMock = PowerMockito.mock(RdbsExeQueue.class);
    when(queueMock.cancelJob(any(String.class))).thenReturn(true);
    MemberModifier.field(TestRdbsClient.class, "exeQueue").set(testRdbsClient, queueMock);

    JobIdentifier identifier = JobIdentifier.createInstance("test", "test", "test");
    JobResult result = testRdbsClient.cancelJob(identifier);
    Assert.assertNotNull(result);
}
Aggregations