Usage of com.dtstack.taier.pluginapi.enums.TaskStatus in the Taier project (DTStack):
the getJobStatus method of the ClientOperator class.
/**
 * Looks up the current status of a job through the plugin client resolved
 * from {@code pluginInfo}.
 *
 * @param pluginInfo    serialized plugin configuration used to resolve the client
 * @param jobIdentifier identifies the job; must carry an engine job id or an application id
 * @return the job's {@link TaskStatus}, {@code null} if the client returned no result,
 *         or {@link TaskStatus#NOTFOUND} if the lookup threw
 * @throws RdosDefineException when both the engine job id and the application id are missing
 */
public TaskStatus getJobStatus(String pluginInfo, JobIdentifier jobIdentifier) {
    checkoutOperator(pluginInfo, jobIdentifier);
    String jobId = jobIdentifier.getEngineJobId();
    boolean missingEngineJobId = Strings.isNullOrEmpty(jobId);
    boolean missingApplicationId = Strings.isNullOrEmpty(jobIdentifier.getApplicationId());
    if (missingEngineJobId && missingApplicationId) {
        // without either identifier there is nothing to query
        throw new RdosDefineException("can't get job of jobId is empty or null!");
    }
    try {
        IClient client = clientCache.getClient(pluginInfo);
        Object result = client.getJobStatus(jobIdentifier);
        return result == null ? null : (TaskStatus) result;
    } catch (Exception e) {
        LOGGER.error("getStatus happens error:{}", jobId, e);
        return TaskStatus.NOTFOUND;
    }
}
Usage of com.dtstack.taier.pluginapi.enums.TaskStatus in the Taier project (DTStack):
the getCheckpoints method of the FlinkClient class.
/**
 * Retrieves the checkpoint information of a Flink job as a raw message string.
 * <p>
 * Finished per-job deployments are read from the job archive; otherwise the
 * checkpoint REST endpoint of the live cluster is queried over HTTP.
 *
 * @param jobIdentifier identifies the job; must carry a non-empty engine job id
 * @return the checkpoint message, or an empty string when the engine job id is
 *         missing or the lookup fails
 */
@Override
public String getCheckpoints(JobIdentifier jobIdentifier) {
    String jobId = jobIdentifier.getJobId();
    String checkpointMsg = "";
    String engineJobId = jobIdentifier.getEngineJobId();
    if (StringUtils.isEmpty(engineJobId)) {
        logger.warn("{} getCheckpoints is null, because engineJobId is empty", jobId);
        return checkpointMsg;
    }
    TaskStatus taskStatus = TaskStatus.NOTFOUND;
    try {
        String checkpointUrlPath = String.format(ConfigConstrant.JOB_CHECKPOINTS_URL_FORMAT, engineJobId);
        taskStatus = getJobStatus(jobIdentifier);
        // primitive booleans instead of boxed Boolean for plain local flags
        boolean isEndStatus = IS_END_STATUS.test(taskStatus);
        boolean isPerjob = EDeployMode.PERJOB.getType().equals(jobIdentifier.getDeployMode());
        if (isPerjob && isEndStatus) {
            // finished per-job clusters are gone; only the archive still has the data
            checkpointMsg = getMessageFromJobArchive(engineJobId, checkpointUrlPath);
        } else {
            ClusterClient currClient = flinkClusterClientManager.getClusterClient(jobIdentifier);
            String reqURL = currClient.getWebInterfaceURL();
            checkpointMsg = getMessageByHttp(checkpointUrlPath, reqURL);
        }
    } catch (Exception e) {
        logger.error("taskId: {}, taskStatus is {}, Get checkpoint error: ", jobId, taskStatus.name(), e);
    }
    // truncate to 200 chars to keep the log line bounded
    logger.info("taskId: {}, getCheckpoints: {}", jobId, StringUtils.substring(checkpointMsg, 0, 200));
    return checkpointMsg;
}
Usage of com.dtstack.taier.pluginapi.enums.TaskStatus in the Taier project (DTStack):
the getJobStatus method of the FlinkClient class.
/**
 * Queries the job status directly through the Flink REST API and returns it.
 * <p>
 * Fallback order: live cluster REST endpoint, then the job archive, then the
 * YARN application status (via {@code getPerJobStatus}) when an application id
 * is available.
 *
 * @param jobIdentifier identifies the job; must carry a non-empty engine job id
 * @return the parsed {@link TaskStatus}, or {@link TaskStatus#NOTFOUND} when the
 *         status cannot be determined
 */
@Override
public TaskStatus getJobStatus(JobIdentifier jobIdentifier) {
    String jobId = jobIdentifier.getJobId();
    String engineJobId = jobIdentifier.getEngineJobId();
    String applicationId = jobIdentifier.getApplicationId();
    if (StringUtils.isEmpty(engineJobId)) {
        logger.warn("{} getJobStatus is NOTFOUND, because engineJobId is empty.", jobId);
        return TaskStatus.NOTFOUND;
    }
    // a failure to obtain the client is tolerated; we fall through to the archive
    ClusterClient clusterClient = null;
    try {
        clusterClient = flinkClusterClientManager.getClusterClient(jobIdentifier);
    } catch (Exception e) {
        logger.error("taskId: {}, get clusterClient error:", jobId, e);
    }
    String jobUrlPath = String.format(ConfigConstrant.JOB_URL_FORMAT, engineJobId);
    String response = null;
    Exception urlException = null;
    if (clusterClient != null) {
        try {
            String webInterfaceURL = clusterClient.getWebInterfaceURL();
            String jobUrl = webInterfaceURL + jobUrlPath;
            response = PoolHttpClient.get(jobUrl);
        } catch (Exception e) {
            // remember the REST failure; it is only logged if the archive also fails
            urlException = e;
        }
    }
    if (StringUtils.isEmpty(response)) {
        try {
            response = getMessageFromJobArchive(engineJobId, jobUrlPath);
        } catch (Exception e) {
            if (urlException != null) {
                logger.error("taskId: {}, Get job status error from webInterface: ", jobId, urlException);
            }
            logger.error("taskId: {}, request job status error from jobArchive: ", jobId, e);
        }
    }
    if (StringUtils.isEmpty(response)) {
        if (StringUtils.isNotEmpty(applicationId)) {
            // last resort: ask YARN for the application-level status
            TaskStatus taskStatus = getPerJobStatus(applicationId);
            logger.info("taskId: {}, try getPerJobStatus with yarnClient, status: {}", jobId, taskStatus.name());
            return taskStatus;
        }
        return TaskStatus.NOTFOUND;
    }
    try {
        // NOTE: the former "response == null" check here was unreachable —
        // StringUtils.isEmpty(null) is true, so a null response returns above.
        Map<String, Object> statusMap = PublicUtil.jsonStrToObject(response, Map.class);
        Object stateObj = statusMap.get("state");
        if (stateObj == null) {
            return TaskStatus.NOTFOUND;
        }
        String state = StringUtils.upperCase((String) stateObj);
        return TaskStatus.getTaskStatus(state);
    } catch (Exception e) {
        logger.error("taskId: {}, getJobStatus error: ", jobId, e);
        return TaskStatus.NOTFOUND;
    }
}
Usage of com.dtstack.taier.pluginapi.enums.TaskStatus in the Taier project (DTStack):
the getJobLog method of the FlinkClient class.
/**
 * Fetches and parses the exception log of a Flink job.
 * <p>
 * Finished per-job deployments are read from the job archive; otherwise the
 * exceptions REST endpoint of the live cluster is queried over HTTP. The raw
 * message is passed through {@code FlinkRestParseUtil.parseEngineLog}.
 *
 * @param jobIdentifier identifies the job
 * @return the parsed engine log, or a {@code handleJobLog(...)} placeholder when
 *         the engine job id is missing or retrieval fails
 */
@Override
public String getJobLog(JobIdentifier jobIdentifier) {
    String jobId = jobIdentifier.getJobId();
    String engineJobId = jobIdentifier.getEngineJobId();
    String exceptMessage = "";
    try {
        // isEmpty instead of == null, consistent with getJobStatus/getCheckpoints
        if (StringUtils.isEmpty(engineJobId)) {
            logger.error("{} getJobLog is null, because engineJobId is empty", jobId);
            // fixed message typo: "jogLog" -> "jobLog"
            return handleJobLog("", "Get jobLog error, because engineJobId is null", "Job has not submitted to yarn, Please waiting moment.");
        }
        String exceptionUrlPath = String.format(ConfigConstrant.JOB_EXCEPTIONS_URL_FORMAT, engineJobId);
        TaskStatus jobStatus = getJobStatus(jobIdentifier);
        boolean isEndStatus = IS_END_STATUS.test(jobStatus);
        boolean isPerjob = EDeployMode.PERJOB.getType().equals(jobIdentifier.getDeployMode());
        if (isPerjob && isEndStatus) {
            // finished per-job clusters are gone; only the archive still has the data
            exceptMessage = getMessageFromJobArchive(engineJobId, exceptionUrlPath);
        } else {
            ClusterClient currClient = flinkClusterClientManager.getClusterClient(jobIdentifier);
            String reqURL = currClient.getWebInterfaceURL();
            exceptMessage = getMessageByHttp(exceptionUrlPath, reqURL);
        }
        String jobLogContent = FlinkRestParseUtil.parseEngineLog(exceptMessage);
        logger.info("taskId: {}, job log content: {}", jobId, StringUtils.substring(jobLogContent, 0, 100));
        return jobLogContent;
    } catch (Exception e) {
        // pass the throwable as the last argument so the stack trace is not lost
        logger.error("Get job log error, {}", e.getMessage(), e);
        return handleJobLog(engineJobId, ExceptionInfoConstrant.FLINK_GET_LOG_ERROR_UNDO_RESTART_EXCEPTION, ExceptionUtil.getErrorMessage(e));
    }
}
Usage of com.dtstack.taier.pluginapi.enums.TaskStatus in the Taier project (DTStack):
the cancelJob method of the FlinkClient class.
/**
 * Cancels a running Flink job under a Kerberos login context.
 * <p>
 * Jobs already in a stopped status are skipped. SESSION/STANDALONE deployments
 * are cancelled via the cluster client; PERJOB deployments are cancelled with a
 * savepoint unless force-cancel is requested, in which case the YARN application
 * is killed outright. Both cancel calls are bounded by the identifier's timeout.
 *
 * @param jobIdentifier identifies the job, its deploy mode, timeout and force-cancel flag
 * @return a success {@code JobResult} carrying the application/engine job ids,
 *         or an error {@code JobResult} wrapping the failure
 */
@Override
public JobResult cancelJob(JobIdentifier jobIdentifier) {
try {
// all cluster interaction runs inside the Kerberos login callback
return KerberosUtils.login(flinkConfig, () -> {
String engineJobId = jobIdentifier.getEngineJobId();
String appId = jobIdentifier.getApplicationId();
TaskStatus taskStatus = null;
try {
taskStatus = getJobStatus(jobIdentifier);
// only attempt cancellation when the job is not already stopped
if (taskStatus != null && !TaskStatus.getStoppedStatus().contains(taskStatus.getStatus())) {
ClusterClient targetClusterClient = flinkClusterClientManager.getClusterClient(jobIdentifier);
// the engine job id is the hex form of the Flink JobID
JobID jobId = new JobID(org.apache.flink.util.StringUtils.hexStringToByte(jobIdentifier.getEngineJobId()));
Integer deployMode = jobIdentifier.getDeployMode();
if (EDeployMode.SESSION == EDeployMode.getByType(deployMode) || EDeployMode.STANDALONE == EDeployMode.getByType(deployMode)) {
// session job cancel
Object ack = targetClusterClient.cancel(jobId).get(jobIdentifier.getTimeout(), TimeUnit.MILLISECONDS);
logger.info("taskId: {}, job[{}] cancel success with ack : {}", jobIdentifier.getJobId(), engineJobId, ack.toString());
} else if (EDeployMode.PERJOB == EDeployMode.getByType(deployMode)) {
// perJob cancel
if (jobIdentifier.isForceCancel()) {
// force: kill the whole YARN application instead of a graceful stop
return killApplication(jobIdentifier);
}
// graceful: cancel while taking a savepoint (null = default savepoint dir)
CompletableFuture completableFuture = targetClusterClient.cancelWithSavepoint(jobId, null);
Object ask = completableFuture.get(jobIdentifier.getTimeout(), TimeUnit.MILLISECONDS);
logger.info("taskId: {}, job[{}] cancelWithSavepoint success, savepoint path {}", jobIdentifier.getJobId(), engineJobId, ask.toString());
} else {
logger.warn("taskId: {}, job[{}] cancel failed Unexpected deployMode type: {}", jobIdentifier.getJobId(), engineJobId, deployMode);
}
}
return JobResult.createSuccessResult(jobIdentifier.getApplicationId(), jobIdentifier.getEngineJobId());
} catch (Exception e) {
if (taskStatus != null) {
logger.warn("taskId: {}, cancel job error jobStatus is: {}", jobIdentifier.getJobId(), taskStatus.name());
}
logger.error("taskId: {} engineJobId:{} applicationId:{} cancelJob error, try to cancel with yarnClient.", jobIdentifier.getJobId(), engineJobId, appId, e);
return JobResult.createErrorResult(e);
}
}, hadoopConf.getYarnConfiguration());
} catch (Exception exception) {
// failure of the Kerberos login itself (or anything thrown out of the callback)
logger.error("taskId: {} engineJobId: {} applicationId: {} cancelJob error: ", jobIdentifier.getJobId(), jobIdentifier.getEngineJobId(), jobIdentifier.getApplicationId(), exception);
return JobResult.createErrorResult(exception);
}
}
Aggregations