use of com.dtstack.taier.pluginapi.JobIdentifier in project Taier by DTStack.
the class Launcher method main.
public static void main(String[] args) throws Exception {
System.setProperty("HADOOP_USER_NAME", "admin");
// job json path
String jobJsonPath = USER_DIR + SP + "local-test/src/main/json/dtscript-agent.json";
// create jobClient
String content = getJobContent(jobJsonPath);
Map params = PublicUtil.jsonStrToObject(content, Map.class);
ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
JobClient jobClient = new JobClient(paramAction);
// create jobIdentifier
String jobId = "jobId";
String appId = "appId";
String taskId = "taskId";
JobIdentifier jobIdentifier = JobIdentifier.createInstance(jobId, appId, taskId);
// get pluginInfo
String pluginInfo = jobClient.getPluginInfo();
Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
String md5plugin = MD5Util.getMd5String(pluginInfo);
properties.setProperty("md5sum", md5plugin);
// create client
String pluginParentPath = USER_DIR + SP + "pluginLibs";
IClient client = ClientFactory.buildPluginClient(pluginInfo, pluginParentPath);
// client init
ClassLoaderCallBackMethod.callbackAndReset(new CallBack<String>() {
@Override
public String execute() throws Exception {
client.init(properties);
return null;
}
}, client.getClass().getClassLoader(), true);
// test target method
ClassLoaderCallBackMethod.callbackAndReset(new CallBack<Object>() {
@Override
public Object execute() throws Exception {
JobResult jobResult = client.submitJob(jobClient);
return jobResult;
}
}, client.getClass().getClassLoader(), true);
LOG.info("Launcher Success!");
System.exit(0);
}
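Note that the jobIdentifier created above is not consumed by submitJob; it identifies the job for later lookups. A minimal follow-up sketch, not part of the original Launcher, that reuses the same classloader-callback pattern to query the submitted job; it would be placed before the System.exit(0) call and assumes the plugin client's getJobStatus/getJobLog methods and the corresponding imports (the RDBS and Flink tests below exercise exactly these calls):

ClassLoaderCallBackMethod.callbackAndReset(new CallBack<Object>() {
    @Override
    public Object execute() throws Exception {
        // Hedged sketch: look up the submitted job via the JobIdentifier built above.
        TaskStatus status = client.getJobStatus(jobIdentifier);
        String jobLog = client.getJobLog(jobIdentifier);
        LOG.info("status: {}, log: {}", status, jobLog);
        return null;
    }
}, client.getClass().getClassLoader(), true);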
use of com.dtstack.taier.pluginapi.JobIdentifier in project Taier by DTStack.
the class FlinkClientTest method testGetJobLog.
@Test
public void testGetJobLog() throws Exception {
String jobId = "40c01cd0c53928fff6a55e8d8b8b022c";
String appId = "application_1594003499276_1278";
String taskId = "taskId";
JobIdentifier jobIdentifier = JobIdentifier.createInstance(jobId, appId, taskId);
PowerMockito.mockStatic(PoolHttpClient.class);
when(PoolHttpClient.get(any())).thenReturn("{\"app\":{\"amContainerLogs\":\"http://dtstack01:8088/ws/v1/cluster/apps/application_9527\"}}");
ApplicationReportPBImpl report = YarnMockUtil.mockApplicationReport(null);
when(yarnClient.getApplicationReport(any())).thenReturn(report);
when(flinkClientBuilder.getYarnClient()).thenReturn(yarnClient);
ClusterClient currClient = YarnMockUtil.mockClusterClient();
when(flinkClusterClientManager.getClusterClient(any())).thenReturn(currClient);
String jobLog = flinkClient.getJobLog(jobIdentifier);
Assert.assertNotNull(jobLog);
ApplicationReportPBImpl reportFinish = YarnMockUtil.mockApplicationReport(YarnApplicationState.FINISHED);
when(yarnClient.getApplicationReport(any())).thenReturn(reportFinish);
when(flinkClientBuilder.getYarnClient()).thenReturn(yarnClient);
Configuration flinkConfig = new Configuration();
flinkConfig.setString(HistoryServerOptions.HISTORY_SERVER_WEB_ADDRESS, "dtstack01");
flinkConfig.setString(String.valueOf(HistoryServerOptions.HISTORY_SERVER_WEB_PORT), "9527");
when(flinkClientBuilder.getFlinkConfiguration()).thenReturn(flinkConfig);
String jobLogFinished = flinkClient.getJobLog(jobIdentifier);
Assert.assertNotNull(jobLogFinished);
}
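One detail worth flagging in the finished-application setup: HISTORY_SERVER_WEB_PORT is a ConfigOption, so the configuration key has to be taken from ConfigOption.key(); passing the option itself through String.valueOf would use its toString() as the key, and reads of the option would silently fall back to the default port. A small standalone illustration (the class name here is made up for the example):

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.HistoryServerOptions;

public class HistoryServerPortConfigDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Address the option by its key ("historyserver.web.port") when setting it as a string ...
        conf.setString(HistoryServerOptions.HISTORY_SERVER_WEB_PORT.key(), "9527");
        // ... or use the typed setter directly.
        conf.setInteger(HistoryServerOptions.HISTORY_SERVER_WEB_PORT, 9527);
        System.out.println(conf.getInteger(HistoryServerOptions.HISTORY_SERVER_WEB_PORT)); // 9527
    }
}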
use of com.dtstack.taier.pluginapi.JobIdentifier in project Taier by DTStack.
the class AbstractRdbsClientTest method testGetJobStatus.
@Test
public void testGetJobStatus() throws Exception {
    RdbsExeQueue rdbsExeQueue = PowerMockito.mock(RdbsExeQueue.class);
    when(rdbsExeQueue.getJobStatus(any(String.class))).thenReturn(TaskStatus.RUNNING);
    MemberModifier.field(TestRdbsClient.class, "exeQueue").set(testRdbsClient, rdbsExeQueue);
    JobIdentifier jobIdentifier = JobIdentifier.createInstance("test", "test", "test");
    TaskStatus status = testRdbsClient.getJobStatus(jobIdentifier);
    Assert.assertEquals(TaskStatus.RUNNING, status);
}
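A companion check for a terminal status follows the same shape. The sketch below is illustrative only, not a test that exists in Taier; it assumes TaskStatus.FINISHED and reuses the same testRdbsClient field injection shown above:

@Test
public void testGetJobStatusFinished() throws Exception {
    RdbsExeQueue rdbsExeQueue = PowerMockito.mock(RdbsExeQueue.class);
    when(rdbsExeQueue.getJobStatus(any(String.class))).thenReturn(TaskStatus.FINISHED);
    MemberModifier.field(TestRdbsClient.class, "exeQueue").set(testRdbsClient, rdbsExeQueue);
    JobIdentifier jobIdentifier = JobIdentifier.createInstance("test", "test", "test");
    Assert.assertEquals(TaskStatus.FINISHED, testRdbsClient.getJobStatus(jobIdentifier));
}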
use of com.dtstack.taier.pluginapi.JobIdentifier in project Taier by DTStack.
the class AbstractRdbsClientTest method testGetJobLog.
@Test
public void testGetJobLog() throws Exception {
    RdbsExeQueue rdbsExeQueue = PowerMockito.mock(RdbsExeQueue.class);
    when(rdbsExeQueue.getJobLog(any(String.class))).thenReturn("job log");
    MemberModifier.field(TestRdbsClient.class, "exeQueue").set(testRdbsClient, rdbsExeQueue);
    JobIdentifier jobIdentifier = JobIdentifier.createInstance("test", "test", "test");
    String jobLog = testRdbsClient.getJobLog(jobIdentifier);
    Assert.assertEquals("job log", jobLog);
}
use of com.dtstack.taier.pluginapi.JobIdentifier in project Taier by DTStack.
the class JobStatusDealer method dealJob.
private void dealJob(String jobId) throws Exception {
    ScheduleJob scheduleJob = scheduleJobService.getByJobId(jobId);
    ScheduleEngineJobCache engineJobCache = scheduleJobCacheService.getJobCacheByJobId(jobId);
    if (scheduleJob == null || engineJobCache == null || (StringUtils.isBlank(scheduleJob.getApplicationId()) && StringUtils.isBlank(scheduleJob.getEngineJobId()))) {
        shardCache.updateLocalMemTaskStatus(jobId, TaskStatus.CANCELED.getStatus());
        Integer status = TaskStatus.CANCELED.getStatus();
        String engineJobId = null;
        if (scheduleJob != null) {
            engineJobId = scheduleJob.getEngineJobId();
            if (TaskStatus.getStoppedStatus().contains(scheduleJob.getStatus())) {
                status = scheduleJob.getStatus();
            } else {
                scheduleJobService.updateJobStatusAndExecTime(jobId, status);
            }
        } else {
            scheduleJobService.updateJobStatusAndExecTime(jobId, status);
        }
        scheduleJobCacheService.deleteByJobId(jobId);
        LOGGER.info("jobId:{} set job finished, status:{}, scheduleJob is {} null, engineJobCache is {} null, engineJobId is {} blank.", jobId, status, scheduleJob == null ? "" : "not", engineJobCache == null ? "" : "not", engineJobId == null ? "" : "not");
    } else {
        String engineTaskId = scheduleJob.getEngineJobId();
        String appId = scheduleJob.getApplicationId();
        ParamAction paramAction = PublicUtil.jsonStrToObject(engineJobCache.getJobInfo(), ParamAction.class);
        Integer taskType = paramAction.getTaskType();
        Map<String, Object> pluginInfo = paramAction.getPluginInfo();
        JobIdentifier jobIdentifier = new JobIdentifier(engineTaskId, appId, jobId,
                scheduleJob.getTenantId(), taskType,
                TaskParamsUtils.parseDeployTypeByTaskParams(paramAction.getTaskParams(), scheduleJob.getComputeType()).getType(),
                null,
                MapUtils.isEmpty(pluginInfo) ? null : JSONObject.toJSONString(pluginInfo),
                paramAction.getComponentVersion());
        TaskStatus taskStatus = workerOperator.getJobStatus(jobIdentifier);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("------ jobId:{} dealJob status:{}", jobId, taskStatus);
        }
        if (taskStatus != null) {
            taskStatus = checkNotFoundStatus(taskStatus, jobId);
            Integer status = taskStatus.getStatus();
            // retry in progress: do not update the status yet
            boolean isRestart = jobRestartDealer.checkAndRestart(status, scheduleJob, engineJobCache, (job, client) -> ForkJoinPool.commonPool().execute(() -> {
                String engineLog = workerOperator.getEngineLog(jobIdentifier);
                jobRestartDealer.jobRetryRecord(job, client, engineLog);
            }));
            if (isRestart) {
                LOGGER.info("----- jobId:{} after dealJob status:{}", jobId, taskStatus);
                return;
            }
            shardCache.updateLocalMemTaskStatus(jobId, status);
            updateJobStatusWithPredicate(scheduleJob, jobId, status);
            // update order: job_cache first, then engine_batch_job
            if (TaskStatus.getStoppedStatus().contains(status)) {
                jobLogDelayDealer(jobId, jobIdentifier, engineJobCache.getComputeType(), scheduleJob.getType());
                jobStatusFrequency.remove(jobId);
                scheduleJobCacheService.deleteByJobId(jobId);
                LOGGER.info("------ jobId:{} is stop status {} delete jobCache", jobId, status);
            }
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("------ jobId:{} after dealJob status:{}", jobId, taskStatus);
            }
        }
    }
}
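The checkNotFoundStatus call and the jobStatusFrequency map suggest that a status the plugin cannot find is tolerated for a few polls before the job is written off. The sketch below is a guess at that intent, not Taier's actual implementation; the NOTFOUND constant, the threshold, and the field names are assumptions:

// Hypothetical sketch only: tolerate a few NOTFOUND polls before treating the job as failed.
private static final int NOT_FOUND_LIMIT = 3; // assumed threshold, not taken from Taier
private final Map<String, Integer> notFoundCounter = new ConcurrentHashMap<>();

private TaskStatus checkNotFoundStatus(TaskStatus taskStatus, String jobId) {
    if (taskStatus != TaskStatus.NOTFOUND) { // assumes TaskStatus defines such a constant
        notFoundCounter.remove(jobId);
        return taskStatus;
    }
    int seen = notFoundCounter.merge(jobId, 1, Integer::sum);
    // Keep reporting RUNNING until the status has been missing often enough, then give up.
    return seen >= NOT_FOUND_LIMIT ? TaskStatus.FAILED : TaskStatus.RUNNING;
}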