use of com.dtstack.taier.pluginapi.JobClient in project Taier by DTStack.
the class GroupPriorityQueueTest method testPriorityQueueJob.
@Test
public void testPriorityQueueJob() {
    PriorityBlockingQueue<JobClient> queue = new PriorityBlockingQueue<JobClient>(10, new JobClientComparator());
    JobClient job3 = new JobClient();
    job3.setPriority(3);
    JobClient job4 = new JobClient();
    job4.setPriority(4);
    JobClient job1 = new JobClient();
    job1.setPriority(1);
    JobClient job2 = new JobClient();
    job2.setPriority(2);
    JobClient job5 = new JobClient();
    job5.setPriority(5);
    queue.put(job1);
    queue.put(job2);
    queue.put(job3);
    queue.put(job4);
    queue.put(job5);
    // The job with the lowest priority value should be polled first.
    JobClient polled = queue.poll();
    Assert.assertEquals(job1.getPriority(), polled.getPriority());
}
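JobClientComparator itself is not shown in this snippet, but the assertion only holds if it orders clients by ascending priority, so the lowest priority value is polled first. A minimal equivalent ordering, assuming getPriority() is the sole sort key (the real comparator in Taier may add tie-breakers):

// Sketch only: an ordering equivalent to what the test relies on.
// Requires java.util.Comparator and java.util.concurrent.PriorityBlockingQueue.
Comparator<JobClient> byAscendingPriority = Comparator.comparingLong(JobClient::getPriority);
PriorityBlockingQueue<JobClient> queue = new PriorityBlockingQueue<>(10, byAscendingPriority);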
use of com.dtstack.taier.pluginapi.JobClient in project Taier by DTStack.
the class JobRestartDealer method checkJobInfo.
private Pair<Boolean, JobClient> checkJobInfo(String jobId, ScheduleEngineJobCache jobCache, Integer status) {
    Pair<Boolean, JobClient> check = new Pair<>(false, null);
    if (!TaskStatus.FAILED.getStatus().equals(status) && !TaskStatus.SUBMITFAILD.getStatus().equals(status)) {
        return check;
    }
    try {
        String jobInfo = jobCache.getJobInfo();
        ParamAction paramAction = PublicUtil.jsonStrToObject(jobInfo, ParamAction.class);
        JobClient jobClient = new JobClient(paramAction);
        if (!jobClient.getIsFailRetry()) {
            LOGGER.info("[retry=false] jobId:{} isFailRetry:{}, job is not configured to retry on failure.", jobClient.getJobId(), jobClient.getIsFailRetry());
            return check;
        }
        return new Pair<>(true, jobClient);
    } catch (Exception e) {
        // Deserializing the job's jobInfo into ParamAction failed, so the job is not retried.
        LOGGER.error("[retry=false] jobId:{} not retried by default, error while parsing jobInfo / reading isFailRetry:", jobId, e);
        return check;
    }
}
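The shape of the cached jobInfo string is not shown above; for illustration only, a minimal payload that would satisfy this path, assuming ParamAction exposes isFailRetry and maxRetryNum fields matching the getters used here (the real JSON carries many more fields):

// Hypothetical jobInfo payload; field names are assumptions based on the getters above.
String jobInfo = "{\"taskId\":\"demoJobId\",\"isFailRetry\":true,\"maxRetryNum\":3}";
ParamAction paramAction = PublicUtil.jsonStrToObject(jobInfo, ParamAction.class);
JobClient jobClient = new JobClient(paramAction); // may throw, as handled by the catch block above
// jobClient.getIsFailRetry() -> true, so checkJobInfo would return Pair(true, jobClient)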
use of com.dtstack.taier.pluginapi.JobClient in project Taier by DTStack.
the class JobRestartDealer method checkAndRestart.
/**
 * Decide from the job status whether the job should be retried.
 * @param status current job status
 * @param scheduleJob the scheduled job record
 * @param jobCache cached submission info for the job
 * @param saveRetryFunction callback that persists the retry attempt
 * @return whether the job was resubmitted for retry
 */
public boolean checkAndRestart(Integer status, ScheduleJob scheduleJob, ScheduleEngineJobCache jobCache, BiConsumer<ScheduleJob, JobClient> saveRetryFunction) {
    Pair<Boolean, JobClient> checkResult = checkJobInfo(scheduleJob.getJobId(), jobCache, status);
    if (!checkResult.getKey()) {
        return false;
    }
    JobClient jobClient = checkResult.getValue();
    // Has the retry budget been exhausted, or does the job need to be resubmitted?
    int alreadyRetryNum = getAlreadyRetryNum(scheduleJob.getJobId());
    if (alreadyRetryNum >= jobClient.getMaxRetryNum()) {
        LOGGER.info("[retry=false] jobId:{} alreadyRetryNum:{} maxRetryNum:{}, alreadyRetryNum >= maxRetryNum.", jobClient.getJobId(), alreadyRetryNum, jobClient.getMaxRetryNum());
        return false;
    }
    // Logs are fetched via engineJobId or applicationId.
    jobClient.setEngineTaskId(scheduleJob.getEngineJobId());
    jobClient.setApplicationId(scheduleJob.getApplicationId());
    jobClient.setCallBack((jobStatus) -> updateJobStatus(scheduleJob.getJobId(), jobStatus));
    boolean retry = restartJob(jobClient, saveRetryFunction);
    LOGGER.info("[retry={}] jobId:{} alreadyRetryNum:{} will retry and add into queue again.", retry, jobClient.getJobId(), alreadyRetryNum);
    return retry;
}
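A sketch of how a caller could supply the saveRetryFunction hook; the dealer instance and the logging body here are illustrative, not the actual Taier call site:

// Illustrative caller: the BiConsumer records the retry attempt before the job re-enters the queue.
boolean resubmitted = jobRestartDealer.checkAndRestart(
        TaskStatus.FAILED.getStatus(),
        scheduleJob,
        jobCache,
        (job, client) -> LOGGER.info("saving retry record, jobId:{} engineJobId:{}", job.getJobId(), job.getEngineJobId()));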
use of com.dtstack.taier.pluginapi.JobClient in project Taier by DTStack.
the class FlinkClientTest method testGrammarCheck.
/*@Test
public void testGetJobStatus() throws Exception {
    String jobId = "40c01cd0c53928fff6a55e8d8b8b022c";
    String appId = "application_1594003499276_1278";
    String taskId = "taskId";
    JobIdentifier jobIdentifier = JobIdentifier.createInstance(jobId, appId, taskId);
    ApplicationReportPBImpl report = YarnMockUtil.mockApplicationReport(null);
    when(yarnClient.getApplicationReport(any())).thenReturn(report);
    when(flinkClientBuilder.getYarnClient()).thenReturn(yarnClient);
    RdosTaskStatus jobStatus = flinkClient.getJobStatus(jobIdentifier);
    Assert.assertNotNull(jobStatus);
    PowerMockito.mockStatic(PoolHttpClient.class);
    when(PoolHttpClient.get(any())).thenReturn("{\"state\":\"RUNNING\"}");
    ClusterClient clusterClient = YarnMockUtil.mockClusterClient();
    when(flinkClusterClientManager.getClusterClient(null)).thenReturn(clusterClient);
    jobIdentifier.setApplicationId(null);
    RdosTaskStatus jobStatus2 = flinkClient.getJobStatus(jobIdentifier);
    Assert.assertNotNull(jobStatus2);
}*/
@Test
public void testGrammarCheck() throws Exception {
    MemberModifier.field(FlinkClient.class, "cacheFile").set(flinkClient, Maps.newConcurrentMap());
    // Lay out a fake sql plugin directory for SqlPluginInfo to pick up.
    String sqlPluginRootDir = temporaryFolder.newFolder("sqlPluginDir").getAbsolutePath();
    temporaryFolder.newFolder("sqlPluginDir", "sqlplugin");
    temporaryFolder.newFile("sqlPluginDir/sqlplugin/core-test.jar");
    FlinkConfig flinkConfig = new FlinkConfig();
    flinkConfig.setFlinkPluginRoot(sqlPluginRootDir);
    SqlPluginInfo sqlPluginInfo = SqlPluginInfo.create(flinkConfig);
    MemberModifier.field(FlinkClient.class, "sqlPluginInfo").set(flinkClient, sqlPluginInfo);
    // Stub out PackagedProgram building and job-graph creation so no real Flink job is compiled.
    PowerMockito.mockStatic(PackagedProgram.class);
    PackagedProgram.Builder builder = PowerMockito.mock(PackagedProgram.Builder.class);
    when(PackagedProgram.newBuilder()).thenReturn(builder);
    PackagedProgram packagedProgram = PowerMockito.mock(PackagedProgram.class);
    when(builder.setJarFile(any(File.class))).thenReturn(builder);
    when(builder.setUserClassPaths(any(List.class))).thenReturn(builder);
    when(builder.setConfiguration(any(Configuration.class))).thenReturn(builder);
    when(builder.setArguments(any())).thenReturn(builder);
    when(builder.build()).thenReturn(packagedProgram);
    Configuration configuration = new Configuration();
    when(flinkClientBuilder.getFlinkConfiguration()).thenReturn(configuration);
    PowerMockito.mockStatic(PackagedProgramUtils.class);
    when(PackagedProgramUtils.createJobGraph(any(PackagedProgram.class), any(Configuration.class), any(int.class), any(boolean.class))).thenReturn(PowerMockito.mock(JobGraph.class));
    String absolutePath = temporaryFolder.newFile("core-flinksql.jar").getAbsolutePath();
    String sqlText = "CREATE TABLE MyTable ( id int, name varchar ) WITH (topicIsPattern = 'false', updateMode = 'append', bootstrapServers = 'dtstack01:9092', timezone = 'Asia/Shanghai', parallelism = '1', topic = 'grammar', type = 'kafka11', enableKeyPartitions = 'false', offsetReset = 'latest'); CREATE TABLE MyResult ( id INT, name VARCHAR ) WITH (type = 'console'); INSERT INTO MyResult SELECT a.id, a.name FROM MyTable a;";
    JobClient jobClient = YarnMockUtil.mockJobClient("perJob", sqlText, absolutePath);
    CheckResult checkResult = flinkClient.grammarCheck(jobClient);
    Assert.assertTrue(checkResult.isResult());
}
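As a usage note, the CheckResult returned by grammarCheck is what callers branch on; a minimal, hedged example using only the accessors already shown in this test:

// Sketch of consuming the check result outside the test.
CheckResult result = flinkClient.grammarCheck(jobClient);
if (!result.isResult()) {
    // Surface the failure; how the error detail is read depends on the CheckResult API.
    throw new IllegalStateException("SQL grammar check failed for job " + jobClient.getJobId());
}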
use of com.dtstack.taier.pluginapi.JobClient in project Taier by DTStack.
the class HadoopClient method fillJobConfig.
private Configuration fillJobConfig(JobClient jobClient, Configuration conf) {
    Configuration jobConf = new Configuration(conf);
    Properties confProps = jobClient.getConfProperties();
    if (confProps != null) {
        // Copy only dotted (fully qualified) property names from the job into the Hadoop Configuration.
        confProps.stringPropertyNames().stream()
                .filter(key -> key.contains("."))
                .forEach(key -> jobConf.set(key, confProps.getProperty(key)));
    }
    return jobConf;
}
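Only keys containing a dot are copied, so Hadoop-style property names pass through while plain flags are dropped. A small illustration of that behaviour (the property values here are made up):

// Illustrative input resembling jobClient.getConfProperties(); requires java.util.Properties
// and org.apache.hadoop.conf.Configuration.
Properties confProps = new Properties();
confProps.setProperty("mapreduce.job.queuename", "default"); // dotted key: copied into jobConf
confProps.setProperty("logLevel", "INFO");                   // no dot: filtered out
Configuration jobConf = new Configuration(new Configuration());
confProps.stringPropertyNames().stream()
        .filter(key -> key.contains("."))
        .forEach(key -> jobConf.set(key, confProps.getProperty(key)));
// jobConf.get("mapreduce.job.queuename") -> "default", jobConf.get("logLevel") -> null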