Usage of com.dtstack.taier.pluginapi.JobClient in the Taier project (DTStack) — class DtYarnClient, method main.
/**
 * Standalone entry point for smoke-testing the DtYarnClient plugin.
 * <p>
 * Reads a single JSON line from the params file given as {@code args[0]},
 * converts it into a {@link ParamAction}/{@link JobClient}, initializes a
 * {@code DtYarnClient} from the job's plugin info, and logs the cluster
 * resource it reports. Exits the JVM with status 0 on success.
 *
 * @param args args[0] is the path of a file whose first line is the request JSON
 * @throws Exception on I/O failure, malformed JSON, or client init/query errors
 */
public static void main(String[] args) throws Exception {
    System.setProperty("HADOOP_USER_NAME", "admin");
    // input params json file path
    String filePath = args[0];
    File paramsFile = new File(filePath);
    String request;
    // try-with-resources so the stream is closed even if readLine throws
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(paramsFile)))) {
        request = reader.readLine();
    }
    if (request == null) {
        throw new IllegalArgumentException("params file is empty: " + filePath);
    }
    Map params = PublicUtil.jsonStrToObject(request, Map.class);
    ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
    JobClient jobClient = new JobClient(paramAction);
    String pluginInfo = jobClient.getPluginInfo();
    Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
    // md5 of the raw plugin info is used downstream as a cache/config key
    String md5plugin = MD5Util.getMd5String(pluginInfo);
    properties.setProperty("md5sum", md5plugin);
    DtYarnClient client = new DtYarnClient();
    client.init(properties);
    ClusterResource clusterResource = client.getClusterResource();
    LOG.info("submit success!");
    LOG.info(clusterResource.toString());
    System.exit(0);
}
Usage of com.dtstack.taier.pluginapi.JobClient in the Taier project (DTStack) — class YarnMockUtil, method mockJobClient.
/**
 * Builds a {@link JobClient} test fixture.
 * <p>
 * Task/compute type are 0/0 for "perJob" (case-insensitive) and 1/1 otherwise;
 * the SQL text defaults to an ADD JAR statement unless {@code sqlNewText} is
 * non-empty. Plugin info carries an empty yarnConf map, and the core jar info
 * points at {@code jarPath} with main class {@code dtstack.WindowJoin}.
 *
 * @param jobType    "perJob" selects per-job task/compute types, anything else session-style
 * @param sqlNewText optional SQL text override; blank/null keeps the default
 * @param jarPath    path recorded in the job's core jar info
 * @return a populated JobClient suitable for tests
 * @throws Exception if JobClient construction fails
 */
public static JobClient mockJobClient(String jobType, String sqlNewText, String jarPath) throws Exception {
    String taskId = "9999";
    String sqlText = "ADD JAR WITH /data/sftp/21_window_WindowJoin.jar AS dtstack.WindowJoin";
    if (StringUtils.isNotEmpty(sqlNewText)) {
        sqlText = sqlNewText;
    }
    ParamAction paramAction = new ParamAction();
    if ("perJob".equalsIgnoreCase(jobType)) {
        paramAction.setTaskType(0);
        paramAction.setComputeType(0);
    } else {
        paramAction.setTaskType(1);
        paramAction.setComputeType(1);
    }
    paramAction.setJobId(taskId);
    paramAction.setSqlText(sqlText);
    paramAction.setTenantId(0L);
    paramAction.setTaskParams("{\"test\":\"test\"}");
    paramAction.setExternalPath("/tmp/savepoint");
    // diamond operator instead of raw HashMap types (avoids unchecked warnings)
    Map<String, Object> map = new HashMap<>();
    map.put("yarnConf", new HashMap<>());
    paramAction.setPluginInfo(map);
    JobClient jobClient = new JobClient(paramAction);
    JarFileInfo jarFileInfo = new JarFileInfo();
    jarFileInfo.setJarPath(jarPath);
    jarFileInfo.setMainClass("dtstack.WindowJoin");
    jobClient.setCoreJarInfo(jarFileInfo);
    return jobClient;
}
Usage of com.dtstack.taier.pluginapi.JobClient in the Taier project (DTStack) — class SyncPluginInfoTest, method testCreateSyncPluginArgs.
@Test
public void testCreateSyncPluginArgs() {
    // Flink config pointing at a local plugin root with shipfile load mode.
    FlinkConfig config = new FlinkConfig();
    config.setRemoteFlinkJarPath("/opt/dtstack/110_flinkplugin/");
    config.setFlinkPluginRoot("/opt/dtstack/110_flinkplugin/");
    config.setMonitorAddress("http://localhost:8088");
    config.setPluginLoadMode("shipfile");

    // Stream job client with class args and an empty conf injected via reflection.
    JobClient client = new JobClient();
    client.setClassArgs("-jobid flink_test_stream");
    client.setComputeType(ComputeType.STREAM);
    Whitebox.setInternalState(client, "confProperties", new Properties());

    SyncPluginInfo pluginInfo = SyncPluginInfo.create(config);
    List<String> args = pluginInfo.createSyncPluginArgs(client, new FlinkClient());

    // The generated args must include jobid, monitor, plugin load mode and run mode.
    String actual = new Gson().toJson(args);
    String expectStr = "[\"-jobid\",\"flink_test_stream\",\"-monitor\",\"http://localhost:8088\",\"-pluginLoadMode\",\"shipfile\",\"-mode\",\"yarnPer\"]";
    Assert.assertEquals(expectStr, actual);
}
Usage of com.dtstack.taier.pluginapi.JobClient in the Taier project (DTStack) — class RdbsExeQueueTest, method testSubmit.
@Test
public void testSubmit() {
    // Submitting a minimal client must yield a non-null handle.
    JobClient client = new JobClient();
    client.setJobId("test");
    Assert.assertNotNull(rdbsExeQueue.submit(client));
}
Usage of com.dtstack.taier.pluginapi.JobClient in the Taier project (DTStack) — class ConsoleService, method jobStick.
/**
 * Moves a cached job to the front of its priority queue ("stick" it).
 * Returns false when the job is not cached, its stage does not allow
 * re-ordering, or any error occurs.
 */
public Boolean jobStick(String jobId) {
    try {
        ScheduleEngineJobCache cache = scheduleEngineJobCacheMapper.getOne(jobId);
        if (null == cache) {
            return false;
        }
        int stage = cache.getStage();
        // Only jobs in the DB or PRIORITY stage support re-ordering.
        if (stage != EJobCacheStage.DB.getStage() && stage != EJobCacheStage.PRIORITY.getStage()) {
            return false;
        }
        ParamAction action = PublicUtil.jsonStrToObject(cache.getJobInfo(), ParamAction.class);
        JobClient client = new JobClient(action);
        client.setCallBack((status) -> {
            jobDealer.updateJobStatus(client.getJobId(), status);
        });
        // New priority is one below the current minimum so the job sorts first.
        Long minPriority = scheduleEngineJobCacheMapper.minPriorityByStage(
                cache.getJobResource(),
                Lists.newArrayList(EJobCacheStage.PRIORITY.getStage()),
                cache.getNodeAddress());
        long basePriority = minPriority == null ? 0 : minPriority;
        client.setPriority(basePriority - 1);
        if (stage == EJobCacheStage.PRIORITY.getStage()) {
            // Remove the element first; duplicate inserts would be ignored.
            GroupPriorityQueue queue = jobDealer.getGroupPriorityQueue(cache.getJobResource());
            queue.remove(client);
        }
        return jobDealer.addGroupPriorityQueue(cache.getJobResource(), client, false, false);
    } catch (Exception e) {
        LOGGER.error("jobStick error:", e);
    }
    return false;
}
Aggregations