Usage of com.dtstack.taier.pluginapi.pojo.ParamAction in the Taier project by DTStack: class GroupPriorityQueue, method emitJob2PriorityQueue.
/**
 * Pulls jobs cached in DB stage (for this node and this job resource) out of the
 * engine job cache and emits them into the in-memory priority queue, paging by
 * ascending cache id, until the queue refuses a job or the DB stage is drained.
 *
 * @return false: blocked | true: unblocked
 */
private boolean emitJob2PriorityQueue() {
    // Becomes true only when the DB-stage cache has been fully drained.
    boolean empty = false;
    String localAddress = "";
    try {
        // Queue already at capacity -- stay blocked and retry on a later tick.
        if (priorityQueueSize() >= getQueueSizeLimited()) {
            return false;
        }
        localAddress = environmentContext.getLocalAddress();
        // Paging cursor: fetch cache rows with id greater than startId.
        long startId = 0L;
        outLoop: while (true) {
            List<ScheduleEngineJobCache> jobCaches = engineJobCacheService.listByStage(startId, localAddress, EJobCacheStage.DB.getStage(), jobResource);
            if (CollectionUtils.isEmpty(jobCaches)) {
                // No more DB-stage entries: cache drained, report unblocked.
                empty = true;
                break;
            }
            for (ScheduleEngineJobCache jobCache : jobCaches) {
                try {
                    ParamAction paramAction = PublicUtil.jsonStrToObject(jobCache.getJobInfo(), ParamAction.class);
                    JobClient jobClient = new JobClient(paramAction);
                    // Propagate status changes of this job back through the job dealer.
                    jobClient.setCallBack((jobStatus) -> {
                        jobDealer.updateJobStatus(jobClient.getJobId(), jobStatus);
                    });
                    boolean addInner = this.addInner(jobClient, false);
                    LOGGER.info("jobId:{} load from db, {} emit job to queue.", jobClient.getJobId(), addInner ? "success" : "failed");
                    if (!addInner) {
                        // Queue refused the job -- stop paging entirely and stay blocked.
                        empty = false;
                        break outLoop;
                    }
                    // Advance the cursor only past entries that were actually queued.
                    startId = jobCache.getId();
                } catch (Exception e) {
                    LOGGER.error("", e);
                    // Data conversion failed -- log it and record the job as a failed submission.
                    // NOTE(review): startId is not advanced here; presumably dealSubmitFailJob
                    // removes the entry from the cache so the next page skips it -- confirm.
                    jobDealer.dealSubmitFailJob(jobCache.getJobId(), "This task stores information exception and cannot be converted." + e.toString());
                }
            }
        }
    } catch (Exception e) {
        LOGGER.error("emitJob2PriorityQueue localAddress:{} error:", localAddress, e);
    }
    // Only a fully drained cache clears the blocked flag.
    if (empty) {
        blocked.set(false);
    }
    return empty;
}
Usage of com.dtstack.taier.pluginapi.pojo.ParamAction in the Taier project by DTStack: class NodeRecoverService, method recoverJobCaches.
/**
 * Reloads jobs that were already in SUBMITTED stage for this node from the
 * engine job cache and hands them back to the job dealer in batches, paging
 * through the cache table by ascending id.
 */
public void recoverJobCaches() {
    String localAddress = environmentContext.getLocalAddress();
    try {
        long cursorId = 0L;
        for (;;) {
            List<ScheduleEngineJobCache> cachedJobs = engineJobCacheService.listByStage(cursorId, localAddress, EJobCacheStage.SUBMITTED.getStage(), null);
            if (CollectionUtils.isEmpty(cachedJobs)) {
                break;
            }
            List<JobClient> recoveredClients = new ArrayList<>(cachedJobs.size());
            for (ScheduleEngineJobCache cachedJob : cachedJobs) {
                try {
                    ParamAction action = PublicUtil.jsonStrToObject(cachedJob.getJobInfo(), ParamAction.class);
                    recoveredClients.add(new JobClient(action));
                    cursorId = cachedJob.getId();
                } catch (Exception e) {
                    LOGGER.error("", e);
                    // Data conversion failed -- log it and record the job as a failed submission.
                    jobDealer.dealSubmitFailJob(cachedJob.getJobId(), "This task stores information exception and cannot be converted." + ExceptionUtil.getErrorMessage(e));
                }
            }
            if (CollectionUtils.isNotEmpty(recoveredClients)) {
                jobDealer.afterSubmitJobVast(recoveredClients);
            }
        }
    } catch (Exception e) {
        LOGGER.error("----broker:{} RecoverDealer error:", localAddress, e);
    }
}
Usage of com.dtstack.taier.pluginapi.pojo.ParamAction in the Taier project by DTStack: class HadoopClient, method main.
/**
 * Local smoke-test entry point: reads a ParamAction JSON from the first line of
 * the file given as {@code args[0]}, initialises a HadoopClient from the job's
 * plugin info and logs the cluster resource.
 *
 * @param args args[0] = path to a file whose first line is the ParamAction JSON
 * @throws Exception never in practice; declared for the I/O chain
 */
public static void main(String[] args) throws Exception {
    try {
        System.setProperty("HADOOP_USER_NAME", "admin");
        // input params json file path
        String filePath = args[0];
        File paramsFile = new File(filePath);
        String request;
        // try-with-resources closes the whole stream chain on every path.
        // The previous manual finally leaked fileInputStream/inputStreamReader
        // whenever the BufferedReader was never constructed, and a close()
        // failure could escape the finally block.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(paramsFile)))) {
            request = reader.readLine();
        }
        Map params = PublicUtil.jsonStrToObject(request, Map.class);
        ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
        JobClient jobClient = new JobClient(paramAction);
        String pluginInfo = jobClient.getPluginInfo();
        Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
        // Tag the plugin config with its own checksum.
        String md5plugin = MD5Util.getMd5String(pluginInfo);
        properties.setProperty("md5sum", md5plugin);
        HadoopClient client = new HadoopClient();
        client.init(properties);
        ClusterResource clusterResource = client.getClusterResource();
        LOG.info("submit success!");
        LOG.info(clusterResource.toString());
        System.exit(0);
    } catch (Exception e) {
        LOG.error("submit error!", e);
    }
}
Usage of com.dtstack.taier.pluginapi.pojo.ParamAction in the Taier project by DTStack: class DtYarnClient, method main.
/**
 * Local smoke-test entry point: reads a ParamAction JSON from the first line of
 * the file given as {@code args[0]}, initialises a DtYarnClient from the job's
 * plugin info and logs the cluster resource.
 *
 * @param args args[0] = path to a file whose first line is the ParamAction JSON
 * @throws Exception if reading or parsing the params file fails
 */
public static void main(String[] args) throws Exception {
    System.setProperty("HADOOP_USER_NAME", "admin");
    // input params json file path
    String filePath = args[0];
    File paramsFile = new File(filePath);
    String request;
    // try-with-resources: the original never closed the reader, leaking the file handle.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(paramsFile)))) {
        request = reader.readLine();
    }
    Map params = PublicUtil.jsonStrToObject(request, Map.class);
    ParamAction paramAction = PublicUtil.mapToObject(params, ParamAction.class);
    JobClient jobClient = new JobClient(paramAction);
    String pluginInfo = jobClient.getPluginInfo();
    Properties properties = PublicUtil.jsonStrToObject(pluginInfo, Properties.class);
    // Tag the plugin config with its own checksum.
    String md5plugin = MD5Util.getMd5String(pluginInfo);
    properties.setProperty("md5sum", md5plugin);
    DtYarnClient client = new DtYarnClient();
    client.init(properties);
    ClusterResource clusterResource = client.getClusterResource();
    LOG.info("submit success!");
    LOG.info(clusterResource.toString());
    System.exit(0);
}
Usage of com.dtstack.taier.pluginapi.pojo.ParamAction in the Taier project by DTStack: class YarnMockUtil, method mockJobClient.
/**
 * Builds a mock {@link JobClient} for Yarn tests.
 *
 * @param jobType    "perJob" (case-insensitive) selects task/compute type 0, anything else type 1
 * @param sqlNewText optional SQL text; when empty, a default ADD JAR statement is used
 * @param jarPath    path set on the core jar info
 * @return a JobClient populated with fixed mock values (jobId "9999", tenant 0, savepoint path, empty yarnConf)
 * @throws Exception propagated from JobClient construction
 */
public static JobClient mockJobClient(String jobType, String sqlNewText, String jarPath) throws Exception {
    String taskId = "9999";
    String sqlText = "ADD JAR WITH /data/sftp/21_window_WindowJoin.jar AS dtstack.WindowJoin";
    if (StringUtils.isNotEmpty(sqlNewText)) {
        sqlText = sqlNewText;
    }
    ParamAction paramAction = new ParamAction();
    if ("perJob".equalsIgnoreCase(jobType)) {
        paramAction.setTaskType(0);
        paramAction.setComputeType(0);
    } else {
        paramAction.setTaskType(1);
        paramAction.setComputeType(1);
    }
    paramAction.setJobId(taskId);
    paramAction.setSqlText(sqlText);
    paramAction.setTenantId(0L);
    paramAction.setTaskParams("{\"test\":\"test\"}");
    paramAction.setExternalPath("/tmp/savepoint");
    // Fixed raw-typed HashMaps: use the diamond operator so the map is
    // properly parameterized instead of raw.
    Map<String, Object> map = new HashMap<>();
    map.put("yarnConf", new HashMap<String, Object>());
    paramAction.setPluginInfo(map);
    JobClient jobClient = new JobClient(paramAction);
    JarFileInfo jarFileInfo = new JarFileInfo();
    jarFileInfo.setJarPath(jarPath);
    jarFileInfo.setMainClass("dtstack.WindowJoin");
    jobClient.setCoreJarInfo(jarFileInfo);
    return jobClient;
}
Aggregations