Usage of com.qlangtech.tis.datax.CuratorDataXTaskMessage in the qlangtech "plugins" project:
the createDataXJob method of the DistributedOverseerDataXJobSubmit class.
@Override
public IRemoteTaskTrigger createDataXJob(IDataXJobContext dataXJobContext, RpcServiceReference statusRpc, IDataxProcessor dataxProcessor, String dataXfileName) {
    final IJoinTaskContext execContext = dataXJobContext.getTaskContext();
    final IAppSourcePipelineController pipelineCtrl = execContext.getPipelineController();
    final DistributedQueue<CuratorDataXTaskMessage> taskQueue = getCuratorDistributedQueue();
    /**
     * Builds an asynchronous trigger that, when run, enqueues the DataX task
     * message onto the Curator distributed queue for a remote worker to pick up.
     */
    return new AsynRemoteJobTrigger(dataXfileName) {

        @Override
        public void run() {
            try {
                // Enqueue the task description; a remote executor consumes it.
                taskQueue.put(getDataXJobDTO(execContext, dataXfileName));
                // Record this sub-node in the full-pipeline metrics for the app.
                pipelineCtrl.registerAppSubExecNodeMetrixStatus(IAppSourcePipelineController.DATAX_FULL_PIPELINE + execContext.getIndexName(), dataXfileName);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        @Override
        public RunningStatus getRunningStatus() {
            // Submission is fire-and-forget: the enqueue itself is considered success.
            return RunningStatus.SUCCESS;
        }

        @Override
        public void cancel() {
            // Ask the pipeline controller to stop the whole full-pipeline for this index.
            pipelineCtrl.stop(IAppSourcePipelineController.DATAX_FULL_PIPELINE + execContext.getIndexName());
        }
    };
}
Usage of com.qlangtech.tis.datax.CuratorDataXTaskMessage in the qlangtech "plugins" project:
the getRemoteJobTrigger method of the TaskExec class.
/**
 * Creates a trigger that executes the named DataX job in a local child process.
 * <p>
 * The returned trigger spawns a {@link DataXJobSingleProcessorExecutor} configured
 * from {@code localDataXJobSubmit} and blocks in {@code run()} until the job
 * finishes; {@code cancel()} destroys any processes the executor started.
 *
 * @param jobContext          execution context wrapping the join-task context
 * @param localDataXJobSubmit supplies classpath, main class and working directory
 * @param dataXfileName       name of the DataX job configuration file to run
 * @return a trigger whose run()/cancel()/getRunningStatus() drive the local job
 */
static IRemoteTaskTrigger getRemoteJobTrigger(DataXJobSubmit.IDataXJobContext jobContext, LocalDataXJobSubmit localDataXJobSubmit, String dataXfileName) {
    IJoinTaskContext taskContext = jobContext.getTaskContext();
    // Flipped by the thread executing run(); read by getRunningStatus() from other threads.
    AtomicBoolean complete = new AtomicBoolean(false);
    AtomicBoolean success = new AtomicBoolean(false);
    return new IRemoteTaskTrigger() {

        // volatile: written by the run() thread, read by cancel() from another thread.
        volatile DataXJobSingleProcessorExecutor jobConsumer;

        // volatile: set by cancel(), read in run()'s catch block on the run() thread.
        volatile boolean hasCanceled;

        @Override
        public void run() {
            try {
                // Tag log output of this thread with the task id / collection name.
                MDC.put(IParamContext.KEY_TASK_ID, String.valueOf(taskContext.getTaskId()));
                MDC.put(TISCollectionUtils.KEY_COLLECTION, taskContext.getIndexName());
                jobConsumer = new DataXJobSingleProcessorExecutor() {

                    @Override
                    protected DataXJobSubmit.InstanceType getExecMode() {
                        return DataXJobSubmit.InstanceType.LOCAL;
                    }

                    @Override
                    protected String getClasspath() {
                        return localDataXJobSubmit.getClasspath();
                    }

                    @Override
                    protected boolean useRuntimePropEnvProps() {
                        return false;
                    }

                    @Override
                    protected String[] getExtraJavaSystemPrams() {
                        // Child process must not fetch resources from the center repository.
                        return new String[] { // "-D" + SYSTEM_KEY_LOGBACK_PATH_KEY + "=" + SYSTEM_KEY_LOGBACK_PATH_VALUE
                        "-D" + CenterResource.KEY_notFetchFromCenterRepository + "=true" };
                    }

                    @Override
                    protected String getIncrStateCollectAddress() {
                        // Address of the assemble-log collector, discovered via ZooKeeper.
                        return ZkUtils.getFirstChildValue(((IExecChainContext) taskContext).getZkClient(), ZkUtils.ZK_ASSEMBLE_LOG_COLLECT_PATH);
                    }

                    @Override
                    protected String getMainClassName() {
                        return localDataXJobSubmit.getMainClassName();
                    }

                    @Override
                    protected File getWorkingDirectory() {
                        return localDataXJobSubmit.getWorkingDirectory();
                    }
                };
                CuratorDataXTaskMessage dataXJob = localDataXJobSubmit.getDataXJobDTO(taskContext, dataXfileName);
                // Blocks until the local DataX child process exits.
                jobConsumer.consumeMessage(dataXJob);
                success.set(true);
            } catch (Throwable e) {
                success.set(false);
                if (this.hasCanceled) {
                    // Failure caused by an explicit cancel() — not an error.
                    logger.warn("datax:{} has been canceled", taskContext.getIndexName());
                } else {
                    logger.error("datax:{},jobName:{}", taskContext.getIndexName(), dataXfileName, e);
                    // Known job failures are reported via status flags only; anything
                    // else is unexpected and must propagate.
                    if (!(e instanceof DataXJobSingleProcessorException)) {
                        throw new RuntimeException(e);
                    }
                }
            } finally {
                complete.set(true);
            }
        }

        @Override
        public String getTaskName() {
            return dataXfileName;
        }

        @Override
        public void cancel() {
            if (jobConsumer == null) {
                return;
            }
            // Kill every child process the executor is tracking.
            jobConsumer.runningTask.forEach((taskId, watchdog) -> {
                watchdog.destroyProcess();
                logger.info("taskId:{} relevant task has been canceled", taskId);
            });
            this.hasCanceled = true;
        }

        @Override
        public RunningStatus getRunningStatus() {
            return new RunningStatus(0, complete.get(), success.get());
        }
    };
}
Aggregations