Use of com.qlangtech.tis.exec.ExecuteResult in project tis by qlangtech:
the class TestDataXExecuteInterceptor, method executeJobTrigger.
/**
 * Drives one run of {@link DataXExecuteInterceptor#execute} against a mocked
 * execution-chain context and asserts the outcome matches {@code finalSuccess}.
 */
private void executeJobTrigger(IRemoteTaskTrigger jobTrigger, boolean finalSuccess) throws Exception {
    final int testTaskId = 999;
    TrackableExecuteInterceptor.initialTaskPhase(testTaskId);
    // Route all job submission through a mock submit that simply hands back the supplied trigger.
    DataXJobSubmit.mockGetter = () -> new TestIndexSwapTaskflowLauncherWithDataXTrigger.MockDataXJobSubmit(jobTrigger);
    final DataXExecuteInterceptor interceptor = new DataXExecuteInterceptor();
    IExecChainContext chainContext = mock("execChainContext", IExecChainContext.class);
    chainContext.rebindLoggingMDCParams();
    EasyMock.expect(chainContext.getIndexName()).andReturn(AP_NAME);
    EasyMock.expect(chainContext.getTaskId()).andReturn(testTaskId).anyTimes();
    MockDataxProcessor mockProcessor = new MockDataxProcessor();
    EasyMock.expect(chainContext.getAppSource()).andReturn(mockProcessor);
    this.replay();
    ExecuteResult result = interceptor.execute(chainContext);
    assertEquals("execute must be " + (finalSuccess ? "success" : "faild"), finalSuccess, result.isSuccess());
    this.verifyAll();
}
Use of com.qlangtech.tis.exec.ExecuteResult in project tis by qlangtech:
the class DataFlowAppSource, method executeDAG.
/**
 * Runs the data-flow topology as a DAG via {@link TISReactor}, reporting per-task
 * failures back through {@code dataProcessFeedback}.
 *
 * @param execChainContext    current execution chain context
 * @param topology            SQL data-flow topology whose DAG session spec drives scheduling
 * @param dataProcessFeedback sink for dump/join status updates when a task fails
 * @param taskMap             task name to task/milestone wrapper consumed by the reactor
 * @return the failure result captured by the listener below, or {@code null} when no task
 *         failed — NOTE(review): unlike DataXExecuteInterceptor.execute, which starts from
 *         ExecuteResult.createSuccess(), a fully successful run returns null here; confirm
 *         against call sites that null is treated as success before changing this.
 */
private ExecuteResult executeDAG(IExecChainContext execChainContext, SqlTaskNodeMeta.SqlDataFlowTopology topology, IDataProcessFeedback dataProcessFeedback, Map<String, TISReactor.TaskAndMilestone> taskMap) {
// single-element array so the anonymous listener below can write to an
// effectively-final reference; holds the first failure produced by the reactor
final ExecuteResult[] faildResult = new ExecuteResult[1];
try {
TISReactor reactor = new TISReactor(execChainContext, taskMap);
String dagSessionSpec = topology.getDAGSessionSpec();
logger.info("dagSessionSpec:" + dagSessionSpec);
// final PrintWriter w = new PrintWriter(sw, true);
ReactorListener listener = new ReactorListener() {
// TODO: Does it really needs handlers to be synchronized?
@Override
public synchronized void onTaskCompleted(Task t) {
// success path: nothing extra to report, but processTaskResult still records phase state
processTaskResult(execChainContext, (TISReactor.TaskImpl) t, dataProcessFeedback, new ITaskResultProcessor() {
@Override
public void process(DumpPhaseStatus dumpPhase, TISReactor.TaskImpl task) {
}
@Override
public void process(JoinPhaseStatus joinPhase, TISReactor.TaskImpl task) {
}
});
}
@Override
public synchronized void onTaskFailed(Task t, Throwable err, boolean fatal) {
// w.println("Failed " + t.getDisplayName() + " with " + err);
processTaskResult(execChainContext, (TISReactor.TaskImpl) t, dataProcessFeedback, new ITaskResultProcessor() {
@Override
public void process(DumpPhaseStatus dumpPhase, TISReactor.TaskImpl task) {
// propagate the dump-table failure to the feedback channel
dataProcessFeedback.reportDumpTableStatusError(execChainContext, task);
}
@Override
public void process(JoinPhaseStatus joinPhase, TISReactor.TaskImpl task) {
JoinPhaseStatus.JoinTaskStatus stat = joinPhase.getTaskStatus(task.getIdentityName());
// statReceiver.reportBuildIndexStatErr(execContext.getTaskId(),task.getIdentityName());
// mark the join task as terminally failed
stat.setWaiting(false);
stat.setFaild(true);
stat.setComplete(true);
}
});
}
};
// execute the DAG schedule
reactor.execute(executorService, reactor.buildSession(dagSessionSpec), listener, new ReactorListener() {
@Override
public void onTaskCompleted(Task t) {
// dumpPhaseStatus.isComplete();
// joinPhaseStatus.isComplete();
}
@Override
public void onTaskFailed(Task t, Throwable err, boolean fatal) {
logger.error(t.getDisplayName(), err);
// capture the failure so it can be surfaced to the caller
faildResult[0] = ExecuteResult.createFaild().setMessage("status.runningStatus.isComplete():" + err.getMessage());
}
});
} catch (Exception e) {
throw new RuntimeException(e);
}
// null when every task completed — see the NOTE(review) in the javadoc above
return faildResult[0];
}
Use of com.qlangtech.tis.exec.ExecuteResult in project tis by qlangtech:
the class DataXExecuteInterceptor, method execute.
/**
 * Triggers every DataX dump sub-job configured for the current application, wires in
 * optional batch-post (join) tasks produced by the writer, and runs them all as a DAG.
 *
 * <p>Fixes over the previous revision: the per-invocation {@link ExecutorService} is now
 * shut down in {@code finally} (its core threads never time out, so each call used to
 * leak two idle threads), cancel failures are logged instead of silently swallowed, and
 * the unused {@code RunningStatus} local was removed.
 *
 * @param execChainContext current execution chain context (supplies app source, task id, index name)
 * @return a success result when the whole DAG completed, otherwise the first captured failure
 * @throws Exception if job creation or DAG scheduling fails
 */
@Override
protected ExecuteResult execute(IExecChainContext execChainContext) throws Exception {
    int nThreads = 2;
    // bounded queue: a single job may contain at most MAX_TABS_NUM_IN_PER_JOB tasks
    final ExecutorService executorService = new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<>(DataXJobSubmit.MAX_TABS_NUM_IN_PER_JOB), Executors.defaultThreadFactory());
    final Map<String, TISReactor.TaskAndMilestone> taskMap = Maps.newHashMap();
    RpcServiceReference statusRpc = getDataXExecReporter();
    DataxProcessor appSource = execChainContext.getAppSource();
    List<IRemoteTaskTrigger> triggers = Lists.newArrayList();
    List<File> cfgFileNames = appSource.getDataxCfgFileNames(null);
    if (CollectionUtils.isEmpty(cfgFileNames)) {
        throw new IllegalStateException("dataX cfgFileNames can not be empty");
    }
    DataXJobSubmit.InstanceType expectDataXJobSumit = getDataXTriggerType();
    // resolve the submitter implementation (e.g. once the distributed worker is ready)
    Optional<DataXJobSubmit> jobSubmit = DataXJobSubmit.getDataXJobSubmit(expectDataXJobSumit);
    if (!jobSubmit.isPresent()) {
        throw new IllegalStateException("can not find expect jobSubmit by type:" + expectDataXJobSumit);
    }
    DataXJobSubmit submit = jobSubmit.get();
    final DataXJobSubmit.IDataXJobContext dataXJobContext = submit.createJobContext(execChainContext);
    Objects.requireNonNull(dataXJobContext, "dataXJobContext can not be null");
    try {
        // one dump task per generated DataX config file
        DumpPhaseStatus dumpStatus = this.getPhaseStatus(execChainContext, FullbuildPhase.FullDump);
        for (File fileName : cfgFileNames) {
            IRemoteTaskTrigger jobTrigger = createDataXJob(dataXJobContext, submit, expectDataXJobSumit, statusRpc, appSource, fileName.getName());
            triggers.add(jobTrigger);
            taskMap.put(fileName.getName(), new TISReactor.TaskAndMilestone(DataflowTask.createDumpTask(jobTrigger)));
            // register the task so it becomes visible on the status page
            dumpStatus.getTable(fileName.getName()).setWaiting(true);
        }
        logger.info("trigger dataX jobs by mode:{},with:{}", this.getDataXTriggerType(), cfgFileNames.stream().map((f) -> f.getName()).collect(Collectors.joining(",")));
        IDataxReader reader = appSource.getReader(null);
        List<ISelectedTab> selectedTabs = reader.getSelectedTabs();
        IDataxWriter writer = appSource.getWriter(null);
        if (writer instanceof IDataXBatchPost) {
            // the writer requests a post-processing (join) task per selected table
            IDataXBatchPost batchPostTask = (IDataXBatchPost) writer;
            JoinPhaseStatus phaseStatus = this.getPhaseStatus(execChainContext, FullbuildPhase.JOIN);
            for (ISelectedTab entry : selectedTabs) {
                IRemoteTaskTrigger postTaskTrigger = batchPostTask.createPostTask(execChainContext, entry);
                triggers.add(postTaskTrigger);
                JoinPhaseStatus.JoinTaskStatus taskStatus = phaseStatus.getTaskStatus(postTaskTrigger.getTaskName());
                taskStatus.setWaiting(true);
                taskMap.put(postTaskTrigger.getTaskName(), new TISReactor.TaskAndMilestone(createJoinTask(postTaskTrigger, taskStatus)));
            }
        }
        // example: "->a ->b a,b->c"
        String dagSessionSpec = triggers.stream().map((trigger) -> {
            List<String> dpts = trigger.getTaskDependencies();
            return dpts.stream().collect(Collectors.joining(",")) + "->" + trigger.getTaskName();
        }).collect(Collectors.joining(" "));
        logger.info("dataX:{} of dagSessionSpec:{}", execChainContext.getIndexName(), dagSessionSpec);
        // start from success; the failure listener below overwrites it on the first error
        final ExecuteResult[] faildResult = new ExecuteResult[] { ExecuteResult.createSuccess() };
        this.executeDAG(executorService, execChainContext, dagSessionSpec, taskMap, new ReactorListener() {
            @Override
            public void onTaskCompleted(Task t) {
            }
            @Override
            public void onTaskFailed(Task t, Throwable err, boolean fatal) {
                logger.error(t.getDisplayName(), err);
                faildResult[0] = ExecuteResult.createFaild().setMessage("status.runningStatus.isComplete():" + err.getMessage());
                if (err instanceof InterruptedException) {
                    // this job has been cancel, trigger from TisServlet.doDelete(): propagate cancel to every trigger
                    logger.warn("DataX Name:{},taskid:{} has been canceled", execChainContext.getIndexName(), execChainContext.getTaskId());
                    for (IRemoteTaskTrigger tt : triggers) {
                        try {
                            tt.cancel();
                        } catch (Throwable ex) {
                            // best-effort cancellation: record the failure instead of swallowing it
                            logger.warn("cancel trigger:" + tt.getTaskName() + " faild", ex);
                        }
                    }
                }
            }
        });
        for (IRemoteTaskTrigger trigger : triggers) {
            if (trigger.isAsyn()) {
                // asynchronous triggers keep running after this phase; register them for later tracking
                execChainContext.addAsynSubJob(new IExecChainContext.AsynSubJob(trigger.getAsynJobName()));
            }
        }
        return faildResult[0];
    } finally {
        try {
            dataXJobContext.destroy();
        } catch (Throwable e) {
            logger.error(e.getMessage(), e);
        }
        // the pool's core threads never time out; without this every invocation leaks two threads
        executorService.shutdownNow();
    }
}
Use of com.qlangtech.tis.exec.ExecuteResult in project tis by qlangtech:
the class IndexSwapTaskflowLauncher, method startWork.
/**
 * Entry point of a full build: triggered after the servlet receives the build command.
 *
 * <p>Fixes over the previous revision: the javadoc parameter name now matches the actual
 * parameter, the redundant {@code invoke = null} initialization was removed, and the typos
 * in the failure SMS text ("fulbud", "falid") were corrected.
 *
 * @param chainContext execution chain context carrying the phase range and target index
 * @return the result of running the assembled execution chain
 * @throws Exception if building or invoking the execution chain fails
 */
@SuppressWarnings("all")
public ExecuteResult startWork(DefaultChainContext chainContext) throws Exception {
    chainContext.rebindLoggingMDCParams();
    ExecutePhaseRange range = chainContext.getExecutePhaseRange();
    logger.info("start component:" + range.getStart() + ",end component:" + range.getEnd());
    Objects.requireNonNull(this.zkClient, "zkClient can not be null");
    chainContext.setZkClient(this.zkClient);
    ActionInvocation invoke = AbstractActionInvocation.createExecChain(chainContext);
    ExecuteResult execResult = invoke.invoke();
    if (!execResult.isSuccess()) {
        logger.warn(execResult.getMessage());
        // alert the on-call phone when a full build fails
        SendSMSUtils.send("[ERR]fullbuild:" + chainContext.getIndexName() + " failed," + execResult.getMessage(), SendSMSUtils.BAISUI_PHONE);
    }
    return execResult;
}
Use of com.qlangtech.tis.exec.ExecuteResult in project tis by qlangtech:
the class TestIndexSwapTaskflowLauncherWithDataXTrigger, method testDataXProcessTrigger.
/**
 * Verifies that a full-dump-only execution chain driven by the DataX trigger succeeds.
 */
public void testDataXProcessTrigger() throws Exception {
    // restrict the execute-phase range to the dump phase only
    DefaultChainContext ctx = createRangeChainContext(FullbuildPhase.FullDump, FullbuildPhase.FullDump);
    IndexSwapTaskflowLauncher launcher = new IndexSwapTaskflowLauncher();
    ExecuteResult result = launcher.startWork(ctx);
    assertTrue(result.isSuccess());
}
Aggregations