use of com.qlangtech.tis.fullbuild.taskflow.DataflowTask in project plugins by qlangtech.
In the class TestHiveFlatTableBuilder, the method testTotalpaySummary:
/**
 * Runs a flat-table (join) build end-to-end against the plugin under test:
 * loads the {@code groupby_totalpay.sql} classpath resource, wraps it in a
 * {@link DefaultSqlTask}, and executes the resulting {@code DataflowTask}.
 *
 * @throws Exception if the SQL resource cannot be read or the task fails
 */
public void testTotalpaySummary() throws Exception {
    HiveFlatTableBuilder flatTableBuilder = (HiveFlatTableBuilder) flatTableBuilderStore.getPlugin();
    // Mock join-task status: record the lifecycle calls the task is expected
    // to make (complete flag, per-job status creation, start notification)
    // and hand back a shared JobLog for any job id.
    IJoinTaskStatus joinTaskStatus = EasyMock.createMock("joinTaskStatus", IJoinTaskStatus.class);
    joinTaskStatus.setComplete(true);
    joinTaskStatus.createJobStatus(EasyMock.anyInt());
    JobLog jobLog = new JobLog();
    EasyMock.expect(joinTaskStatus.getJoblog(EasyMock.anyInt())).andReturn(jobLog).anyTimes();
    joinTaskStatus.setStart();
    // Mock the execution-chain context: empty partition params and work-status
    // maps, plus a mocked ER-rules attribute the builder may look up.
    IJoinTaskContext joinTaskContext = EasyMock.createMock("joinTaskContext", IJoinTaskContext.class);
    Map<IDumpTable, ITabPartition> dateParams = Maps.newHashMap();
    EasyMock.expect(joinTaskContext.getAttribute(ExecChainContextUtils.PARTITION_DATA_PARAMS)).andReturn(dateParams).anyTimes();
    Map<String, Boolean> taskWorkStatus = Maps.newHashMap();
    EasyMock.expect(joinTaskContext.getAttribute(AdapterTask.KEY_TASK_WORK_STATUS)).andReturn(taskWorkStatus);
    ERRules erRules = EasyMock.createMock("erRules", ERRules.class);
    EasyMock.expect(joinTaskContext.getAttribute("er_rules")).andReturn(erRules);
    EasyMock.replay(joinTaskStatus, joinTaskContext, erRules);
    MockTemplateContext tplContext = new MockTemplateContext(joinTaskContext);
    flatTableBuilder.startTask((context) -> {
        try (InputStream input = TestHiveFlatTableBuilder.class.getResourceAsStream("groupby_totalpay.sql")) {
            // Fail fast with a clear message if the fixture is missing from
            // the classpath instead of NPE-ing inside IOUtils.toString.
            java.util.Objects.requireNonNull(input, "classpath resource 'groupby_totalpay.sql' not found");
            ISqlTask sqlTask = new DefaultSqlTask(IOUtils.toString(input, TisUTF8.get()));
            // Build the join task as the final node of the flow and run it.
            DataflowTask joinTask = flatTableBuilder.createTask(sqlTask, true, tplContext, context, joinTaskStatus);
            joinTask.run();
        }
    });
}
use of com.qlangtech.tis.fullbuild.taskflow.DataflowTask in project tis by qlangtech.
In the class SingleTableAppSource, the method getProcessDataResults:
@Override
public ExecuteResult getProcessDataResults(IExecChainContext execChainContext, ISingleTableDumpFactory singleTableDumpFactory, IDataProcessFeedback dataProcessFeedback, ITaskPhaseInfo taskPhaseInfo) throws Exception {
    // Complex data export: this app source covers exactly one table, so a
    // single dump task is built and executed synchronously.
    DumpPhaseStatus dumpStatus = taskPhaseInfo.getPhaseStatus(execChainContext, FullbuildPhase.FullDump);
    // Describe the one table this source depends on.
    DependencyNode dumpNode = new DependencyNode();
    dumpNode.setId(db.getName() + "." + tabName);
    dumpNode.setName(tabName);
    dumpNode.setDbName(db.getName());
    dumpNode.setTabid(String.valueOf(tabId));
    dumpNode.setDbid(String.valueOf(db.getId()));
    DataflowTask tableDumpTask = singleTableDumpFactory.createSingleTableDump(
            dumpNode, false, /* isHasValidTableDump */
            "tableDump.getPt()", execChainContext.getZkClient(), execChainContext, dumpStatus);
    tableDumpTask.run();
    return ExecuteResult.SUCCESS;
}
use of com.qlangtech.tis.fullbuild.taskflow.DataflowTask in project tis by qlangtech.
In the class DataFlowAppSource, the method getProcessDataResults:
/**
 * Executes the workflow data topology: first registers one dump task per
 * dependency table, then — unless the topology is a single-table model —
 * registers one join/flat-table task per SQL node and runs the whole DAG.
 *
 * @return the DAG execution result ({@code ExecuteResult.SUCCESS} on success)
 * @throws Exception if the topology cannot be loaded or execution fails
 */
@Override
public ExecuteResult getProcessDataResults(IExecChainContext execChainContext, ISingleTableDumpFactory singleTableDumpFactory, IDataProcessFeedback dataProcessFeedback, ITaskPhaseInfo taskPhaseInfo) throws Exception {
    // Load the workflow topology for this dataflow.
    SqlTaskNodeMeta.SqlDataFlowTopology topology = SqlTaskNodeMeta.getSqlDataFlowTopology(dataflowName);
    // DAG tasks keyed by task id.
    Map<String, TISReactor.TaskAndMilestone> taskMap = Maps.newHashMap();
    // Tables the workflow dump depends on; log them for diagnostics.
    Collection<DependencyNode> tables = topology.getDumpNodes();
    // StringBuilder: local, single-threaded accumulator — StringBuffer's
    // synchronization is unnecessary here.
    StringBuilder dumps = new StringBuilder("dependency table:\n");
    dumps.append("\t\t=======================\n");
    for (DependencyNode t : tables) {
        dumps.append("\t\t").append(t.getDbName()).append(".").append(t.getName()).append("[").append(t.getTabid()).append(",").append("] \n");
    }
    dumps.append("\t\t=======================\n");
    logger.info(dumps.toString());
    // Initialize the status of every dump table up front, one task per table.
    DumpPhaseStatus dumpPhaseStatus = taskPhaseInfo.getPhaseStatus(execChainContext, FullbuildPhase.FullDump);
    DataflowTask tabDump = null;
    for (DependencyNode dump : topology.getDumpNodes()) {
        tabDump = singleTableDumpFactory.createSingleTableDump(dump, false, /* isHasValidTableDump */
                "tableDump.getPt()", execChainContext.getZkClient(), execChainContext, dumpPhaseStatus);
        taskMap.put(dump.getId(), new TISReactor.TaskAndMilestone(tabDump));
    }
    if (topology.isSingleTableModel()) {
        // No join phase needed: run the dump-only DAG directly.
        return executeDAG(execChainContext, topology, dataProcessFeedback, taskMap);
    } else {
        // Single-element array so the lambda below can write the result out.
        final ExecuteResult[] failedResult = new ExecuteResult[1];
        TemplateContext tplContext = new TemplateContext(execChainContext);
        JoinPhaseStatus joinPhaseStatus = taskPhaseInfo.getPhaseStatus(execChainContext, FullbuildPhase.JOIN);
        IPluginStore<FlatTableBuilder> pluginStore = TIS.getPluginStore(FlatTableBuilder.class);
        Objects.requireNonNull(pluginStore.getPlugin(), "flatTableBuilder can not be null");
        final IFlatTableBuilder flatTableBuilder = pluginStore.getPlugin();
        final SqlTaskNodeMeta fNode = topology.getFinalNode();
        flatTableBuilder.startTask((context) -> {
            DataflowTask process = null;
            for (SqlTaskNodeMeta pnode : topology.getNodeMetas()) {
                // Build one wide-table (join) task node per SQL node; the node
                // matching the final node id is flagged as the terminal task.
                process = flatTableBuilder.createTask(pnode, StringUtils.equals(fNode.getId(), pnode.getId()), tplContext, context, joinPhaseStatus.getTaskStatus(pnode.getExportName()));
                taskMap.put(pnode.getId(), new TISReactor.TaskAndMilestone(process));
            }
            failedResult[0] = executeDAG(execChainContext, topology, dataProcessFeedback, taskMap);
        });
        return failedResult[0];
    }
}
Aggregations