Use of com.qlangtech.tis.order.center.IJoinTaskContext in project plugins by qlangtech.
The class TestLocalTableDumpAndIndex, method startIndexBuild:
public void startIndexBuild(String solrCoreName, IJoinTaskContext execContext, ITISCoordinator zkCoordinator, String timePoint) throws Exception {
    LocalIndexBuilderTriggerFactory builderTriggerFactory = new LocalIndexBuilderTriggerFactory();
    File localOfflineDir = LocalTableDumpFactory.getLocalOfflineRootDir();
    String indexName = ITestDumpCommon.INDEX_COLLECTION;
    String groupNum = "0";
    Integer taskId = 123;
    ITISFileSystem fileSystem = builderTriggerFactory.getFileSystem();
    ImportDataProcessInfo buildParam = new ImportDataProcessInfo(taskId, fileSystem, zkCoordinator);
    buildParam.setIndexName(indexName);
    // Derive the build column list from the mock employees table metadata.
    MockDataSourceFactory employeesDataSource = MockDataSourceFactory.getMockEmployeesDataSource();
    List<ColumnMetaData> employeeTableMeta = employeesDataSource.getTableMetadata(TABLE_EMPLOYEES);
    String colsLiteria = employeeTableMeta.stream().map((c) -> c.getKey()).collect(Collectors.joining(","));
    buildParam.setBuildTableTitleItems(colsLiteria);
    // Write the Solr schema and solrconfig resources for this core group into the file system.
    SnapshotDomain snapshot = com.qlangtech.tis.manage.common.SnapshotDomainUtils.mockEmployeeSnapshotDomain();
    snapshot.writeResource2fs(fileSystem, buildParam.getCoreName(Integer.parseInt(groupNum)), ConfigFileReader.FILE_SCHEMA);
    snapshot.writeResource2fs(fileSystem, buildParam.getCoreName(Integer.parseInt(groupNum)), ConfigFileReader.FILE_SOLR);
    IRemoteJobTrigger buildJob = builderTriggerFactory.createBuildJob(execContext, timePoint, indexName, groupNum, buildParam);
    buildJob.submitJob();
    /* -----------------------------------------------------------
     * Start executing the index build
     * ----------------------------------------------------------- */
    TestLocalTableDumpAndIndex.waitJobTerminatorAndAssert(buildJob);
    // long hdfsTimeStamp, String hdfsUser, SolrCore core, File indexDir, SolrQueryResponse rsp, String taskId
    indexFlowback2SolrEngineNode(solrCoreName, timePoint, localOfflineDir, taskId);
}
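The helper waitJobTerminatorAndAssert is not included in this excerpt. A minimal sketch of what such a wait-and-assert helper could look like, assuming the build trigger exposes getRunningStatus() like the trigger in the next example and that RunningStatus offers isComplete()/isSuccess() accessors (both assumptions, not the verified TIS API):

// Hypothetical polling helper; getRunningStatus()/isComplete()/isSuccess() are assumed accessors.
static void waitJobTerminatorAndAssertSketch(IRemoteJobTrigger buildJob) throws Exception {
    RunningStatus status;
    do {
        // poll until the submitted build job terminates
        java.util.concurrent.TimeUnit.SECONDS.sleep(3);
        status = buildJob.getRunningStatus();
    } while (!status.isComplete());
    assertTrue("index build job must finish successfully", status.isSuccess());
}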
Use of com.qlangtech.tis.order.center.IJoinTaskContext in project plugins by qlangtech.
The class DistributedOverseerDataXJobSubmit, method createDataXJob:
@Override
public IRemoteTaskTrigger createDataXJob(IDataXJobContext dataXJobContext, RpcServiceReference statusRpc, IDataxProcessor dataxProcessor, String dataXfileName) {
    IJoinTaskContext taskContext = dataXJobContext.getTaskContext();
    IAppSourcePipelineController pipelineController = taskContext.getPipelineController();
    DistributedQueue<CuratorDataXTaskMessage> distributedQueue = getCuratorDistributedQueue();
    // File jobPath = new File(dataxProcessor.getDataxCfgDir(null), dataXfileName);
    return new AsynRemoteJobTrigger(dataXfileName) {

        @Override
        public void run() {
            try {
                // Build the task message, push it onto the Curator distributed queue,
                // then record the sub-node execution status on the pipeline controller.
                CuratorDataXTaskMessage msg = getDataXJobDTO(taskContext, dataXfileName);
                distributedQueue.put(msg);
                pipelineController.registerAppSubExecNodeMetrixStatus(IAppSourcePipelineController.DATAX_FULL_PIPELINE + taskContext.getIndexName(), dataXfileName);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        @Override
        public RunningStatus getRunningStatus() {
            return RunningStatus.SUCCESS;
        }

        @Override
        public void cancel() {
            pipelineController.stop(IAppSourcePipelineController.DATAX_FULL_PIPELINE + taskContext.getIndexName());
        }
    };
}
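The trigger above only enqueues a CuratorDataXTaskMessage; the worker that drains the queue is not part of this excerpt. Purely as an illustration of the queue contract, the sketch below shows a consumer built on Apache Curator's DistributedQueue recipe, reusing the ZooKeeper address and queue path asserted in the test that follows. The fastjson-based QueueSerializer and the job-launching callback are assumptions, not the actual DataXJobWorker implementation:

CuratorFramework client = CuratorFrameworkFactory.newClient("192.168.28.200:2181/tis/cloud", new ExponentialBackoffRetry(1000, 3));
client.start();
// Callback invoked for every CuratorDataXTaskMessage taken from the queue.
QueueConsumer<CuratorDataXTaskMessage> consumer = new QueueConsumer<CuratorDataXTaskMessage>() {

    @Override
    public void consumeMessage(CuratorDataXTaskMessage msg) {
        // worker-side logic: launch the DataX job described by msg (not shown in this excerpt)
    }

    @Override
    public void stateChanged(CuratorFramework curatorClient, ConnectionState newState) {
        // react to ZooKeeper connection state changes if necessary
    }
};
// Assumed JSON codec; the serializer actually used by getCuratorDistributedQueue() is not shown.
QueueSerializer<CuratorDataXTaskMessage> serializer = new QueueSerializer<CuratorDataXTaskMessage>() {

    @Override
    public byte[] serialize(CuratorDataXTaskMessage msg) {
        return JSON.toJSONBytes(msg);
    }

    @Override
    public CuratorDataXTaskMessage deserialize(byte[] bytes) {
        return JSON.parseObject(bytes, CuratorDataXTaskMessage.class);
    }
};
DistributedQueue<CuratorDataXTaskMessage> queue = QueueBuilder.builder(client, consumer, serializer, "/datax/jobs").buildQueue();
queue.start();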
Use of com.qlangtech.tis.order.center.IJoinTaskContext in project plugins by qlangtech.
The class TestDistributedOverseerDataXJobSubmit, method testPushMsgToDistributeQueue:
public void testPushMsgToDistributeQueue() {
    DataXJobWorker dataxJobWorker = DataXJobWorker.getJobWorker(DataXJobWorker.K8S_DATAX_INSTANCE_NAME);
    assertEquals("/datax/jobs", dataxJobWorker.getZkQueuePath());
    assertEquals("192.168.28.200:2181/tis/cloud", dataxJobWorker.getZookeeperAddress());
    DataxProcessor dataxProcessor = IAppSource.load(DATAX_NAME);
    assertNotNull(dataxProcessor);
    // IDataxProcessor dataxProcessor = EasyMock.createMock("dataxProcessor", IDataxProcessor.class);
    // EasyMock.expect(dataxProcessor.getDataxCfgDir()).andReturn();
    IJoinTaskContext taskContext = EasyMock.createMock("joinTaskContext", IJoinTaskContext.class);
    EasyMock.expect(taskContext.getIndexName()).andReturn(DATAX_NAME);
    EasyMock.expect(taskContext.getTaskId()).andReturn(DATAX_TASK_ID);
    AtomicReference<ITISRpcService> ref = new AtomicReference<>();
    ref.set(StatusRpcClient.AssembleSvcCompsite.MOCK_PRC);
    RpcServiceReference svcRef = new RpcServiceReference(ref);
    Optional<DataXJobSubmit> jobSubmit = DataXJobSubmit.getDataXJobSubmit(DataXJobSubmit.InstanceType.DISTRIBUTE);
    assertTrue(jobSubmit.isPresent());
    DataXJobSubmit submit = jobSubmit.get();
    DataXJobSubmit.IDataXJobContext jobContext = submit.createJobContext(taskContext);
    EasyMock.replay(taskContext);
    // IJoinTaskContext taskContext
    // , RpcServiceReference statusRpc, IDataxProcessor dataxProcessor, String dataXfileName
    IRemoteTaskTrigger dataXJob = submit.createDataXJob(jobContext, svcRef, dataxProcessor, DATAX_JOB_FILE_NAME);
    dataXJob.run();
    EasyMock.verify(taskContext);
}
Use of com.qlangtech.tis.order.center.IJoinTaskContext in project plugins by qlangtech.
The class TestHiveFlatTableBuilder, method testTotalpaySummary:
public void testTotalpaySummary() throws Exception {
    HiveFlatTableBuilder flatTableBuilder = (HiveFlatTableBuilder) flatTableBuilderStore.getPlugin();
    // ISqlTask nodeMeta, boolean isFinalNode
    // , ITemplateContext tplContext, ITaskContext taskContext, IFs2Table fs2Table, IJoinTaskStatus joinTaskStatus
    IFs2Table fs2Table = new MockFs2Table();
    IJoinTaskStatus joinTaskStatus = EasyMock.createMock("joinTaskStatus", IJoinTaskStatus.class);
    joinTaskStatus.setComplete(true);
    joinTaskStatus.createJobStatus(EasyMock.anyInt());
    JobLog jobLog = new JobLog();
    EasyMock.expect(joinTaskStatus.getJoblog(EasyMock.anyInt())).andReturn(jobLog).anyTimes();
    joinTaskStatus.setStart();
    IJoinTaskContext joinTaskContext = EasyMock.createMock("joinTaskContext", IJoinTaskContext.class);
    Map<IDumpTable, ITabPartition> dateParams = Maps.newHashMap();
    EasyMock.expect(joinTaskContext.getAttribute(ExecChainContextUtils.PARTITION_DATA_PARAMS)).andReturn(dateParams).anyTimes();
    Map<String, Boolean> taskWorkStatus = Maps.newHashMap();
    EasyMock.expect(joinTaskContext.getAttribute(AdapterTask.KEY_TASK_WORK_STATUS)).andReturn(taskWorkStatus);
    ERRules erRules = EasyMock.createMock("erRules", ERRules.class);
    EasyMock.expect(joinTaskContext.getAttribute("er_rules")).andReturn(erRules);
    EasyMock.replay(joinTaskStatus, joinTaskContext, erRules);
    MockTemplateContext tplContext = new MockTemplateContext(joinTaskContext);
    flatTableBuilder.startTask((context) -> {
        try (InputStream input = TestHiveFlatTableBuilder.class.getResourceAsStream("groupby_totalpay.sql")) {
            // try (InputStream input = TestHiveFlatTableBuilder.class.getResourceAsStream("totalpay_summary.sql")) {
            ISqlTask sqlTask = new DefaultSqlTask(IOUtils.toString(input, TisUTF8.get()));
            // ISqlTask nodeMeta, boolean isFinalNode
            // , ITemplateContext tplContext, ITaskContext taskContext, //
            // IJoinTaskStatus joinTaskStatus
            DataflowTask joinTask = flatTableBuilder.createTask(sqlTask, true, tplContext, context, joinTaskStatus);
            joinTask.run();
        }
    });
}
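In this test IJoinTaskContext mostly acts as an attribute bag: the mock answers getAttribute(...) for the keys ExecChainContextUtils.PARTITION_DATA_PARAMS, AdapterTask.KEY_TASK_WORK_STATUS and "er_rules". A minimal sketch of that pattern with a hypothetical AttributeBagSketch class (illustration only, not the TIS implementation):

// Hypothetical attribute bag mirroring the getAttribute(...) expectations recorded above.
class AttributeBagSketch {

    private final Map<Object, Object> attrs = new HashMap<>();

    @SuppressWarnings("unchecked")
    <T> T getAttribute(Object key) {
        return (T) attrs.get(key);
    }

    void setAttribute(Object key, Object value) {
        attrs.put(key, value);
    }
}

// Usage mirroring the test: register a value once, then any task in the chain can read it back.
AttributeBagSketch ctx = new AttributeBagSketch();
ctx.setAttribute("er_rules", erRules);
ERRules rules = ctx.getAttribute("er_rules");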
Use of com.qlangtech.tis.order.center.IJoinTaskContext in project tis by qlangtech.
The class TestSqlTaskNodeMeta, method testGetRewriteSql:
public void testGetRewriteSql() throws Exception {
    SqlTaskNodeMeta taskNodeMeta = new SqlTaskNodeMeta();
    SqlDataFlowTopology topology = SqlTaskNodeMeta.getSqlDataFlowTopology(TestSupplyGoodsParse.topologyName);
    assertNotNull(topology);
    SqlTaskNodeMeta finalNode = topology.getFinalNode();
    assertNotNull(finalNode);
    taskNodeMeta.setSql(finalNode.getSql());
    Map<IDumpTable, ITabPartition> dumpPartition = Maps.newHashMap();
    String pt = "20200703113848";
    dumpPartition.put(EntityName.parse("scmdb.warehouse_goods"), () -> pt);
    dumpPartition.put(EntityName.parse("tis.stock_info_collapse"), () -> pt);
    dumpPartition.put(EntityName.parse("scmdb.supplier_goods"), () -> pt);
    dumpPartition.put(EntityName.parse("tis.warehouse_collapse"), () -> pt);
    dumpPartition.put(EntityName.parse("tis.supplier_collapse"), () -> pt);
    dumpPartition.put(EntityName.parse("scmdb.goods"), () -> pt);
    dumpPartition.put(EntityName.parse("scmdb.stock_info"), () -> pt);
    dumpPartition.put(EntityName.parse("scmdb.category"), () -> pt);
    dumpPartition.put(EntityName.parse("scmdb.goods_sync_shop"), () -> pt);
    ITemplateContext tplContext = EasyMock.createMock("templateContext", ITemplateContext.class);
    IJoinTaskContext joinTaskContext = EasyMock.createMock("joinTaskContext", IJoinTaskContext.class);
    EasyMock.expect(tplContext.getExecContext()).andReturn(joinTaskContext);
    EasyMock.expect(joinTaskContext.getExecutePhaseRange()).andReturn(ExecutePhaseRange.fullRange()).times(2);
    EasyMock.expect(joinTaskContext.getIndexShardCount()).andReturn(1).times(1);
    Optional<ERRules> erRule = ERRules.getErRule(TestSupplyGoodsParse.topologyName);
    assertTrue(erRule.isPresent());
    EasyMock.replay(tplContext, joinTaskContext);
    ISqlTask.RewriteSql rewriteSql = taskNodeMeta.getRewriteSql("supply_goods", new TabPartitions(dumpPartition), erRule.get(), tplContext, true);
    assertNotNull(rewriteSql);
    assertEquals(TestSqlRewriter.getScriptContent("supply_goods_rewrite_result.txt"), rewriteSql.sqlContent);
    System.out.println(rewriteSql.sqlContent);
    EasyMock.verify(tplContext, joinTaskContext);
}
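The dumpPartition map above is filled with lambdas (() -> pt), which works because ITabPartition is used here as a functional interface whose single method yields the partition timestamp. A minimal sketch of that shape, using a hypothetical interface name and method since the real ITabPartition definition is not part of this excerpt:

// Hypothetical functional-interface shape; the actual ITabPartition method name may differ.
@FunctionalInterface
interface TabPartitionSketch {
    String getPt();
}

Map<String, TabPartitionSketch> partitions = new HashMap<>();
String pt = "20200703113848";
// the lambda supplies the partition timestamp lazily, on demand
partitions.put("scmdb.goods", () -> pt);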