Search in sources :

Example 1 with IExecChainContext

use of com.qlangtech.tis.exec.IExecChainContext in project plugins by qlangtech.

From the class TestDataXHudiWriter, in the method testRealDump.

@Test
public void testRealDump() throws Exception {
    // Bind the collection name and task id into the logging MDC so output
    // produced during the dump can be correlated with this test run.
    MDC.put(TISCollectionUtils.KEY_COLLECTION, HdfsFileSystemFactoryTestUtils.testDataXName.getName());
    MDC.put(IParamContext.KEY_TASK_ID, "123");
    HudiTest houseTest = createDataXWriter();
    // Partition timestamp written into the generated cfg. Uppercase 'L'
    // suffix: the original lowercase 'l' is easily misread as the digit '1'.
    long timestamp = 20220311135455L;
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfgDir = folder.newFolder();
    File createDDLDir = folder.newFolder();
    File createDDLFile = null;
    try {
        // Materialize the CREATE TABLE DDL resource the Hudi post task is
        // expected to consume for the target table.
        createDDLFile = new File(createDDLDir, HudiWriter.targetTableName + IDataxProcessor.DATAX_CREATE_DDL_FILE_NAME_SUFFIX);
        FileUtils.write(createDDLFile, com.qlangtech.tis.extension.impl.IOUtils.loadResourceFromClasspath(DataXHudiWriter.class, "create_ddl_customer_order_relation.sql"), TisUTF8.get());
        DataXCfgGenerator.GenerateCfgs genCfg = new DataXCfgGenerator.GenerateCfgs();
        genCfg.setGenTime(timestamp);
        genCfg.setGroupedChildTask(Collections.singletonMap(WriterTemplate.TAB_customer_order_relation, Lists.newArrayList(WriterTemplate.TAB_customer_order_relation + "_0")));
        genCfg.write2GenFile(dataXCfgDir);
        EasyMock.expect(dataXProcessor.getDataxCfgDir(null)).andReturn(dataXCfgDir);
        // Route the static getters to this test's fixtures so code under test
        // resolves our writer/processor instead of real application state.
        DataxWriter.dataxWriterGetter = (dataXName) -> {
            return houseTest.writer;
        };
        DataxProcessor.processorGetter = (dataXName) -> {
            Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
            return dataXProcessor;
        };
        IExecChainContext execContext = EasyMock.mock("execContext", IExecChainContext.class);
        EasyMock.expect(execContext.getPartitionTimestamp()).andReturn(String.valueOf(timestamp));
        EasyMock.replay(dataXProcessor, execContext);
        // Exercise the post task (table creation in the metastore) directly.
        HudiDumpPostTask postTask = (HudiDumpPostTask) houseTest.writer.createPostTask(execContext, houseTest.tab);
        Assert.assertNotNull("postTask can not be null", postTask);
        postTask.run();
        // After the post task ran, the target table must exist in the Hive metastore.
        IHiveConnGetter hiveConnMeta = houseTest.writer.getHiveConnMeta();
        try (IHiveMetaStore metaStoreClient = hiveConnMeta.createMetaStoreClient()) {
            Assert.assertNotNull(metaStoreClient);
            HiveTable table = metaStoreClient.getTable(hiveConnMeta.getDbName(), WriterTemplate.TAB_customer_order_relation);
            Assert.assertNotNull(WriterTemplate.TAB_customer_order_relation + " can not be null", table);
        }
        EasyMock.verify(dataXProcessor, execContext);
    } finally {
        // Cleanup of createDDLFile is intentionally left to the temporary
        // folder rule (presumably a JUnit TemporaryFolder — confirm); the
        // explicit FileUtils.deleteQuietly(createDDLFile) was disabled here.
    }
}
Also used : IHiveMetaStore(com.qlangtech.tis.config.hive.meta.IHiveMetaStore) DataXCfgGenerator(com.qlangtech.tis.datax.impl.DataXCfgGenerator) IExecChainContext(com.qlangtech.tis.exec.IExecChainContext) DataxProcessor(com.qlangtech.tis.datax.impl.DataxProcessor) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor) File(java.io.File) IHiveConnGetter(com.qlangtech.tis.config.hive.IHiveConnGetter) HiveTable(com.qlangtech.tis.config.hive.meta.HiveTable) Test(org.junit.Test)

Example 2 with IExecChainContext

use of com.qlangtech.tis.exec.IExecChainContext in project tis by qlangtech.

From the class TestDataXExecuteInterceptor, in the method executeJobTrigger.

/**
 * Drives a full {@code DataXExecuteInterceptor} run against the supplied
 * (mocked) job trigger and asserts the expected overall outcome.
 *
 * @param jobTrigger   remote task trigger the interceptor will launch
 * @param finalSuccess whether the whole execution is expected to succeed
 */
private void executeJobTrigger(IRemoteTaskTrigger jobTrigger, boolean finalSuccess) throws Exception {
    final int taskId = 999;
    TrackableExecuteInterceptor.initialTaskPhase(taskId);
    // Route job submission through a mock submitter that hands back our trigger.
    DataXJobSubmit.mockGetter = () -> new TestIndexSwapTaskflowLauncherWithDataXTrigger.MockDataXJobSubmit(jobTrigger);
    DataXExecuteInterceptor interceptor = new DataXExecuteInterceptor();
    IExecChainContext chainContext = mock("execChainContext", IExecChainContext.class);
    chainContext.rebindLoggingMDCParams();
    EasyMock.expect(chainContext.getIndexName()).andReturn(AP_NAME);
    EasyMock.expect(chainContext.getTaskId()).andReturn(taskId).anyTimes();
    MockDataxProcessor processor = new MockDataxProcessor();
    EasyMock.expect(chainContext.getAppSource()).andReturn(processor);
    this.replay();
    ExecuteResult result = interceptor.execute(chainContext);
    assertEquals("execute must be " + (finalSuccess ? "success" : "faild"), finalSuccess, result.isSuccess());
    this.verifyAll();
}
Also used : TestIndexSwapTaskflowLauncherWithDataXTrigger(com.qlangtech.tis.order.center.TestIndexSwapTaskflowLauncherWithDataXTrigger) IExecChainContext(com.qlangtech.tis.exec.IExecChainContext) ExecuteResult(com.qlangtech.tis.exec.ExecuteResult)

Example 3 with IExecChainContext

use of com.qlangtech.tis.exec.IExecChainContext in project plugins by qlangtech.

From the class TestLocalDataXJobSubmit, in the method testCreateDataXJob.

/**
 * End-to-end check of {@code LocalDataXJobSubmit.createDataXJob} through three
 * scenarios: a job whose main class completes normally, one whose main class
 * throws, and one that is cancelled mid-flight.
 */
public void testCreateDataXJob() throws Exception {
    Optional<DataXJobSubmit> dataXJobSubmit = DataXJobSubmit.getDataXJobSubmit(DataXJobSubmit.InstanceType.LOCAL);
    Assert.assertTrue("dataXJobSubmit shall be present", dataXJobSubmit.isPresent());
    LocalDataXJobSubmit jobSubmit = (LocalDataXJobSubmit) dataXJobSubmit.get();
    jobSubmit.setMainClassName(LocalDataXJobMainEntrypoint.class.getName());
    jobSubmit.setWorkingDirectory(new File("."));
    jobSubmit.setClasspath("target/classes:target/test-classes");
    // Status RPC backed by the mock assemble service.
    AtomicReference<ITISRpcService> ref = new AtomicReference<>();
    ref.set(StatusRpcClient.AssembleSvcCompsite.MOCK_PRC);
    RpcServiceReference statusRpc = new RpcServiceReference(ref);
    DataXJobSubmit.IDataXJobContext dataXJobContext = EasyMock.createMock("dataXJobContext", DataXJobSubmit.IDataXJobContext.class);
    IExecChainContext taskContext = EasyMock.createMock("taskContext", IExecChainContext.class);
    EasyMock.expect(dataXJobContext.getTaskContext()).andReturn(taskContext).anyTimes();
    IDataxProcessor dataxProcessor = EasyMock.createMock("dataxProcessor", IDataxProcessor.class);
    EasyMock.expect(taskContext.getIndexName()).andReturn(dataXName).anyTimes();
    EasyMock.expect(taskContext.getTaskId()).andReturn(TaskId).anyTimes();
    // A previously successful dump phase the submitter reads back; expected
    // exactly once per each of the three job launches below (hence times(3)).
    int preSuccessTaskId = 99;
    PhaseStatusCollection preSuccessTask = new PhaseStatusCollection(preSuccessTaskId, new ExecutePhaseRange(FullbuildPhase.FullDump, FullbuildPhase.FullDump));
    DumpPhaseStatus preDumpStatus = new DumpPhaseStatus(preSuccessTaskId);
    DumpPhaseStatus.TableDumpStatus tableDumpStatus = preDumpStatus.getTable(dataXfileName);
    tableDumpStatus.setAllRows(LocalDataXJobMainEntrypoint.testAllRows);
    preSuccessTask.setDumpPhase(preDumpStatus);
    EasyMock.expect(taskContext.loadPhaseStatusFromLatest(dataXName)).andReturn(preSuccessTask).times(3);
    TisZkClient zkClient = EasyMock.createMock("TisZkClient", TisZkClient.class);
    String zkSubPath = "nodes0000000020";
    EasyMock.expect(zkClient.getChildren(ZkUtils.ZK_ASSEMBLE_LOG_COLLECT_PATH, null, true)).andReturn(Collections.singletonList(zkSubPath)).times(3);
    EasyMock.expect(zkClient.getData(EasyMock.eq(ZkUtils.ZK_ASSEMBLE_LOG_COLLECT_PATH + "/" + zkSubPath), EasyMock.isNull(), EasyMock.anyObject(Stat.class), EasyMock.eq(true))).andReturn(statusCollectorHost.getBytes(TisUTF8.get())).times(3);
    EasyMock.expect(taskContext.getZkClient()).andReturn(zkClient).anyTimes();
    EasyMock.replay(taskContext, dataxProcessor, zkClient, dataXJobContext);
    // 1) happy path: main class completes normally.
    IRemoteTaskTrigger dataXJob = jobSubmit.createDataXJob(dataXJobContext, statusRpc, dataxProcessor, dataXfileName);
    RunningStatus running = getRunningStatus(dataXJob);
    assertTrue("running.isSuccess", running.isSuccess());
    // 2) main class throws: run must complete but be marked as failed.
    jobSubmit.setMainClassName(LocalDataXJobMainEntrypointThrowException.class.getName());
    dataXJob = jobSubmit.createDataXJob(dataXJobContext, statusRpc, dataxProcessor, dataXfileName);
    running = getRunningStatus(dataXJob);
    assertFalse("shall fail", running.isSuccess());
    assertTrue("shall complete", running.isComplete());
    // 3) cancellable job: cancel mid-flight, then poll (at most ~3s) for completion.
    jobSubmit.setMainClassName(LocalDataXJobMainEntrypointCancellable.class.getName());
    dataXJob = jobSubmit.createDataXJob(dataXJobContext, statusRpc, dataxProcessor, dataXfileName);
    running = getRunningStatus(dataXJob, false);
    Thread.sleep(2000);
    dataXJob.cancel();
    int i = 0;
    while (i++ < 3 && !(running = dataXJob.getRunningStatus()).isComplete()) {
        Thread.sleep(1000);
    }
    assertFalse("shall fail", running.isSuccess());
    assertTrue("shall complete", running.isComplete());
    // Verify every mock that was replayed; dataXJobContext was previously
    // omitted here, leaving its expectations unverified.
    EasyMock.verify(taskContext, dataxProcessor, zkClient, dataXJobContext);
}
Also used : DataXJobSubmit(com.qlangtech.tis.datax.DataXJobSubmit) IRemoteTaskTrigger(com.qlangtech.tis.fullbuild.indexbuild.IRemoteTaskTrigger) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor) AtomicReference(java.util.concurrent.atomic.AtomicReference) RpcServiceReference(com.tis.hadoop.rpc.RpcServiceReference) DumpPhaseStatus(com.qlangtech.tis.fullbuild.phasestatus.impl.DumpPhaseStatus) IExecChainContext(com.qlangtech.tis.exec.IExecChainContext) PhaseStatusCollection(com.qlangtech.tis.fullbuild.phasestatus.PhaseStatusCollection) RunningStatus(com.qlangtech.tis.fullbuild.indexbuild.RunningStatus) ExecutePhaseRange(com.qlangtech.tis.exec.ExecutePhaseRange) ITISRpcService(com.tis.hadoop.rpc.ITISRpcService) File(java.io.File) TisZkClient(com.qlangtech.tis.TisZkClient)

Example 4 with IExecChainContext

use of com.qlangtech.tis.exec.IExecChainContext in project plugins by qlangtech.

From the class TaskExec, in the method getRemoteJobTrigger.

/**
 * Builds an {@link IRemoteTaskTrigger} that runs one local DataX job
 * (identified by {@code dataXfileName}) in a forked JVM via
 * {@link DataXJobSingleProcessorExecutor}, tracking completion/success flags
 * that {@link IRemoteTaskTrigger#getRunningStatus()} reports.
 *
 * @param jobContext          ambient DataX job context supplying the task context
 * @param localDataXJobSubmit holds classpath / main class / working directory for the fork
 * @param dataXfileName       name of the dataX cfg file (also used as the task name)
 */
static IRemoteTaskTrigger getRemoteJobTrigger(DataXJobSubmit.IDataXJobContext jobContext, LocalDataXJobSubmit localDataXJobSubmit, String dataXfileName) {
    IJoinTaskContext taskContext = jobContext.getTaskContext();
    // Flags shared between run() and getRunningStatus(); AtomicBoolean gives
    // cross-thread visibility.
    AtomicBoolean complete = new AtomicBoolean(false);
    AtomicBoolean success = new AtomicBoolean(false);
    return new IRemoteTaskTrigger() {

        DataXJobSingleProcessorExecutor jobConsumer;

        // volatile: set by cancel() on the caller's thread, read by run()'s
        // catch block on the executing thread.
        volatile boolean hasCanceled;

        @Override
        public void run() {
            try {
                // Propagate task id / collection into the MDC for log correlation.
                MDC.put(IParamContext.KEY_TASK_ID, String.valueOf(taskContext.getTaskId()));
                MDC.put(TISCollectionUtils.KEY_COLLECTION, taskContext.getIndexName());
                jobConsumer = new DataXJobSingleProcessorExecutor() {

                    @Override
                    protected DataXJobSubmit.InstanceType getExecMode() {
                        return DataXJobSubmit.InstanceType.LOCAL;
                    }

                    @Override
                    protected String getClasspath() {
                        return localDataXJobSubmit.getClasspath();
                    }

                    @Override
                    protected boolean useRuntimePropEnvProps() {
                        return false;
                    }

                    @Override
                    protected String[] getExtraJavaSystemPrams() {
                        // Forked JVM must not fetch resources from the center repository.
                        return new String[] { "-D" + CenterResource.KEY_notFetchFromCenterRepository + "=true" };
                    }

                    @Override
                    protected String getIncrStateCollectAddress() {
                        return ZkUtils.getFirstChildValue(((IExecChainContext) taskContext).getZkClient(), ZkUtils.ZK_ASSEMBLE_LOG_COLLECT_PATH);
                    }

                    @Override
                    protected String getMainClassName() {
                        return localDataXJobSubmit.getMainClassName();
                    }

                    @Override
                    protected File getWorkingDirectory() {
                        return localDataXJobSubmit.getWorkingDirectory();
                    }
                };
                CuratorDataXTaskMessage dataXJob = localDataXJobSubmit.getDataXJobDTO(taskContext, dataXfileName);
                // Blocks until the forked job finishes (or its process is destroyed).
                jobConsumer.consumeMessage(dataXJob);
                success.set(true);
            } catch (Throwable e) {
                success.set(false);
                if (this.hasCanceled) {
                    // Failure caused by an explicit cancel(): warn, don't rethrow.
                    logger.warn("datax:" + taskContext.getIndexName() + " has been canceled");
                } else {
                    logger.error("datax:" + taskContext.getIndexName() + ",jobName:" + dataXfileName, e);
                    if (!(e instanceof DataXJobSingleProcessorException)) {
                        throw new RuntimeException(e);
                    }
                }
            } finally {
                complete.set(true);
            }
        }

        @Override
        public String getTaskName() {
            return dataXfileName;
        }

        @Override
        public void cancel() {
            if (jobConsumer == null) {
                return;
            }
            // Mark cancellation BEFORE destroying the child processes: run()
            // is typically blocked in consumeMessage() on another thread and
            // inspects this flag in its catch block to decide between a
            // "canceled" warning and an error + rethrow. Setting the flag
            // afterwards (as before) raced with that check.
            this.hasCanceled = true;
            jobConsumer.runningTask.forEach((taskId, watchdog) -> {
                watchdog.destroyProcess();
                logger.info("taskId:{} relevant task has been canceled", taskId);
            });
        }

        @Override
        public RunningStatus getRunningStatus() {
            return new RunningStatus(0, complete.get(), success.get());
        }
    };
}
Also used : IRemoteTaskTrigger(com.qlangtech.tis.fullbuild.indexbuild.IRemoteTaskTrigger) IExecChainContext(com.qlangtech.tis.exec.IExecChainContext) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) CuratorDataXTaskMessage(com.qlangtech.tis.datax.CuratorDataXTaskMessage) RunningStatus(com.qlangtech.tis.fullbuild.indexbuild.RunningStatus) DataXJobSingleProcessorException(com.qlangtech.tis.datax.DataXJobSingleProcessorException) DataXJobSingleProcessorExecutor(com.qlangtech.tis.datax.DataXJobSingleProcessorExecutor) IJoinTaskContext(com.qlangtech.tis.order.center.IJoinTaskContext) File(java.io.File)

Example 5 with IExecChainContext

use of com.qlangtech.tis.exec.IExecChainContext in project tis by qlangtech.

From the class DataXExecuteInterceptor, in the method execute.

@Override
protected ExecuteResult execute(IExecChainContext execChainContext) throws Exception {
    int nThreads = 2;
    final ExecutorService executorService = new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(DataXJobSubmit.MAX_TABS_NUM_IN_PER_JOB), Executors.defaultThreadFactory());
    final Map<String, TISReactor.TaskAndMilestone> taskMap = Maps.newHashMap();
    RpcServiceReference statusRpc = getDataXExecReporter();
    DataxProcessor appSource = execChainContext.getAppSource();
    IRemoteTaskTrigger jobTrigger = null;
    RunningStatus runningStatus = null;
    List<IRemoteTaskTrigger> triggers = Lists.newArrayList();
    List<File> cfgFileNames = appSource.getDataxCfgFileNames(null);
    if (CollectionUtils.isEmpty(cfgFileNames)) {
        throw new IllegalStateException("dataX cfgFileNames can not be empty");
    }
    DataXJobSubmit.InstanceType expectDataXJobSumit = getDataXTriggerType();
    Optional<DataXJobSubmit> jobSubmit = DataXJobSubmit.getDataXJobSubmit(expectDataXJobSumit);
    // 如果分布式worker ready的话
    if (!jobSubmit.isPresent()) {
        throw new IllegalStateException("can not find expect jobSubmit by type:" + expectDataXJobSumit);
    }
    DataXJobSubmit submit = jobSubmit.get();
    final DataXJobSubmit.IDataXJobContext dataXJobContext = submit.createJobContext(execChainContext);
    Objects.requireNonNull(dataXJobContext, "dataXJobContext can not be null");
    try {
        DumpPhaseStatus dumpStatus = this.getPhaseStatus(execChainContext, FullbuildPhase.FullDump);
        for (File fileName : cfgFileNames) {
            jobTrigger = createDataXJob(dataXJobContext, submit, expectDataXJobSumit, statusRpc, appSource, fileName.getName());
            triggers.add(jobTrigger);
            taskMap.put(fileName.getName(), new TISReactor.TaskAndMilestone(DataflowTask.createDumpTask(jobTrigger)));
            // StatusRpcClient.AssembleSvcCompsite svc = statusRpc.get();
            // 将任务注册,可供页面展示
            // svc.reportDumpJobStatus(false, false, true, execChainContext.getTaskId()
            // , fileName.getName(), 0, 0);
            dumpStatus.getTable(fileName.getName()).setWaiting(true);
        }
        logger.info("trigger dataX jobs by mode:{},with:{}", this.getDataXTriggerType(), cfgFileNames.stream().map((f) -> f.getName()).collect(Collectors.joining(",")));
        // for (IRemoteJobTrigger t : triggers) {
        // t.submitJob();
        // }
        IDataxReader reader = appSource.getReader(null);
        List<ISelectedTab> selectedTabs = reader.getSelectedTabs();
        // Map<String, IDataxProcessor.TableAlias> tabAlias = appSource.getTabAlias();
        IDataxWriter writer = appSource.getWriter(null);
        if (writer instanceof IDataXBatchPost) {
            IDataXBatchPost batchPostTask = (IDataXBatchPost) writer;
            JoinPhaseStatus phaseStatus = this.getPhaseStatus(execChainContext, FullbuildPhase.JOIN);
            for (ISelectedTab entry : selectedTabs) {
                IRemoteTaskTrigger postTaskTrigger = batchPostTask.createPostTask(execChainContext, entry);
                triggers.add(postTaskTrigger);
                JoinPhaseStatus.JoinTaskStatus taskStatus = phaseStatus.getTaskStatus(postTaskTrigger.getTaskName());
                taskStatus.setWaiting(true);
                taskMap.put(postTaskTrigger.getTaskName(), new TISReactor.TaskAndMilestone(createJoinTask(postTaskTrigger, taskStatus)));
            }
        }
        // example: "->a ->b a,b->c"
        String dagSessionSpec = triggers.stream().map((trigger) -> {
            List<String> dpts = trigger.getTaskDependencies();
            return dpts.stream().collect(Collectors.joining(",")) + "->" + trigger.getTaskName();
        }).collect(Collectors.joining(" "));
        logger.info("dataX:{} of dagSessionSpec:{}", execChainContext.getIndexName(), dagSessionSpec);
        ExecuteResult[] faildResult = new ExecuteResult[] { ExecuteResult.createSuccess() };
        this.executeDAG(executorService, execChainContext, dagSessionSpec, taskMap, new ReactorListener() {

            @Override
            public void onTaskCompleted(Task t) {
            // dumpPhaseStatus.isComplete();
            // joinPhaseStatus.isComplete();
            }

            @Override
            public void onTaskFailed(Task t, Throwable err, boolean fatal) {
                logger.error(t.getDisplayName(), err);
                faildResult[0] = ExecuteResult.createFaild().setMessage("status.runningStatus.isComplete():" + err.getMessage());
                if (err instanceof InterruptedException) {
                    logger.warn("DataX Name:{},taskid:{} has been canceled", execChainContext.getIndexName(), execChainContext.getTaskId());
                    // this job has been cancel, trigger from TisServlet.doDelete()
                    for (IRemoteTaskTrigger tt : triggers) {
                        try {
                            tt.cancel();
                        } catch (Throwable ex) {
                        }
                    }
                }
            }
        });
        // ExecuteResult result = new ExecuteResult(!faild);
        for (IRemoteTaskTrigger trigger : triggers) {
            if (trigger.isAsyn()) {
                execChainContext.addAsynSubJob(new IExecChainContext.AsynSubJob(trigger.getAsynJobName()));
            }
        }
        return faildResult[0];
    } finally {
        try {
            dataXJobContext.destroy();
        } catch (Throwable e) {
            logger.error(e.getMessage(), e);
        }
    }
}
Also used : ExecuteResult(com.qlangtech.tis.exec.ExecuteResult) java.util(java.util) IDataxWriter(com.qlangtech.tis.datax.IDataxWriter) IExecChainContext(com.qlangtech.tis.exec.IExecChainContext) ISelectedTab(com.qlangtech.tis.plugin.ds.ISelectedTab) LoggerFactory(org.slf4j.LoggerFactory) AtomicReference(java.util.concurrent.atomic.AtomicReference) RpcServiceReference(com.tis.hadoop.rpc.RpcServiceReference) Lists(com.google.common.collect.Lists) DataxProcessor(com.qlangtech.tis.datax.impl.DataxProcessor) CollectionUtils(org.apache.commons.collections.CollectionUtils) DataXJobSubmit(com.qlangtech.tis.datax.DataXJobSubmit) JoinPhaseStatus(com.qlangtech.tis.fullbuild.phasestatus.impl.JoinPhaseStatus) IDataxReader(com.qlangtech.tis.datax.IDataxReader) AdapterStatusUmbilicalProtocol(com.qlangtech.tis.realtime.yarn.rpc.impl.AdapterStatusUmbilicalProtocol) DumpPhaseStatus(com.qlangtech.tis.fullbuild.phasestatus.impl.DumpPhaseStatus) Task(org.jvnet.hudson.reactor.Task) IDataXBatchPost(com.qlangtech.tis.datax.IDataXBatchPost) TrackableExecuteInterceptor(com.qlangtech.tis.exec.impl.TrackableExecuteInterceptor) FullbuildPhase(com.qlangtech.tis.assemble.FullbuildPhase) IncrStatusUmbilicalProtocolImpl(com.qlangtech.tis.rpc.server.IncrStatusUmbilicalProtocolImpl) IRemoteTaskTrigger(com.qlangtech.tis.fullbuild.indexbuild.IRemoteTaskTrigger) Logger(org.slf4j.Logger) java.util.concurrent(java.util.concurrent) IncrStatusUmbilicalProtocol(com.qlangtech.tis.realtime.yarn.rpc.IncrStatusUmbilicalProtocol) ReactorListener(org.jvnet.hudson.reactor.ReactorListener) Maps(com.google.common.collect.Maps) Collectors(java.util.stream.Collectors) File(java.io.File) RunningStatus(com.qlangtech.tis.fullbuild.indexbuild.RunningStatus) DataflowTask(com.qlangtech.tis.fullbuild.taskflow.DataflowTask) TISReactor(com.qlangtech.tis.fullbuild.taskflow.TISReactor) ITISRpcService(com.tis.hadoop.rpc.ITISRpcService) Task(org.jvnet.hudson.reactor.Task) 
DataflowTask(com.qlangtech.tis.fullbuild.taskflow.DataflowTask) DataXJobSubmit(com.qlangtech.tis.datax.DataXJobSubmit) ISelectedTab(com.qlangtech.tis.plugin.ds.ISelectedTab) RpcServiceReference(com.tis.hadoop.rpc.RpcServiceReference) DumpPhaseStatus(com.qlangtech.tis.fullbuild.phasestatus.impl.DumpPhaseStatus) IDataXBatchPost(com.qlangtech.tis.datax.IDataXBatchPost) IExecChainContext(com.qlangtech.tis.exec.IExecChainContext) IDataxReader(com.qlangtech.tis.datax.IDataxReader) DataxProcessor(com.qlangtech.tis.datax.impl.DataxProcessor) RunningStatus(com.qlangtech.tis.fullbuild.indexbuild.RunningStatus) ExecuteResult(com.qlangtech.tis.exec.ExecuteResult) IRemoteTaskTrigger(com.qlangtech.tis.fullbuild.indexbuild.IRemoteTaskTrigger) IDataxWriter(com.qlangtech.tis.datax.IDataxWriter) JoinPhaseStatus(com.qlangtech.tis.fullbuild.phasestatus.impl.JoinPhaseStatus) TISReactor(com.qlangtech.tis.fullbuild.taskflow.TISReactor) ReactorListener(org.jvnet.hudson.reactor.ReactorListener) File(java.io.File)

Aggregations

IExecChainContext (com.qlangtech.tis.exec.IExecChainContext)6 File (java.io.File)4 IRemoteTaskTrigger (com.qlangtech.tis.fullbuild.indexbuild.IRemoteTaskTrigger)3 RunningStatus (com.qlangtech.tis.fullbuild.indexbuild.RunningStatus)3 DataXJobSubmit (com.qlangtech.tis.datax.DataXJobSubmit)2 IDataxProcessor (com.qlangtech.tis.datax.IDataxProcessor)2 DataxProcessor (com.qlangtech.tis.datax.impl.DataxProcessor)2 ExecuteResult (com.qlangtech.tis.exec.ExecuteResult)2 DumpPhaseStatus (com.qlangtech.tis.fullbuild.phasestatus.impl.DumpPhaseStatus)2 ITISRpcService (com.tis.hadoop.rpc.ITISRpcService)2 RpcServiceReference (com.tis.hadoop.rpc.RpcServiceReference)2 AtomicReference (java.util.concurrent.atomic.AtomicReference)2 Lists (com.google.common.collect.Lists)1 Maps (com.google.common.collect.Maps)1 TisZkClient (com.qlangtech.tis.TisZkClient)1 FullbuildPhase (com.qlangtech.tis.assemble.FullbuildPhase)1 IHiveConnGetter (com.qlangtech.tis.config.hive.IHiveConnGetter)1 HiveTable (com.qlangtech.tis.config.hive.meta.HiveTable)1 IHiveMetaStore (com.qlangtech.tis.config.hive.meta.IHiveMetaStore)1 CuratorDataXTaskMessage (com.qlangtech.tis.datax.CuratorDataXTaskMessage)1