Search in sources:

Example 6 with IDataxProcessor

use of com.qlangtech.tis.datax.IDataxProcessor in project plugins by qlangtech.

the class TestElasticSearchSinkFactory method testCreateSinkFunction.

/**
 * Reference: ElasticsearchSinkTestBase
 *
 * @throws Exception
 */
@Test
public void testCreateSinkFunction() throws Exception {
    String tableName = "totalpayinfo";
    String colEntityId = "entity_id";
    String colNum = "num";
    String colId = "id";
    String colCreateTime = "create_time";
    IDataxProcessor dataxProcessor = mock("dataxProcessor", IDataxProcessor.class);
    IDataxReader dataxReader = mock("dataxReader", IDataxReader.class);
    List<ISelectedTab> selectedTabs = Lists.newArrayList();
    SelectedTab totalpayinfo = mock(tableName, SelectedTab.class);
    EasyMock.expect(totalpayinfo.getName()).andReturn(tableName);
    List<ISelectedTab.ColMeta> cols = Lists.newArrayList();
    ISelectedTab.ColMeta cm = new ISelectedTab.ColMeta();
    cm.setName(colEntityId);
    cm.setType(new DataType(Types.VARCHAR, 6));
    cols.add(cm);
    cm = new ISelectedTab.ColMeta();
    cm.setName(colNum);
    cm.setType(new DataType(Types.INTEGER));
    cols.add(cm);
    cm = new ISelectedTab.ColMeta();
    cm.setName(colId);
    cm.setType(new DataType(Types.VARCHAR, 32));
    cm.setPk(true);
    cols.add(cm);
    cm = new ISelectedTab.ColMeta();
    cm.setName(colCreateTime);
    cm.setType(new DataType(Types.BIGINT));
    cols.add(cm);
    EasyMock.expect(totalpayinfo.getCols()).andReturn(cols).anyTimes();
    selectedTabs.add(totalpayinfo);
    EasyMock.expect(dataxReader.getSelectedTabs()).andReturn(selectedTabs);
    EasyMock.expect(dataxProcessor.getReader(null)).andReturn(dataxReader);
    DataXElasticsearchWriter dataXWriter = mock("dataXWriter", DataXElasticsearchWriter.class);
    ESTableAlias esTableAlias = new ESTableAlias();
    dataXWriter.initialIndex(esTableAlias);
    EasyMock.expect(dataxProcessor.getWriter(null)).andReturn(dataXWriter);
    Map<String, IDataxProcessor.TableAlias> aliasMap = new HashMap<>();
    IDataxProcessor.TableAlias tab = new IDataxProcessor.TableAlias(tableName);
    aliasMap.put(tableName, tab);
    EasyMock.expect(dataxProcessor.getTabAlias()).andReturn(aliasMap);
    this.replay();
    ElasticSearchSinkFactory elasticSearchSinkFactory = new ElasticSearchSinkFactory();
    Map<IDataxProcessor.TableAlias, SinkFunction<DTO>> sinkFuncs = elasticSearchSinkFactory.createSinkFunction(dataxProcessor);
    Assert.assertTrue("sinkFuncs size must be > 0", sinkFuncs.size() > 0);
    // StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // env.setParallelism(1);
    DTO d = new DTO();
    d.setTableName(tableName);
    d.setEventType(DTO.EventType.ADD);
    Map<String, Object> after = Maps.newHashMap();
    after.put(colEntityId, "334556");
    after.put(colNum, "5");
    after.put(colId, "123dsf124325253dsf123");
    after.put(colCreateTime, "20211113115959");
    d.setAfter(after);
    Assert.assertEquals(1, sinkFuncs.size());
    for (Map.Entry<IDataxProcessor.TableAlias, SinkFunction<DTO>> entry : sinkFuncs.entrySet()) {
        // env.fromElements(new DTO[]{d}).addSink(entry.getValue()).name("elasticsearch");
        runElasticSearchSinkTest("elasticsearch-sink-test-json-index", entry.getValue());
        break;
    }
    // env.execute("testJob");
    Thread.sleep(5000);
    this.verifyAll();
    Client client = getClient();
}
Also used : HashMap(java.util.HashMap) Map(java.util.Map) ISelectedTab(com.qlangtech.tis.plugin.ds.ISelectedTab) DataXElasticsearchWriter(com.qlangtech.tis.plugin.datax.DataXElasticsearchWriter) IDataxReader(com.qlangtech.tis.datax.IDataxReader) SinkFunction(org.apache.flink.streaming.api.functions.sink.SinkFunction) SelectedTab(com.qlangtech.tis.plugin.datax.SelectedTab) DataType(com.qlangtech.tis.plugin.ds.DataType) Client(org.elasticsearch.client.Client) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor) ESTableAlias(com.qlangtech.tis.datax.impl.ESTableAlias) DTO(com.qlangtech.tis.realtime.transfer.DTO) Test(org.junit.Test)
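
The StreamExecutionEnvironment lines in the test above are left commented out. Below is a minimal sketch, assuming a local Flink runtime (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) is on the classpath, of how the SinkFunction<DTO> entries returned by createSinkFunction could be attached to a job; the operator and job names are illustrative.

// Minimal sketch (not part of the original test): execute the Elasticsearch sink locally.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
for (Map.Entry<IDataxProcessor.TableAlias, SinkFunction<DTO>> entry : sinkFuncs.entrySet()) {
    // push the single test record (d) through the sink created by the factory
    env.fromElements(d).addSink(entry.getValue()).name("elasticsearch-sink");
}
env.execute("testJob");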

Example 7 with IDataxProcessor

use of com.qlangtech.tis.datax.IDataxProcessor in project plugins by qlangtech.

the class FlinkCDCMysqlSourceFunction method start.

@Override
public JobExecutionResult start(TargetResName dataxName, IDataxReader dataSource, List<ISelectedTab> tabs, IDataxProcessor dataXProcessor) throws MQConsumeException {
    try {
        // TabColIndexer colIndexer = new TabColIndexer(tabs);
        // TISDeserializationSchema deserializationSchema
        // = new TISDeserializationSchema(new MySQLSourceValConvert(colIndexer));
        TISDeserializationSchema deserializationSchema = new TISDeserializationSchema();
        BasicDataXRdbmsReader rdbmsReader = (BasicDataXRdbmsReader) dataSource;
        BasicDataSourceFactory dsFactory = (BasicDataSourceFactory) rdbmsReader.getDataSourceFactory();
        SourceChannel sourceChannel = new SourceChannel(SourceChannel.getSourceFunction(dsFactory, tabs, (dbHost, dbs, tbs, debeziumProperties) -> {
            DateTimeConverter.setDatetimeConverters(MySqlDateTimeConverter.class.getName(), debeziumProperties);
            String[] databases = dbs.toArray(new String[dbs.size()]);
            return Collections.singletonList(new ReaderSource(
                    dbHost + ":" + dsFactory.port + ":" + dbs.stream().collect(Collectors.joining("_")),
                    MySqlSource.<DTO>builder().hostname(dbHost).port(dsFactory.port)
                            .databaseList(databases) // monitor all tables under the selected databases
                            .tableList(tbs.toArray(new String[tbs.size()]))
                            .serverTimeZone(BasicDataSourceFactory.DEFAULT_SERVER_TIME_ZONE.getId())
                            .username(dsFactory.getUserName()).password(dsFactory.getPassword())
                            .startupOptions(sourceFactory.getStartupOptions()).debeziumProperties(debeziumProperties)
                            .deserializer(deserializationSchema) // converts SourceRecord to DTO
                            .build()));
        }));
        for (ISelectedTab tab : tabs) {
            sourceChannel.addFocusTab(tab.getName());
        }
        return (JobExecutionResult) getConsumerHandle().consume(dataxName, sourceChannel, dataXProcessor);
    } catch (Exception e) {
        throw new MQConsumeException(e.getMessage(), e);
    }
}
Also used : MQConsumeException(com.qlangtech.tis.async.message.client.consumer.MQConsumeException) DateTimeConverter(com.qlangtech.plugins.incr.flink.cdc.valconvert.DateTimeConverter) IConsumerHandle(com.qlangtech.tis.async.message.client.consumer.IConsumerHandle) TargetResName(com.qlangtech.tis.coredefine.module.action.TargetResName) BasicDataXRdbmsReader(com.qlangtech.tis.plugin.datax.common.BasicDataXRdbmsReader) ISelectedTab(com.qlangtech.tis.plugin.ds.ISelectedTab) DTO(com.qlangtech.tis.realtime.transfer.DTO) TISDeserializationSchema(com.qlangtech.plugins.incr.flink.cdc.TISDeserializationSchema) Collectors(java.util.stream.Collectors) BasicDataSourceFactory(com.qlangtech.tis.plugin.ds.BasicDataSourceFactory) SourceChannel(com.qlangtech.plugins.incr.flink.cdc.SourceChannel) ReaderSource(com.qlangtech.tis.realtime.ReaderSource) JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) List(java.util.List) IDataxReader(com.qlangtech.tis.datax.IDataxReader) IAsyncMsgDeserialize(com.qlangtech.tis.async.message.client.consumer.IAsyncMsgDeserialize) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor) IMQListener(com.qlangtech.tis.async.message.client.consumer.IMQListener) MySqlSource(com.ververica.cdc.connectors.mysql.MySqlSource) Collections(java.util.Collections)
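
For orientation, here is a minimal standalone sketch of the same ververica MySqlSource builder used above, run outside of SourceChannel to preview the CDC stream. The host, port, credentials, and database/table names are hypothetical placeholders, and the print() sink is only for inspection.

// Minimal sketch (not from the plugin source): preview a MySQL CDC stream as DTO records.
public void previewMysqlCdc() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    SourceFunction<DTO> source = MySqlSource.<DTO>builder()
            .hostname("127.0.0.1").port(3306)              // hypothetical connection settings
            .databaseList("order")                         // hypothetical database
            .tableList("order.totalpayinfo")               // hypothetical table
            .username("root").password("***")
            .deserializer(new TISDeserializationSchema())  // converts SourceRecord to DTO
            .build();
    env.addSource(source).name("mysql-cdc-preview").print();
    env.execute("cdc-preview");
}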

Example 8 with IDataxProcessor

use of com.qlangtech.tis.datax.IDataxProcessor in project plugins by qlangtech.

the class TestFlinkCDCMongoDBSourceFunction method testStart.

public void testStart() throws Exception {
    FlinkCDCMongoDBSourceFactory mongoDBSourceFactory = this.mock("mongoDBSourceFactory", FlinkCDCMongoDBSourceFactory.class);
    FlinkCDCMongoDBSourceFunction mongoDBSourceFunction = new FlinkCDCMongoDBSourceFunction(mongoDBSourceFactory);
    DataXMongodbReader mongodbReader = new DataXMongodbReader();
    List<ISelectedTab> tabs = Lists.newArrayList();
    IDataxProcessor dataXProcessor = this.mock("dataxProcess", IDataxProcessor.class);
    this.replay();
    TargetResName dataXName = new TargetResName("test");
    mongoDBSourceFunction.start(dataXName, mongodbReader, tabs, dataXProcessor);
    this.verifyAll();
}
Also used : TargetResName(com.qlangtech.tis.coredefine.module.action.TargetResName) DataXMongodbReader(com.qlangtech.tis.plugin.datax.DataXMongodbReader) ISelectedTab(com.qlangtech.tis.plugin.ds.ISelectedTab) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor)

Example 9 with IDataxProcessor

use of com.qlangtech.tis.datax.IDataxProcessor in project plugins by qlangtech.

the class ElasticSearchSinkFactory method createSinkFunction.

@Override
public Map<IDataxProcessor.TableAlias, SinkFunction<DTO>> createSinkFunction(IDataxProcessor dataxProcessor) {
    DataXElasticsearchWriter dataXWriter = (DataXElasticsearchWriter) dataxProcessor.getWriter(null);
    Objects.requireNonNull(dataXWriter, "dataXWriter can not be null");
    IHttpToken token = dataXWriter.getToken();
    ESTableAlias esSchema = null;
    for (Map.Entry<String, IDataxProcessor.TableAlias> e : dataxProcessor.getTabAlias().entrySet()) {
        IDataxProcessor.TableAlias value = e.getValue();
        if (!(value instanceof ESTableAlias)) {
            throw new IllegalStateException("value must be type of 'ESTableAlias',but now is :" + value.getClass());
        }
        esSchema = (ESTableAlias) value;
        break;
    }
    Objects.requireNonNull(esSchema, "esSchema can not be null");
    List<ISelectedTab.ColMeta> cols = esSchema.getSourceCols();
    if (CollectionUtils.isEmpty(cols)) {
        throw new IllegalStateException("cols can not be null");
    }
    Optional<ISelectedTab.ColMeta> firstPK = cols.stream().filter((c) -> c.isPk()).findFirst();
    if (!firstPK.isPresent()) {
        throw new IllegalStateException("has not set PK col");
    }
    /**
     ******************************************************
     * Initialize the index schema
     ******************************************************
     */
    dataXWriter.initialIndex(esSchema);
    // JSONArray schemaCols = esSchema.getSchemaCols();
    // ESClient esClient = new ESClient();
    // esClient.createClient(token.getEndpoint(),
    // token.getAccessKeyId(),
    // token.getAccessKeySecret(),
    // false,
    // 300000,
    // false,
    // false);
    // try {
    // esClient.createIndex(dataXWriter.getIndexName()
    // , dataXWriter.type
    // , esClient.genMappings(schemaCols, dataXWriter.type, (columnList) -> {
    // }), dataXWriter.settings, false);
    // } catch (Exception e) {
    // throw new RuntimeException(e);
    // } finally {
    // try {
    // esClient.closeJestClient();
    // } catch (Throwable e) {
    // 
    // }
    // }
    // if (!) {
    // throw new IllegalStateException("create index or mapping failed indexName:" + dataXWriter.getIndexName());
    // }
    // Map<String, String> config = new HashMap<>();
    // config.put("cluster.name", "my-cluster-name");
    // // This instructs the sink to emit after every element, otherwise they would be buffered
    // config.put("bulk.flush.max.actions", "1");
    List<HttpHost> transportAddresses = new ArrayList<>();
    transportAddresses.add(HttpHost.create(token.getEndpoint()));
    ElasticsearchSink.Builder<DTO> sinkBuilder = new ElasticsearchSink.Builder<>(transportAddresses, new DefaultElasticsearchSinkFunction(cols.stream().map((c) -> c.getName()).collect(Collectors.toSet()), firstPK.get().getName(), dataXWriter.getIndexName()));
    if (this.bulkFlushMaxActions != null) {
        sinkBuilder.setBulkFlushMaxActions(this.bulkFlushMaxActions);
    }
    if (this.bulkFlushMaxSizeMb != null) {
        sinkBuilder.setBulkFlushMaxSizeMb(bulkFlushMaxSizeMb);
    }
    if (this.bulkFlushIntervalMs != null) {
        sinkBuilder.setBulkFlushInterval(this.bulkFlushIntervalMs);
    }
    // new RestClientBuilder.HttpClientConfigCallback() {
    // @Override
    // public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
    // return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
    // }
    // }
    sinkBuilder.setFailureHandler(new DefaultActionRequestFailureHandler());
    if (StringUtils.isNotEmpty(token.getAccessKeyId()) || StringUtils.isNotEmpty(token.getAccessKeySecret())) {
        // if the user has configured an accessKey or accessSecret
        sinkBuilder.setRestClientFactory(new TISElasticRestClientFactory(token.getAccessKeyId(), token.getAccessKeySecret()));
    // sinkBuilder.setRestClientFactory(new RestClientFactory() {
    // @Override
    // public void configureRestClientBuilder(RestClientBuilder restClientBuilder) {
    // final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
    // credentialsProvider.setCredentials(AuthScope.ANY,
    // new UsernamePasswordCredentials(token.getAccessKeyId(), token.getAccessKeySecret()));
    // restClientBuilder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
    // @Override
    // public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpAsyncClientBuilder) {
    // return httpAsyncClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
    // }
    // });
    // }
    // });
    }
    IDataxProcessor.TableAlias tableMapper = new IDataxProcessor.TableAlias();
    tableMapper.setTo(dataXWriter.getIndexName());
    IDataxReader reader = dataxProcessor.getReader(null);
    for (ISelectedTab selectedTab : reader.getSelectedTabs()) {
        tableMapper.setFrom(selectedTab.getName());
    }
    return Collections.singletonMap(tableMapper, sinkBuilder.build());
}
Also used : StringUtils(org.apache.commons.lang.StringUtils) ActionRequestFailureHandler(org.apache.flink.streaming.connectors.elasticsearch.ActionRequestFailureHandler) java.util(java.util) RuntimeContext(org.apache.flink.api.common.functions.RuntimeContext) RequestIndexer(org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer) ISelectedTab(com.qlangtech.tis.plugin.ds.ISelectedTab) LoggerFactory(org.slf4j.LoggerFactory) FormField(com.qlangtech.tis.plugin.annotation.FormField) DTO(com.qlangtech.tis.realtime.transfer.DTO) ElasticsearchSinkFunction(org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction) IndexRequest(org.elasticsearch.action.index.IndexRequest) CollectionUtils(org.apache.commons.collections.CollectionUtils) IDataXPluginMeta(com.qlangtech.tis.datax.IDataXPluginMeta) IDataxReader(com.qlangtech.tis.datax.IDataxReader) ESTableAlias(com.qlangtech.tis.datax.impl.ESTableAlias) Public(org.apache.flink.annotation.Public) Requests(org.elasticsearch.client.Requests) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor) FormFieldType(com.qlangtech.tis.plugin.annotation.FormFieldType) Validator(com.qlangtech.tis.plugin.annotation.Validator) TISSinkFactory(com.qlangtech.tis.plugin.incr.TISSinkFactory) TISExtension(com.qlangtech.tis.extension.TISExtension) ActionRequest(org.elasticsearch.action.ActionRequest) ElasticsearchSink(org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink) Logger(org.slf4j.Logger) SinkFunction(org.apache.flink.streaming.api.functions.sink.SinkFunction) IHttpToken(com.qlangtech.tis.config.aliyun.IHttpToken) Collectors(java.util.stream.Collectors) Serializable(java.io.Serializable) DataXElasticsearchWriter(com.qlangtech.tis.plugin.datax.DataXElasticsearchWriter) HttpHost(com.qlangtech.org.apache.http.HttpHost)
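
A minimal sketch of how a caller might configure this factory and wire the resulting sinks into a Flink job. It assumes the @FormField bulk-flush fields are publicly assignable (as is conventional for TIS plugin descriptors), that TableAlias exposes getTo() as the counterpart of setTo(), and that a DataStream<DTO> named dtoStream already exists.

// Minimal sketch (assumptions noted above): attach the generated Elasticsearch sinks to a stream.
ElasticSearchSinkFactory factory = new ElasticSearchSinkFactory();
factory.bulkFlushMaxActions = 1; // flush after every element; useful for tests (assumed public field)
Map<IDataxProcessor.TableAlias, SinkFunction<DTO>> sinks = factory.createSinkFunction(dataxProcessor);
for (Map.Entry<IDataxProcessor.TableAlias, SinkFunction<DTO>> e : sinks.entrySet()) {
    // one sink per (source table -> ES index) mapping; getTo() is the assumed accessor for the index name
    dtoStream.addSink(e.getValue()).name(e.getKey().getTo());
}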

Example 10 with IDataxProcessor

use of com.qlangtech.tis.datax.IDataxProcessor in project plugins by qlangtech.

the class FlinkCDCOracleSourceFunction method start.

@Override
public JobExecutionResult start(TargetResName channalName, IDataxReader dataSource, List<ISelectedTab> tabs, IDataxProcessor dataXProcessor) throws MQConsumeException {
    try {
        BasicDataXRdbmsReader reader = (BasicDataXRdbmsReader) dataSource;
        BasicDataSourceFactory f = (BasicDataSourceFactory) reader.getDataSourceFactory();
        SourceChannel sourceChannel = new SourceChannel(SourceChannel.getSourceFunction(f, (tab) -> tab.getTabName(), tabs, (dbHost, dbs, tbs, debeziumProperties) -> {
            return dbs.stream().map((databaseName) -> {
                SourceFunction<DTO> sourceFunction = OracleSource.<DTO>builder().hostname(dbHost).debeziumProperties(debeziumProperties).port(f.port).startupOptions(sourceFactory.getStartupOptions()).database(// monitor XE database
                StringUtils.upperCase(f.dbName)).tableList(// monitor products table
                tbs.toArray(new String[tbs.size()])).username(f.getUserName()).password(f.getPassword()).deserializer(// converts SourceRecord to JSON String
                new TISDeserializationSchema()).build();
                return new ReaderSource(dbHost + ":" + f.port + "_" + databaseName, sourceFunction);
            }).collect(Collectors.toList());
        }));
        for (ISelectedTab tab : tabs) {
            sourceChannel.addFocusTab(tab.getName());
        }
        return (JobExecutionResult) getConsumerHandle().consume(channalName, sourceChannel, dataXProcessor);
    } catch (Exception e) {
        throw new MQConsumeException(e.getMessage(), e);
    }
}
Also used : MQConsumeException(com.qlangtech.tis.async.message.client.consumer.MQConsumeException) IConsumerHandle(com.qlangtech.tis.async.message.client.consumer.IConsumerHandle) StringUtils(org.apache.commons.lang.StringUtils) TargetResName(com.qlangtech.tis.coredefine.module.action.TargetResName) OracleSource(com.ververica.cdc.connectors.oracle.OracleSource) BasicDataXRdbmsReader(com.qlangtech.tis.plugin.datax.common.BasicDataXRdbmsReader) ISelectedTab(com.qlangtech.tis.plugin.ds.ISelectedTab) DTO(com.qlangtech.tis.realtime.transfer.DTO) TISDeserializationSchema(com.qlangtech.plugins.incr.flink.cdc.TISDeserializationSchema) Collectors(java.util.stream.Collectors) BasicDataSourceFactory(com.qlangtech.tis.plugin.ds.BasicDataSourceFactory) SourceChannel(com.qlangtech.plugins.incr.flink.cdc.SourceChannel) ReaderSource(com.qlangtech.tis.realtime.ReaderSource) JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) List(java.util.List) SourceFunction(org.apache.flink.streaming.api.functions.source.SourceFunction) IDataxReader(com.qlangtech.tis.datax.IDataxReader) IAsyncMsgDeserialize(com.qlangtech.tis.async.message.client.consumer.IAsyncMsgDeserialize) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor) IMQListener(com.qlangtech.tis.async.message.client.consumer.IMQListener)

Aggregations

IDataxProcessor (com.qlangtech.tis.datax.IDataxProcessor)15 ISelectedTab (com.qlangtech.tis.plugin.ds.ISelectedTab)11 IDataxReader (com.qlangtech.tis.datax.IDataxReader)10 DTO (com.qlangtech.tis.realtime.transfer.DTO)9 Collectors (java.util.stream.Collectors)6 SinkFunction (org.apache.flink.streaming.api.functions.sink.SinkFunction)6 StringUtils (org.apache.commons.lang.StringUtils)5 IMQListener (com.qlangtech.tis.async.message.client.consumer.IMQListener)4 TargetResName (com.qlangtech.tis.coredefine.module.action.TargetResName)4 SourceChannel (com.qlangtech.plugins.incr.flink.cdc.SourceChannel)3 TISDeserializationSchema (com.qlangtech.plugins.incr.flink.cdc.TISDeserializationSchema)3 IAsyncMsgDeserialize (com.qlangtech.tis.async.message.client.consumer.IAsyncMsgDeserialize)3 IConsumerHandle (com.qlangtech.tis.async.message.client.consumer.IConsumerHandle)3 MQConsumeException (com.qlangtech.tis.async.message.client.consumer.MQConsumeException)3 IDataXPluginMeta (com.qlangtech.tis.datax.IDataXPluginMeta)3 TISExtension (com.qlangtech.tis.extension.TISExtension)3 FormField (com.qlangtech.tis.plugin.annotation.FormField)3 FormFieldType (com.qlangtech.tis.plugin.annotation.FormFieldType)3 SelectedTab (com.qlangtech.tis.plugin.datax.SelectedTab)3 TISSinkFactory (com.qlangtech.tis.plugin.incr.TISSinkFactory)3