Use of com.qlangtech.tis.plugin.ds.ISelectedTab in project plugins by qlangtech.
From the class TestElasticSearchSinkFactory, method testCreateSinkFunction:
/**
* Reference: ElasticsearchSinkTestBase
*
* @throws Exception
*/
@Test
public void testCreateSinkFunction() throws Exception {
String tableName = "totalpayinfo";
String colEntityId = "entity_id";
String colNum = "num";
String colId = "id";
String colCreateTime = "create_time";
IDataxProcessor dataxProcessor = mock("dataxProcessor", IDataxProcessor.class);
IDataxReader dataxReader = mock("dataxReader", IDataxReader.class);
List<ISelectedTab> selectedTabs = Lists.newArrayList();
SelectedTab totalpayinfo = mock(tableName, SelectedTab.class);
EasyMock.expect(totalpayinfo.getName()).andReturn(tableName);
List<ISelectedTab.ColMeta> cols = Lists.newArrayList();
ISelectedTab.ColMeta cm = new ISelectedTab.ColMeta();
cm.setName(colEntityId);
cm.setType(new DataType(Types.VARCHAR, 6));
cols.add(cm);
cm = new ISelectedTab.ColMeta();
cm.setName(colNum);
cm.setType(new DataType(Types.INTEGER));
cols.add(cm);
cm = new ISelectedTab.ColMeta();
cm.setName(colId);
cm.setType(new DataType(Types.VARCHAR, 32));
cm.setPk(true);
cols.add(cm);
cm = new ISelectedTab.ColMeta();
cm.setName(colCreateTime);
cm.setType(new DataType(Types.BIGINT));
cols.add(cm);
EasyMock.expect(totalpayinfo.getCols()).andReturn(cols).anyTimes();
selectedTabs.add(totalpayinfo);
EasyMock.expect(dataxReader.getSelectedTabs()).andReturn(selectedTabs);
EasyMock.expect(dataxProcessor.getReader(null)).andReturn(dataxReader);
DataXElasticsearchWriter dataXWriter = mock("dataXWriter", DataXElasticsearchWriter.class);
ESTableAlias esTableAlias = new ESTableAlias();
dataXWriter.initialIndex(esTableAlias);
EasyMock.expect(dataxProcessor.getWriter(null)).andReturn(dataXWriter);
Map<String, IDataxProcessor.TableAlias> aliasMap = new HashMap<>();
IDataxProcessor.TableAlias tab = new IDataxProcessor.TableAlias(tableName);
aliasMap.put(tableName, tab);
EasyMock.expect(dataxProcessor.getTabAlias()).andReturn(aliasMap);
this.replay();
ElasticSearchSinkFactory elasticSearchSinkFactory = new ElasticSearchSinkFactory();
Map<IDataxProcessor.TableAlias, SinkFunction<DTO>> sinkFuncs = elasticSearchSinkFactory.createSinkFunction(dataxProcessor);
Assert.assertTrue("sinkFuncs size must be greater than 0", sinkFuncs.size() > 0);
// StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// env.setParallelism(1);
DTO d = new DTO();
d.setTableName(tableName);
d.setEventType(DTO.EventType.ADD);
Map<String, Object> after = Maps.newHashMap();
after.put(colEntityId, "334556");
after.put(colNum, "5");
after.put(colId, "123dsf124325253dsf123");
after.put(colCreateTime, "20211113115959");
d.setAfter(after);
Assert.assertEquals(1, sinkFuncs.size());
for (Map.Entry<IDataxProcessor.TableAlias, SinkFunction<DTO>> entry : sinkFuncs.entrySet()) {
// env.fromElements(new DTO[]{d}).addSink(entry.getValue()).name("elasticsearch");
runElasticSearchSinkTest("elasticsearch-sink-test-json-index", entry.getValue());
break;
}
// env.execute("testJob");
Thread.sleep(5000);
this.verifyAll();
Client client = getClient();
}
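The four ColMeta blocks in the test above differ only in column name, data type, and primary-key flag. A minimal sketch of a helper that would shorten that setup, assuming only the setters already used in the test (setName, setType, setPk); the method name colMeta is hypothetical, not part of the project:

private static ISelectedTab.ColMeta colMeta(String name, DataType type, boolean pk) {
    // build one column description the same way the test does inline
    ISelectedTab.ColMeta cm = new ISelectedTab.ColMeta();
    cm.setName(name);
    cm.setType(type);
    cm.setPk(pk);
    return cm;
}

// usage, mirroring the test:
// cols.add(colMeta(colId, new DataType(Types.VARCHAR, 32), true));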
Use of com.qlangtech.tis.plugin.ds.ISelectedTab in project plugins by qlangtech.
From the class TestDataXMongodbWriter, method testDescriptorsJSONGenerate:
public void testDescriptorsJSONGenerate() {
DataxReader dataxReader = EasyMock.createMock("dataxReader", DataxReader.class);
List<ISelectedTab> selectedTabs = TestSelectedTabs.createSelectedTabs(1).stream().map((t) -> t).collect(Collectors.toList());
for (ISelectedTab tab : selectedTabs) {
for (ISelectedTab.ColMeta cm : tab.getCols()) {
cm.setType(DataXReaderColType.STRING.dataType);
}
}
EasyMock.expect(dataxReader.getSelectedTabs()).andReturn(selectedTabs).times(4);
DataxReader.dataxReaderThreadLocal.set(dataxReader);
EasyMock.replay(dataxReader);
DataXMongodbWriter writer = new DataXMongodbWriter();
assertTrue(writer instanceof IDataSourceFactoryGetter);
DescriptorsJSON descJson = new DescriptorsJSON(writer.getDescriptor());
JsonUtil.assertJSONEqual(DataXMongodbWriter.class, "mongdodb-datax-writer-descriptor.json", descJson.getDescriptorsJSON(), (m, e, a) -> {
assertEquals(m, e, a);
});
// run the same comparison a second time against the same expected descriptor JSON
JsonUtil.assertJSONEqual(DataXMongodbWriter.class, "mongdodb-datax-writer-descriptor.json", descJson.getDescriptorsJSON(), (m, e, a) -> {
assertEquals(m, e, a);
});
EasyMock.verify(dataxReader);
}
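The test above installs the mocked reader into DataxReader.dataxReaderThreadLocal but never clears it. A hedged sketch of a tear-down step, assuming dataxReaderThreadLocal is a plain java.lang.ThreadLocal (the set(...) call above suggests it is) and that the test class is a JUnit 3 style TestCase, as the unqualified assertTrue/assertEquals calls suggest:

@Override
protected void tearDown() throws Exception {
    // assumption: remove() is available because dataxReaderThreadLocal is a standard ThreadLocal
    DataxReader.dataxReaderThreadLocal.remove();
    super.tearDown();
}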
Use of com.qlangtech.tis.plugin.ds.ISelectedTab in project plugins by qlangtech.
From the class TestDataXElasticsearchWriter, method testInitSchemaMetaContent:
public void testInitSchemaMetaContent() {
DataXElasticsearchWriter dataXWriter = new DataXElasticsearchWriter();
ISelectedTab selectedTab = EasyMock.createMock("selectedTab", ISelectedTab.class);
// ISelectedTab
List<ISelectedTab.ColMeta> cols = Lists.newArrayList();
ISelectedTab.ColMeta col = new ISelectedTab.ColMeta();
col.setName(null);
col.setType(DataXReaderColType.STRING.dataType);
cols.add(col);
col = new ISelectedTab.ColMeta();
col.setName(null);
col.setType(DataXReaderColType.Long.dataType);
cols.add(col);
EasyMock.expect(selectedTab.getCols()).andReturn(cols);
EasyMock.replay(selectedTab);
SchemaMetaContent metaContent = dataXWriter.initSchemaMetaContent(selectedTab);
// System.out.println(JsonUtil.toString(metaContent.toJSON()));
JsonUtil.assertJSONEqual(DataXElasticsearchWriter.class, "initSchemaMetaContent_assert.json", metaContent.toJSON(), (m, e, a) -> {
assertEquals(m, e, a);
});
EasyMock.verify(selectedTab);
}
Use of com.qlangtech.tis.plugin.ds.ISelectedTab in project plugins by qlangtech.
From the class FlinkCDCMongoDBSourceFunction, method start:
@Override
public JobExecutionResult start(TargetResName dataxName, IDataxReader dataSource, List<ISelectedTab> tabs, IDataxProcessor dataXProcessor) throws MQConsumeException {
try {
DataXMongodbReader mongoReader = (DataXMongodbReader) dataSource;
MangoDBDataSourceFactory dsFactory = mongoReader.getDsFactory();
List<ReaderSource> sourceFunctions = Lists.newArrayList();
MongoDBSource.Builder<DTO> builder = MongoDBSource.<DTO>builder()
.hosts(dsFactory.address)
.database(dsFactory.dbName)
.collection(mongoReader.collectionName)
.connectionOptions(sourceFactory.connectionOptions)
.errorsTolerance(sourceFactory.errorsTolerance)
.username(dsFactory.getUserName())
.password(dsFactory.getPassword())
.deserializer(new TISDeserializationSchema());
if (sourceFactory.errorsLogEnable != null) {
builder.errorsLogEnable(sourceFactory.errorsLogEnable);
}
if (sourceFactory.copyExisting != null) {
builder.copyExisting(sourceFactory.copyExisting);
}
if (sourceFactory.copyExistingMaxThreads != null) {
builder.copyExistingMaxThreads(sourceFactory.copyExistingMaxThreads);
}
if (sourceFactory.copyExistingQueueSize != null) {
builder.copyExistingQueueSize(sourceFactory.copyExistingQueueSize);
}
if (sourceFactory.pollMaxBatchSize != null) {
builder.pollMaxBatchSize(sourceFactory.pollMaxBatchSize);
}
if (sourceFactory.pollAwaitTimeMillis != null) {
builder.pollAwaitTimeMillis(sourceFactory.pollAwaitTimeMillis);
}
if (sourceFactory.heartbeatIntervalMillis != null) {
builder.heartbeatIntervalMillis(sourceFactory.heartbeatIntervalMillis);
}
SourceFunction<DTO> source = builder.build();
// MongoDBSource.<DTO>builder()
// .hosts(dsFactory.address)
// .database(dsFactory.dbName)
// .collection(mongoReader.collectionName)
// .connectionOptions(sourceFactory.connectionOptions)
// .errorsTolerance(sourceFactory.errorsTolerance)
// .errorsLogEnable(sourceFactory.errorsLogEnable)
// .copyExisting(sourceFactory.copyExisting)
// .copyExistingPipeline(sourceFactory.copyExistingPipeline)
// .copyExistingMaxThreads(sourceFactory.copyExistingMaxThreads)
// .copyExistingQueueSize(sourceFactory.copyExistingQueueSize)
// .pollMaxBatchSize(sourceFactory.pollMaxBatchSize)
// .pollAwaitTimeMillis(sourceFactory.pollAwaitTimeMillis)
// .heartbeatIntervalMillis(sourceFactory.heartbeatIntervalMillis)
// //.port(dsFactory.port)
// // .databaseList(dbs.toArray(new String[dbs.size()])) // monitor all tables under inventory database
// // .tableList(tbs.toArray(new String[tbs.size()]))
// .username(dsFactory.getUserName())
// .password(dsFactory.getPassword())
// // .startupOptions(sourceFactory.getStartupOptions())
// //.debeziumProperties(debeziumProperties)
// .deserializer(new TISDeserializationSchema()) // converts SourceRecord to JSON String
// .build();
sourceFunctions.add(new ReaderSource(dsFactory.address + "_" + dsFactory.dbName + "_" + mongoReader.collectionName, source));
SourceChannel sourceChannel = new SourceChannel(sourceFunctions);
for (ISelectedTab tab : tabs) {
sourceChannel.addFocusTab(tab.getName());
}
return (JobExecutionResult) getConsumerHandle().consume(dataxName, sourceChannel, dataXProcessor);
} catch (Exception e) {
throw new MQConsumeException(e.getMessage(), e);
}
}
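Each optional MongoDB CDC setting above follows the same null-check-then-set pattern, which makes copy-paste mistakes easy to introduce. A small sketch of a generic helper that removes the repetition; the name applyIfSet and the use of java.util.function.Consumer are assumptions of this sketch, not project API:

import java.util.function.Consumer;

// forwards an optional configuration value to the matching builder setter only when it is set
static <T> void applyIfSet(T value, Consumer<T> setter) {
    if (value != null) {
        setter.accept(value);
    }
}

// usage, mirroring the checks above (the builder methods return the builder; Consumer simply discards that result):
// applyIfSet(sourceFactory.copyExistingQueueSize, builder::copyExistingQueueSize);
// applyIfSet(sourceFactory.pollMaxBatchSize, builder::pollMaxBatchSize);
// applyIfSet(sourceFactory.heartbeatIntervalMillis, builder::heartbeatIntervalMillis);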
Use of com.qlangtech.tis.plugin.ds.ISelectedTab in project plugins by qlangtech.
From the class FlinkCDCMysqlSourceFunction, method start:
@Override
public JobExecutionResult start(TargetResName dataxName, IDataxReader dataSource, List<ISelectedTab> tabs, IDataxProcessor dataXProcessor) throws MQConsumeException {
try {
// TabColIndexer colIndexer = new TabColIndexer(tabs);
// TISDeserializationSchema deserializationSchema
// = new TISDeserializationSchema(new MySQLSourceValConvert(colIndexer));
TISDeserializationSchema deserializationSchema = new TISDeserializationSchema();
BasicDataXRdbmsReader rdbmsReader = (BasicDataXRdbmsReader) dataSource;
BasicDataSourceFactory dsFactory = (BasicDataSourceFactory) rdbmsReader.getDataSourceFactory();
SourceChannel sourceChannel = new SourceChannel(SourceChannel.getSourceFunction(dsFactory, tabs, (dbHost, dbs, tbs, debeziumProperties) -> {
DateTimeConverter.setDatetimeConverters(MySqlDateTimeConverter.class.getName(), debeziumProperties);
String[] databases = dbs.toArray(new String[dbs.size()]);
return Collections.singletonList(new ReaderSource(
dbHost + ":" + dsFactory.port + ":" + dbs.stream().collect(Collectors.joining("_")),
MySqlSource.<DTO>builder()
.hostname(dbHost)
.port(dsFactory.port)
// monitor all tables of the selected databases
.databaseList(databases)
.tableList(tbs.toArray(new String[tbs.size()]))
.serverTimeZone(BasicDataSourceFactory.DEFAULT_SERVER_TIME_ZONE.getId())
.username(dsFactory.getUserName())
.password(dsFactory.getPassword())
.startupOptions(sourceFactory.getStartupOptions())
.debeziumProperties(debeziumProperties)
// converts each SourceRecord into a DTO
.deserializer(deserializationSchema)
.build()));
}));
for (ISelectedTab tab : tabs) {
sourceChannel.addFocusTab(tab.getName());
}
return (JobExecutionResult) getConsumerHandle().consume(dataxName, sourceChannel, dataXProcessor);
} catch (Exception e) {
throw new MQConsumeException(e.getMessage(), e);
}
}