Usage example of com.qlangtech.tis.plugin.ds.mangodb.MangoDBDataSourceFactory in the qlangtech "plugins" project: the testTemplateGenerate method of the TestDataXMongodbReader class.
/**
 * Verifies the DataX reader configuration generated from the default template,
 * first with all optional settings present, then with the optional query and
 * datasource password removed.
 */
public void testTemplateGenerate() throws Exception {
    final String dataXName = "testDataXName";
    final MangoDBDataSourceFactory factory = getDataSourceFactory();
    DataXMongodbReader reader = new DataXMongodbReader() {
        @Override
        public MangoDBDataSourceFactory getDsFactory() {
            return factory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXMongodbReader.class;
        }
    };
    reader.template = DataXMongodbReader.getDftTemplate();
    reader.collectionName = "employee";
    reader.query = "this is my query";
    reader.column = IOUtils.loadResourceFromClasspath(this.getClass(), "mongodb-reader-column.json");
    // Pass 1: every optional option is set.
    ReaderTemplate.validateDataXReader("mongodb-datax-reader-assert.json", dataXName, reader);
    // Pass 2: optional query and password are absent.
    reader.query = null;
    factory.password = null;
    ReaderTemplate.validateDataXReader("mongodb-datax-reader-assert-without-option.json", dataXName, reader);
}
Usage example of com.qlangtech.tis.plugin.ds.mangodb.MangoDBDataSourceFactory in the qlangtech "plugins" project: the start method of the FlinkCDCMongoDBSourceFunction class.
/**
 * Builds a Flink CDC MongoDB source from the reader's datasource settings,
 * registers the selected tabs as focus tables on the source channel, and hands
 * the channel to the consumer for execution.
 *
 * @param dataxName      pipeline identity used by the consumer handle
 * @param dataSource     must be a {@link DataXMongodbReader}; supplies the
 *                       collection name and datasource factory
 * @param tabs           tables to focus in the source channel
 * @param dataXProcessor processor forwarded to the consumer
 * @return the Flink job execution result produced by the consumer
 * @throws MQConsumeException wrapping any failure during source construction or consumption
 */
@Override
public JobExecutionResult start(TargetResName dataxName, IDataxReader dataSource, List<ISelectedTab> tabs, IDataxProcessor dataXProcessor) throws MQConsumeException {
    try {
        DataXMongodbReader mongoReader = (DataXMongodbReader) dataSource;
        MangoDBDataSourceFactory dsFactory = mongoReader.getDsFactory();
        List<ReaderSource> sourceFunctions = Lists.newArrayList();
        MongoDBSource.Builder<DTO> builder = MongoDBSource.<DTO>builder()
                .hosts(dsFactory.address)
                .database(dsFactory.dbName)
                .collection(mongoReader.collectionName)
                .connectionOptions(sourceFactory.connectionOptions)
                .errorsTolerance(sourceFactory.errorsTolerance)
                .username(dsFactory.getUserName())
                .password(dsFactory.getPassword())
                // converts SourceRecord to the TIS DTO representation
                .deserializer(new TISDeserializationSchema());
        // Optional tuning knobs: forward only the values the user actually configured.
        if (sourceFactory.errorsLogEnable != null) {
            builder.errorsLogEnable(sourceFactory.errorsLogEnable);
        }
        if (sourceFactory.copyExisting != null) {
            builder.copyExisting(sourceFactory.copyExisting);
        }
        if (sourceFactory.copyExistingMaxThreads != null) {
            builder.copyExistingMaxThreads(sourceFactory.copyExistingMaxThreads);
        }
        // FIX: the four options below were all mistakenly routed to
        // copyExistingMaxThreads (copy-paste bug); each now calls its own builder method.
        if (sourceFactory.copyExistingQueueSize != null) {
            builder.copyExistingQueueSize(sourceFactory.copyExistingQueueSize);
        }
        if (sourceFactory.pollMaxBatchSize != null) {
            builder.pollMaxBatchSize(sourceFactory.pollMaxBatchSize);
        }
        if (sourceFactory.pollAwaitTimeMillis != null) {
            builder.pollAwaitTimeMillis(sourceFactory.pollAwaitTimeMillis);
        }
        if (sourceFactory.heartbeatIntervalMillis != null) {
            builder.heartbeatIntervalMillis(sourceFactory.heartbeatIntervalMillis);
        }
        SourceFunction<DTO> source = builder.build();
        sourceFunctions.add(new ReaderSource(dsFactory.address + "_" + dsFactory.dbName + "_" + mongoReader.collectionName, source));
        SourceChannel sourceChannel = new SourceChannel(sourceFunctions);
        for (ISelectedTab tab : tabs) {
            sourceChannel.addFocusTab(tab.getName());
        }
        return (JobExecutionResult) getConsumerHandle().consume(dataxName, sourceChannel, dataXProcessor);
    } catch (Exception e) {
        throw new MQConsumeException(e.getMessage(), e);
    }
}
Usage example of com.qlangtech.tis.plugin.ds.mangodb.MangoDBDataSourceFactory in the qlangtech "plugins" project: the getDataSourceFactory method of the TestDataXMongodbReader class.
/**
 * Builds the MongoDB datasource fixture shared by the reader and writer
 * template-generation tests (replica-set address pair, db "order1",
 * root/123456 credentials).
 */
public static MangoDBDataSourceFactory getDataSourceFactory() {
    final MangoDBDataSourceFactory factory = new MangoDBDataSourceFactory();
    factory.username = "root";
    factory.password = "123456";
    factory.address = "192.168.28.200:27017;192.168.28.201:27017";
    factory.dbName = "order1";
    return factory;
}
Usage example of com.qlangtech.tis.plugin.ds.mangodb.MangoDBDataSourceFactory in the qlangtech "plugins" project: the testTemplateGenerate method of the TestDataXMongodbWriter class.
/**
 * Verifies the DataX writer configuration generated from the default template,
 * first with credentials and upsert info present, then with those optional
 * settings removed.
 */
public void testTemplateGenerate() throws Exception {
    final MangoDBDataSourceFactory factory = TestDataXMongodbReader.getDataSourceFactory();
    DataXMongodbWriter writer = new DataXMongodbWriter() {
        @Override
        public MangoDBDataSourceFactory getDsFactory() {
            return factory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXMongodbWriter.class;
        }
    };
    writer.template = DataXMongodbWriter.getDftTemplate();
    writer.dataXName = "mongodb_doris";
    writer.dbName = "order1";
    writer.collectionName = "employee";
    writer.upsertInfo = "{\"isUpsert\":true,\"upsertKey\":\"user_id\"}";
    writer.column = IOUtils.loadResourceFromClasspath(this.getClass(), "mongodb-reader-column.json");
    // Pass 1: credentials and upsert info are present.
    WriterTemplate.valiateCfgGenerate("mongodb-datax-writer-assert.json", writer, null);
    // Pass 2: optional credentials and upsert info are absent.
    factory.username = null;
    factory.password = null;
    writer.upsertInfo = null;
    WriterTemplate.valiateCfgGenerate("mongodb-datax-writer-assert-without-option.json", writer, null);
}
Aggregations