Use of com.qlangtech.tis.plugin.ds.BasicDataSourceFactory in project plugins by qlangtech.
Class TestFlinkCDCMySQLSourceFactory, method createMySqlDataSourceFactory:
protected BasicDataSourceFactory createMySqlDataSourceFactory(TargetResName dataxName) {
    Descriptor mySqlV5DataSourceFactory = TIS.get().getDescriptor("MySQLV5DataSourceFactory");
    Assert.assertNotNull(mySqlV5DataSourceFactory);

    // Fill the plugin form with the connection details of the test MySQL container
    Descriptor.FormData formData = new Descriptor.FormData();
    formData.addProp("name", "mysql");
    formData.addProp("dbName", MYSQL_CONTAINER.getDatabaseName());
    formData.addProp("nodeDesc", MYSQL_CONTAINER.getHost());
    formData.addProp("password", MYSQL_CONTAINER.getPassword());
    formData.addProp("userName", MYSQL_CONTAINER.getUsername());
    formData.addProp("port", String.valueOf(MYSQL_CONTAINER.getDatabasePort()));
    formData.addProp("encode", "utf8");
    formData.addProp("useCompression", "true");

    // Instantiate the data source factory plugin from the populated form
    Descriptor.ParseDescribable<BasicDataSourceFactory> parseDescribable =
            mySqlV5DataSourceFactory.newInstance(dataxName.getName(), formData);
    Assert.assertNotNull(parseDescribable.instance);
    return parseDescribable.instance;
}
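A minimal sketch (not from the project) of how the factory built above could be exercised, assuming the conn handle passed to visitFirstConnection supports plain JDBC prepareStatement as it does in the test further down; the "SELECT 1" probe and the "test_datax" resource name are illustrative assumptions only:

// Hypothetical usage sketch: check that the factory can open a connection and answer a trivial query.
BasicDataSourceFactory dsFactory = createMySqlDataSourceFactory(new TargetResName("test_datax"));
dsFactory.visitFirstConnection((conn) -> {
    // conn is assumed to behave like a java.sql.Connection here, as in the CUD test below
    try (PreparedStatement ping = conn.prepareStatement("SELECT 1");
            ResultSet rs = ping.executeQuery()) {
        Assert.assertTrue(rs.next());
        Assert.assertEquals(1, rs.getInt(1));
    }
});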
Use of com.qlangtech.tis.plugin.ds.BasicDataSourceFactory in project plugins by qlangtech.
Class TestFlinkCDCMySQLSourceFactory, method testBinlogConsumeWithDataStreamRegisterInstaneDetailTable:
/**
 * Test the instancedetail table
 *
 * @throws Exception
 */
@Test
public void testBinlogConsumeWithDataStreamRegisterInstaneDetailTable() throws Exception {
    FlinkCDCMySQLSourceFactory mysqlCDCFactory = new FlinkCDCMySQLSourceFactory();
    // consume the binlog starting from the latest offset
    mysqlCDCFactory.startupOptions = "latest";
    final String tabName = "instancedetail";
    CUDCDCTestSuit cdcTestSuit = new CUDCDCTestSuit() {

        @Override
        protected BasicDataSourceFactory createDataSourceFactory(TargetResName dataxName) {
            return createMySqlDataSourceFactory(dataxName);
        }

        @Override
        protected String getColEscape() {
            return "`";
        }

        @Override
        protected IResultRows createConsumerHandle(String tabName) {
            return new TestTableRegisterFlinkSourceHandle(tabName, cols);
        }

        @Override
        protected void verfiyTableCrudProcess(String tabName, BasicDataXRdbmsReader dataxReader, ISelectedTab tab,
                                              IResultRows consumerHandle, IMQListener<JobExecutionResult> imqListener)
                throws MQConsumeException, InterruptedException {
            // super.verfiyTableCrudProcess(tabName, dataxReader, tab, consumerHandle, imqListener);
            List<ISelectedTab> tabs = Collections.singletonList(tab);
            List<TestRow> exampleRows = Lists.newArrayList();
            exampleRows.add(this.parseTestRow(RowKind.INSERT, TestFlinkCDCMySQLSourceFactory.class, tabName + "/insert1.txt"));
            Assert.assertEquals(1, exampleRows.size());
            imqListener.start(dataxName, dataxReader, tabs, null);
            Thread.sleep(1000);
            CloseableIterator<Row> snapshot = consumerHandle.getRowSnapshot(tabName);
            BasicDataSourceFactory dataSourceFactory = (BasicDataSourceFactory) dataxReader.getDataSourceFactory();
            Assert.assertNotNull("dataSourceFactory can not be null", dataSourceFactory);
            dataSourceFactory.visitFirstConnection((conn) -> {
                startProcessConn(conn);
                for (TestRow t : exampleRows) {
                    RowVals<Object> vals = t.vals;
                    // Build an INSERT statement that covers only the columns present in the example row
                    final String insertBase = "insert into " + createTableName(tabName)
                            + "(" + cols.stream().filter((c) -> vals.notNull(c.getName()))
                                    .map((col) -> getColEscape() + col.getName() + getColEscape())
                                    .collect(Collectors.joining(" , ")) + ") "
                            + "values("
                            + cols.stream().filter((c) -> vals.notNull(c.getName()))
                                    .map((col) -> "?")
                                    .collect(Collectors.joining(" , "))
                            + ")";
                    PreparedStatement statement = conn.prepareStatement(insertBase);
                    AtomicInteger ci = new AtomicInteger();
                    cols.stream().filter((c) -> vals.notNull(c.getName())).forEach((col) -> {
                        col.type.accept(new DataType.TypeVisitor<Void>() {

                            @Override
                            public Void longType(DataType type) {
                                try {
                                    statement.setLong(ci.incrementAndGet(), Long.parseLong(vals.getString(col.getName())));
                                } catch (SQLException e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void doubleType(DataType type) {
                                try {
                                    statement.setDouble(ci.incrementAndGet(), Double.parseDouble(vals.getString(col.getName())));
                                } catch (SQLException e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void dateType(DataType type) {
                                try {
                                    statement.setDate(ci.incrementAndGet(), java.sql.Date.valueOf(vals.getString(col.getName())));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void timestampType(DataType type) {
                                try {
                                    statement.setTimestamp(ci.incrementAndGet(), java.sql.Timestamp.valueOf(vals.getString(col.getName())));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void bitType(DataType type) {
                                try {
                                    statement.setByte(ci.incrementAndGet(), Byte.parseByte(vals.getString(col.getName())));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void blobType(DataType type) {
                                try {
                                    try (InputStream input = new ByteArrayInputStream(vals.getString(col.getName()).getBytes(TisUTF8.get()))) {
                                        statement.setBlob(ci.incrementAndGet(), input);
                                    }
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void varcharType(DataType type) {
                                try {
                                    statement.setString(ci.incrementAndGet(), (vals.getString(col.getName())));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void intType(DataType type) {
                                try {
                                    statement.setInt(ci.incrementAndGet(), Integer.parseInt(vals.getString(col.getName())));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void floatType(DataType type) {
                                try {
                                    statement.setFloat(ci.incrementAndGet(), Float.parseFloat(vals.getString(col.getName())));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void decimalType(DataType type) {
                                try {
                                    statement.setBigDecimal(ci.incrementAndGet(), BigDecimal.valueOf(Double.parseDouble(vals.getString(col.getName()))));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void timeType(DataType type) {
                                try {
                                    statement.setTime(ci.incrementAndGet(), java.sql.Time.valueOf(vals.getString(col.getName())));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void tinyIntType(DataType dataType) {
                                try {
                                    statement.setShort(ci.incrementAndGet(), Short.parseShort(vals.getString(col.getName())));
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                                return null;
                            }

                            @Override
                            public Void smallIntType(DataType dataType) {
                                tinyIntType(dataType);
                                return null;
                            }
                        });
                    });
                    Assert.assertEquals(1, executePreparedStatement(conn, statement));
                    statement.close();
                    sleepForAWhile();
                    System.out.println("wait to show insert rows");
                    waitForSnapshotStarted(snapshot);
                    List<TestRow> rows = fetchRows(snapshot, 1, false);
                    for (TestRow rr : rows) {
                        System.out.println("------------" + rr.get("instance_id"));
                        assertTestRow(tabName, RowKind.INSERT, consumerHandle, t, rr);
                    }
                }
            });
        }
    };
    cdcTestSuit.startTest(mysqlCDCFactory, tabName);
}
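For illustration of the insertBase expression above: with a hypothetical example row that fills only two columns, instance_id (varchar) and amount (decimal), and assuming createTableName returns the bare table name, the same stream pipeline reduces to the following self-contained snippet (the column names are assumptions for illustration, not the real instancedetail schema):

// Illustrative only: mirrors the dynamic INSERT construction with hard-coded column names.
List<String> presentCols = Arrays.asList("instance_id", "amount");
String sql = "insert into instancedetail("
        + presentCols.stream().map((c) -> "`" + c + "`").collect(Collectors.joining(" , "))
        + ") values("
        + presentCols.stream().map((c) -> "?").collect(Collectors.joining(" , "))
        + ")";
// sql -> insert into instancedetail(`instance_id` , `amount`) values(? , ?)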
Use of com.qlangtech.tis.plugin.ds.BasicDataSourceFactory in project plugins by qlangtech.
Class FlinkCDCOracleSourceFunction, method start:
@Override
public JobExecutionResult start(TargetResName channalName, IDataxReader dataSource, List<ISelectedTab> tabs,
        IDataxProcessor dataXProcessor) throws MQConsumeException {
    try {
        BasicDataXRdbmsReader reader = (BasicDataXRdbmsReader) dataSource;
        BasicDataSourceFactory f = (BasicDataSourceFactory) reader.getDataSourceFactory();
        SourceChannel sourceChannel = new SourceChannel(SourceChannel.getSourceFunction(
                f, (tab) -> tab.getTabName(), tabs,
                (dbHost, dbs, tbs, debeziumProperties) -> {
                    return dbs.stream().map((databaseName) -> {
                        SourceFunction<DTO> sourceFunction = OracleSource.<DTO>builder()
                                .hostname(dbHost)
                                .debeziumProperties(debeziumProperties)
                                .port(f.port)
                                .startupOptions(sourceFactory.getStartupOptions())
                                // monitor XE database
                                .database(StringUtils.upperCase(f.dbName))
                                // monitor products table
                                .tableList(tbs.toArray(new String[tbs.size()]))
                                .username(f.getUserName())
                                .password(f.getPassword())
                                // converts SourceRecord to JSON String
                                .deserializer(new TISDeserializationSchema())
                                .build();
                        return new ReaderSource(dbHost + ":" + f.port + "_" + databaseName, sourceFunction);
                    }).collect(Collectors.toList());
                }));
        for (ISelectedTab tab : tabs) {
            sourceChannel.addFocusTab(tab.getName());
        }
        return (JobExecutionResult) getConsumerHandle().consume(channalName, sourceChannel, dataXProcessor);
    } catch (Exception e) {
        throw new MQConsumeException(e.getMessage(), e);
    }
}
Use of com.qlangtech.tis.plugin.ds.BasicDataSourceFactory in project plugins by qlangtech.
Class FlinkCDCPostgreSQLSourceFunction, method start:
@Override
public JobExecutionResult start(TargetResName dataxName, IDataxReader dataSource, List<ISelectedTab> tabs,
        IDataxProcessor dataXProcessor) throws MQConsumeException {
    try {
        BasicDataXRdbmsReader rdbmsReader = (BasicDataXRdbmsReader) dataSource;
        final BasicDataSourceFactory dsFactory = (BasicDataSourceFactory) rdbmsReader.getDataSourceFactory();
        BasicDataSourceFactory.ISchemaSupported schemaSupported = (BasicDataSourceFactory.ISchemaSupported) dsFactory;
        if (StringUtils.isEmpty(schemaSupported.getDBSchema())) {
            throw new IllegalStateException("dsFactory:" + dsFactory.dbName + " relevant dbSchema can not be null");
        }
        SourceChannel sourceChannel = new SourceChannel(SourceChannel.getSourceFunction(
                dsFactory, (tab) -> schemaSupported.getDBSchema() + "." + tab.getTabName(), tabs,
                (dbHost, dbs, tbs, debeziumProperties) -> {
                    DateTimeConverter.setDatetimeConverters(PGDateTimeConverter.class.getName(), debeziumProperties);
                    return dbs.stream().map((dbname) -> {
                        SourceFunction<DTO> sourceFunction = PostgreSQLSource.<DTO>builder()
                                .hostname(dbHost)
                                .port(dsFactory.port)
                                // monitor postgres database
                                .database(dbname)
                                // monitor inventory schema
                                .schemaList(schemaSupported.getDBSchema())
                                // monitor products table
                                .tableList(tbs.toArray(new String[tbs.size()]))
                                .username(dsFactory.userName)
                                .password(dsFactory.password)
                                .debeziumProperties(debeziumProperties)
                                // converts SourceRecord to JSON String
                                .deserializer(new TISDeserializationSchema())
                                .build();
                        return new ReaderSource(dbHost + ":" + dsFactory.port + "_" + dbname, sourceFunction);
                    }).collect(Collectors.toList());
                }));
        for (ISelectedTab tab : tabs) {
            sourceChannel.addFocusTab(tab.getName());
        }
        return (JobExecutionResult) getConsumerHandle().consume(dataxName, sourceChannel, dataXProcessor);
    } catch (Exception e) {
        throw new MQConsumeException(e.getMessage(), e);
    }
}
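The Oracle and PostgreSQL start implementations above share the same flow: cast the reader, obtain the BasicDataSourceFactory, build a SourceChannel, register focus tabs, and hand off to the consumer handle. The notable difference is how monitored tables are keyed. A hedged sketch of just that difference, written with plain java.util.function.Function for readability; the actual functional parameter type expected by SourceChannel.getSourceFunction may differ:

// Sketch: the table-key mappers used by the two functions above.
// Oracle keys tables by bare name (and upper-cases the database name for the XE instance);
// PostgreSQL prefixes each table with the schema from BasicDataSourceFactory.ISchemaSupported.
Function<ISelectedTab, String> oracleTableKey = (tab) -> tab.getTabName();
Function<ISelectedTab, String> postgresTableKey = (tab) -> schemaSupported.getDBSchema() + "." + tab.getTabName();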