Use of com.qlangtech.tis.datax.IDataxProcessor in the qlangtech "plugins" project:
class FlinkCDCPostgreSQLSourceFunction, method start.
// Starts a Flink CDC consume pipeline for PostgreSQL: builds one Debezium-backed
// SourceFunction per monitored database, wraps them in a SourceChannel, registers
// the selected tables as focus tabs, and hands the channel to the consumer handle.
// Wraps any failure in MQConsumeException (original cause preserved).
@Override
public JobExecutionResult start(TargetResName dataxName, IDataxReader dataSource, List<ISelectedTab> tabs, IDataxProcessor dataXProcessor) throws MQConsumeException {
try {
BasicDataXRdbmsReader rdbmsReader = (BasicDataXRdbmsReader) dataSource;
final BasicDataSourceFactory dsFactory = (BasicDataSourceFactory) rdbmsReader.getDataSourceFactory();
// PostgreSQL CDC needs an explicit DB schema; fail fast when it is not configured.
BasicDataSourceFactory.ISchemaSupported schemaSupported = (BasicDataSourceFactory.ISchemaSupported) dsFactory;
if (StringUtils.isEmpty(schemaSupported.getDBSchema())) {
throw new IllegalStateException("dsFactory:" + dsFactory.dbName + " relevant dbSchema can not be null");
}
// Table names passed to Debezium are qualified as "<schema>.<table>" (second arg below).
SourceChannel sourceChannel = new SourceChannel(SourceChannel.getSourceFunction(dsFactory, (tab) -> schemaSupported.getDBSchema() + "." + tab.getTabName(), tabs, (dbHost, dbs, tbs, debeziumProperties) -> {
// Install the PostgreSQL datetime converter before building the source functions.
DateTimeConverter.setDatetimeConverters(PGDateTimeConverter.class.getName(), debeziumProperties);
// Build one ReaderSource per physical database, keyed "host:port_dbname".
return dbs.stream().map((dbname) -> {
SourceFunction<DTO> sourceFunction = PostgreSQLSource.<DTO>builder().hostname(dbHost).port(dsFactory.port).database(// monitor postgres database
dbname).schemaList(// monitor inventory schema
schemaSupported.getDBSchema()).tableList(// monitor products table
tbs.toArray(new String[tbs.size()])).username(dsFactory.userName).password(dsFactory.password).debeziumProperties(debeziumProperties).deserializer(// converts SourceRecord to JSON String
new TISDeserializationSchema()).build();
return new ReaderSource(dbHost + ":" + dsFactory.port + "_" + dbname, sourceFunction);
}).collect(Collectors.toList());
}));
// NOTE(review): focus tabs are registered via tab.getName() while the qualified
// table list above uses tab.getTabName() — confirm both resolve to the same table.
for (ISelectedTab tab : tabs) {
sourceChannel.addFocusTab(tab.getName());
}
// Delegate actual consumption to the configured consumer handle.
return (JobExecutionResult) getConsumerHandle().consume(dataxName, sourceChannel, dataXProcessor);
} catch (Exception e) {
// Re-throw as the declared checked type, keeping the original cause.
throw new MQConsumeException(e.getMessage(), e);
}
}
Use of com.qlangtech.tis.datax.IDataxProcessor in the qlangtech "plugins" project:
class TISFlinkCDCStart, method deploy.
/**
 * Deploys an incremental (Flink CDC) pipeline for the given dataX instance:
 * wires the sink factory and the incremental source listener factory onto the
 * supplied stream handle, loads the dataX processor/reader configuration, and
 * starts consumption over the reader's selected tables.
 *
 * @param dataxName         name of the dataX instance to deploy
 * @param tableStreamHandle Flink source handle receiving the stream; must not be null
 * @param incrSpec          replica spec of the incremental deployment (not used in this method)
 * @param timestamp         deployment timestamp (not used in this method)
 * @throws Exception             when configuration loading or pipeline startup fails
 * @throws IllegalStateException when the stream handle or the dataX reader is missing
 */
private static void deploy(TargetResName dataxName, BasicFlinkSourceHandle tableStreamHandle, ReplicasSpec incrSpec, long timestamp) throws Exception {
    if (tableStreamHandle == null) {
        throw new IllegalStateException("tableStreamHandle has not been instantiated");
    }
    // Attach the sink factory resolved for this dataX instance.
    tableStreamHandle.setSinkFuncFactory(TISSinkFactory.getIncrSinKFactory(dataxName.getName()));
    // Resolve the incremental source listener factory and hand it the consumer handle.
    MQListenerFactory mqFactory = HeteroEnum.getIncrSourceListenerFactory(dataxName.getName());
    mqFactory.setConsumerHandle(tableStreamHandle);
    IMQListener mq = mqFactory.create();
    // Load the persisted dataX configuration and its reader.
    IDataxProcessor dataXProcess = DataxProcessor.load(null, dataxName.getName());
    DataxReader reader = (DataxReader) dataXProcess.getReader(null);
    if (reader == null) {
        throw new IllegalStateException("dataXReader is illegal");
    }
    // Start consuming change events for every table the reader selected.
    List<ISelectedTab> tabs = reader.getSelectedTabs();
    mq.start(dataxName, reader, tabs, dataXProcess);
}
Use of com.qlangtech.tis.datax.IDataxProcessor in the qlangtech "plugins" project:
class TestLocalDataXJobSubmit, method testCreateDataXJob.
/**
 * Exercises LocalDataXJobSubmit.createDataXJob through three main-class variants:
 * a normal run (must succeed), a throwing run (must fail but complete), and a
 * cancellable run (cancelled mid-flight, must fail but complete).
 *
 * Fix: dataXJobContext was replayed but missing from EasyMock.verify, so its
 * recorded expectations were never checked.
 */
public void testCreateDataXJob() throws Exception {
    Optional<DataXJobSubmit> dataXJobSubmit = DataXJobSubmit.getDataXJobSubmit(DataXJobSubmit.InstanceType.LOCAL);
    Assert.assertTrue("dataXJobSubmit shall present", dataXJobSubmit.isPresent());
    LocalDataXJobSubmit jobSubmit = (LocalDataXJobSubmit) dataXJobSubmit.get();
    jobSubmit.setMainClassName(LocalDataXJobMainEntrypoint.class.getName());
    jobSubmit.setWorkingDirectory(new File("."));
    jobSubmit.setClasspath("target/classes:target/test-classes");
    // A mock RPC service so job status can be reported without a live assemble node.
    AtomicReference<ITISRpcService> ref = new AtomicReference<>();
    ref.set(StatusRpcClient.AssembleSvcCompsite.MOCK_PRC);
    RpcServiceReference statusRpc = new RpcServiceReference(ref);
    DataXJobSubmit.IDataXJobContext dataXJobContext = EasyMock.createMock("dataXJobContext", DataXJobSubmit.IDataXJobContext.class);
    IExecChainContext taskContext = EasyMock.createMock("taskContext", IExecChainContext.class);
    EasyMock.expect(dataXJobContext.getTaskContext()).andReturn(taskContext).anyTimes();
    IDataxProcessor dataxProcessor = EasyMock.createMock("dataxProcessor", IDataxProcessor.class);
    EasyMock.expect(taskContext.getIndexName()).andReturn(dataXName).anyTimes();
    EasyMock.expect(taskContext.getTaskId()).andReturn(TaskId).anyTimes();
    // Fake a previously successful dump phase so row counts can be compared.
    int preSuccessTaskId = 99;
    PhaseStatusCollection preSuccessTask = new PhaseStatusCollection(preSuccessTaskId, new ExecutePhaseRange(FullbuildPhase.FullDump, FullbuildPhase.FullDump));
    DumpPhaseStatus preDumpStatus = new DumpPhaseStatus(preSuccessTaskId);
    DumpPhaseStatus.TableDumpStatus tableDumpStatus = preDumpStatus.getTable(dataXfileName);
    tableDumpStatus.setAllRows(LocalDataXJobMainEntrypoint.testAllRows);
    preSuccessTask.setDumpPhase(preDumpStatus);
    // Three job submissions below -> each expectation is consumed exactly three times.
    EasyMock.expect(taskContext.loadPhaseStatusFromLatest(dataXName)).andReturn(preSuccessTask).times(3);
    TisZkClient zkClient = EasyMock.createMock("TisZkClient", TisZkClient.class);
    String zkSubPath = "nodes0000000020";
    EasyMock.expect(zkClient.getChildren(ZkUtils.ZK_ASSEMBLE_LOG_COLLECT_PATH, null, true)).andReturn(Collections.singletonList(zkSubPath)).times(3);
    EasyMock.expect(zkClient.getData(EasyMock.eq(ZkUtils.ZK_ASSEMBLE_LOG_COLLECT_PATH + "/" + zkSubPath), EasyMock.isNull(), EasyMock.anyObject(Stat.class), EasyMock.eq(true))).andReturn(statusCollectorHost.getBytes(TisUTF8.get())).times(3);
    EasyMock.expect(taskContext.getZkClient()).andReturn(zkClient).anyTimes();
    EasyMock.replay(taskContext, dataxProcessor, zkClient, dataXJobContext);
    // 1) Happy path: the job must run to successful completion.
    IRemoteTaskTrigger dataXJob = jobSubmit.createDataXJob(dataXJobContext, statusRpc, dataxProcessor, dataXfileName);
    RunningStatus running = getRunningStatus(dataXJob);
    assertTrue("running.isSuccess", running.isSuccess());
    // 2) Entry point that throws: must complete but report failure.
    jobSubmit.setMainClassName(LocalDataXJobMainEntrypointThrowException.class.getName());
    dataXJob = jobSubmit.createDataXJob(dataXJobContext, statusRpc, dataxProcessor, dataXfileName);
    running = getRunningStatus(dataXJob);
    assertFalse("shall faild", running.isSuccess());
    assertTrue("shall complete", running.isComplete());
    // 3) Cancellable entry point: cancel mid-run, then poll until completion.
    jobSubmit.setMainClassName(LocalDataXJobMainEntrypointCancellable.class.getName());
    dataXJob = jobSubmit.createDataXJob(dataXJobContext, statusRpc, dataxProcessor, dataXfileName);
    running = getRunningStatus(dataXJob, false);
    Thread.sleep(2000);
    dataXJob.cancel();
    int i = 0;
    while (i++ < 3 && !(running = dataXJob.getRunningStatus()).isComplete()) {
        Thread.sleep(1000);
    }
    assertFalse("shall faild", running.isSuccess());
    assertTrue("shall complete", running.isComplete());
    // Verify every replayed mock (dataXJobContext was previously omitted here).
    EasyMock.verify(taskContext, dataxProcessor, zkClient, dataXJobContext);
}
Use of com.qlangtech.tis.datax.IDataxProcessor in the qlangtech "plugins" project:
class TestDataxMySQLWriter, method validateConfigGenerate.
/**
 * Asserts that the MySQL writer produces the expected sub-task context and that
 * the generated DataX JSON configuration matches the given assertion fixture.
 *
 * @param assertFileName classpath resource holding the expected JSON output
 * @param mySQLWriter    writer under test, already configured with connection settings
 * @throws IOException when config generation fails
 */
private void validateConfigGenerate(String assertFileName, DataxMySQLWriter mySQLWriter) throws IOException {
    // Standard three-column table mapping (orderinfo -> orderinfo_new) from the shared fixture.
    Optional<IDataxProcessor.TableMap> tableMap = TestSelectedTabs.createTableMapper();
    IDataxContext subTaskCtx = mySQLWriter.getSubTask(tableMap);
    assertNotNull(subTaskCtx);
    // The writer must expose quoted column names and the configured connection values.
    RdbmsDataxContext mySQLDataxContext = (RdbmsDataxContext) subTaskCtx;
    assertEquals("\"`col1`\",\"`col2`\",\"`col3`\"", mySQLDataxContext.getColsQuotes());
    assertEquals(mysqlJdbcUrl, mySQLDataxContext.getJdbcUrl());
    assertEquals("123456", mySQLDataxContext.getPassword());
    assertEquals("orderinfo_new", mySQLDataxContext.tabName);
    assertEquals("root", mySQLDataxContext.getUsername());
    // Mock the surrounding processor so only the writer's template is exercised.
    IDataxProcessor processor = EasyMock.mock("dataxProcessor", IDataxProcessor.class);
    IDataxGlobalCfg dataxGlobalCfg = EasyMock.mock("dataxGlobalCfg", IDataxGlobalCfg.class);
    IDataxReader dataxReader = EasyMock.mock("dataxReader", IDataxReader.class);
    EasyMock.expect(processor.getReader(null)).andReturn(dataxReader);
    EasyMock.expect(processor.getWriter(null)).andReturn(mySQLWriter);
    EasyMock.expect(processor.getDataXGlobalCfg()).andReturn(dataxGlobalCfg);
    EasyMock.replay(processor, dataxGlobalCfg, dataxReader);
    // Use the writer's own template instead of loading one from the workspace.
    DataXCfgGenerator dataProcessor = new DataXCfgGenerator(null, "testDataXName", processor) {

        @Override
        public String getTemplateContent() {
            return mySQLWriter.getTemplate();
        }
    };
    String cfgResult = dataProcessor.generateDataxConfig(null, mySQLWriter, dataxReader, tableMap);
    JsonUtil.assertJSONEqual(this.getClass(), assertFileName, cfgResult, (m, e, a) -> {
        assertEquals(m, e, a);
    });
    EasyMock.verify(processor, dataxGlobalCfg, dataxReader);
}
Use of com.qlangtech.tis.datax.IDataxProcessor in the qlangtech "plugins" project:
class TestDataXOssWriter, method testTempateGenerate.
/**
 * Verifies OSS writer config generation twice: once with every optional field
 * populated and once with all optional fields null, each compared against its
 * own JSON assertion fixture.
 *
 * NOTE(review): method name "testTempateGenerate" has a typo ("Tempate") but is
 * kept as-is since test names are referenced externally (e.g. suites, CI filters).
 */
public void testTempateGenerate() throws Exception {
    // Only the global config lookup is needed from the processor; mock the rest away.
    IDataxProcessor processor = EasyMock.mock("dataxProcessor", IDataxProcessor.class);
    IDataxGlobalCfg dataxGlobalCfg = EasyMock.mock("dataxGlobalCfg", IDataxGlobalCfg.class);
    EasyMock.expect(processor.getDataXGlobalCfg()).andReturn(dataxGlobalCfg).anyTimes();
    // Fully-populated writer: every optional field set.
    DataXOssWriter ossWriter = new DataXOssWriter();
    ossWriter.endpoint = "aliyun-bj-endpoint";
    ossWriter.bucket = "testBucket";
    ossWriter.object = "tis/mytable/*";
    ossWriter.template = DataXOssWriter.getDftTemplate();
    ossWriter.header = "[\"name\",\"age\",\"degree\"]";
    ossWriter.encoding = "utf-8";
    ossWriter.fieldDelimiter = "\t";
    ossWriter.writeMode = "nonConflict";
    ossWriter.dateFormat = "yyyy-MM-dd";
    ossWriter.fileFormat = "csv";
    ossWriter.maxFileSize = 300;
    ossWriter.nullFormat = "\\\\N";
    EasyMock.expect(processor.getWriter(null)).andReturn(ossWriter).anyTimes();
    EasyMock.replay(processor, dataxGlobalCfg);
    valiateWriterCfgGenerate("oss-datax-writer-assert.json", processor, ossWriter);
    // Null out every optional field: generation must still succeed and match
    // the "without optional values" fixture.
    ossWriter.fieldDelimiter = null;
    ossWriter.encoding = null;
    ossWriter.nullFormat = null;
    ossWriter.dateFormat = null;
    ossWriter.fileFormat = null;
    ossWriter.header = null;
    ossWriter.maxFileSize = null;
    valiateWriterCfgGenerate("oss-datax-writer-assert-without-option-val.json", processor, ossWriter);
    EasyMock.verify(processor, dataxGlobalCfg);
}
Aggregations