Use of com.qlangtech.tis.datax.impl.DataxProcessor in project plugins by qlangtech.
The class TestDataXHudiWriter, method testFlinkSqlTableDDLCreate.
@Test
public void testFlinkSqlTableDDLCreate() throws Exception {
    FileSystemFactory fsFactory = EasyMock.createMock("fsFactory", FileSystemFactory.class);
    ITISFileSystem fs = EasyMock.createMock("fileSystem", ITISFileSystem.class);
    // fs.getRootDir()
    String child = "default/customer_order_relation";
    String dataDir = "hudi";
    IPath rootPath = new HdfsPath(HdfsFileSystemFactoryTestUtils.DEFAULT_HDFS_ADDRESS + "/user/admin");
    IPath tabPath = new HdfsPath(rootPath, child);
    IPath hudiDataPath = new HdfsPath(tabPath, dataDir);
    EasyMock.expect(fs.getPath(rootPath, child)).andReturn(tabPath);
    EasyMock.expect(fs.getPath(tabPath, dataDir)).andReturn(hudiDataPath);
    EasyMock.expect(fs.getRootDir()).andReturn(rootPath);
    EasyMock.expect(fsFactory.getFileSystem()).andReturn(fs);
    HudiTest forTest = createDataXWriter(Optional.of(fsFactory));
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfg = folder.newFile();
    FileUtils.writeStringToFile(dataXCfg,
            "{job:{content:[{\"writer\":"
                    + IOUtils.loadResourceFromClasspath(this.getClass(), hudi_datax_writer_assert_without_optional)
                    + "}]}}",
            TisUTF8.get());
    List<File> dataXFiles = Lists.newArrayList(dataXCfg);
    EasyMock.expect(dataXProcessor.getDataxCfgFileNames(null)).andReturn(dataXFiles);
    DataxProcessor.processorGetter = (dataXName) -> {
        Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
        return dataXProcessor;
    };
    EasyMock.replay(dataXProcessor, fsFactory, fs);
    // IStreamTableCreator.IStreamTableMeta streamTableMeta =
    //         forTest.writer.getStreamTableMeta(HudiWriter.targetTableName);
    // Assert.assertNotNull("streamTableMeta can not be null", streamTableMeta);
    // streamTableMeta.getColsMeta();
    // System.out.println(streamTableMeta.createFlinkTableDDL());
    // DataXHudiWriter.HudiStreamTemplateData tplData =
    //         (DataXHudiWriter.HudiStreamTemplateData) forTest.writer.decorateMergeData(
    //                 new TestStreamTemplateData(HdfsFileSystemFactoryTestUtils.testDataXName, HudiWriter.targetTableName));
    // StringBuffer createTabDdl = tplData.getSinkFlinkTableDDL(HudiWriter.targetTableName);
    // Assert.assertNotNull(createTabDdl);
    // System.out.println(createTabDdl);
    EasyMock.verify(dataXProcessor, fsFactory, fs);
}
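All of the examples on this page lean on the same seam: DataxProcessor.processorGetter (and, in the dump tests, DataxWriter.dataxWriterGetter) are static lookup hooks that a test reassigns so the code under test receives a mocked processor instead of resolving one from the TIS metadata store. Below is a minimal sketch of that record/replay/verify cycle in isolation, assuming the hook keeps the functional shape shown above; the pipeline name and the null-reset in tearDown are hypothetical and not taken from the project.

import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import com.qlangtech.tis.datax.impl.DataxProcessor;

public class ProcessorGetterInjectionSketch {

    private DataxProcessor mockProcessor;

    @Test
    public void testWithInjectedProcessor() throws Exception {
        mockProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
        // Record whatever the code under test will ask of the processor, e.g.:
        // EasyMock.expect(mockProcessor.getDataxCfgDir(null)).andReturn(someDir);
        EasyMock.replay(mockProcessor);
        // Reassign the static hook: any lookup by pipeline name now yields the mock.
        DataxProcessor.processorGetter = (dataXName) -> {
            Assert.assertEquals("expected_pipeline_name", dataXName); // hypothetical name
            return mockProcessor;
        };
        // ... invoke the writer / post task under test here ...
        EasyMock.verify(mockProcessor);
    }

    @After
    public void tearDown() {
        // Assumption: clearing the hook restores the default metadata-store lookup,
        // so later tests on the same JVM are not affected.
        DataxProcessor.processorGetter = null;
    }
}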
Use of com.qlangtech.tis.datax.impl.DataxProcessor in project plugins by qlangtech.
The class TestDataXHudiWriter, method testRealDump.
@Test
public void testRealDump() throws Exception {
    MDC.put(TISCollectionUtils.KEY_COLLECTION, HdfsFileSystemFactoryTestUtils.testDataXName.getName());
    MDC.put(IParamContext.KEY_TASK_ID, "123");
    HudiTest houseTest = createDataXWriter();
    long timestamp = 20220311135455L;
    // houseTest.writer.autoCreateTable = true;
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfgDir = folder.newFolder();
    File createDDLDir = folder.newFolder();
    File createDDLFile = null;
    try {
        createDDLFile = new File(createDDLDir, HudiWriter.targetTableName + IDataxProcessor.DATAX_CREATE_DDL_FILE_NAME_SUFFIX);
        FileUtils.write(createDDLFile,
                com.qlangtech.tis.extension.impl.IOUtils.loadResourceFromClasspath(DataXHudiWriter.class, "create_ddl_customer_order_relation.sql"),
                TisUTF8.get());
        DataXCfgGenerator.GenerateCfgs genCfg = new DataXCfgGenerator.GenerateCfgs();
        genCfg.setGenTime(timestamp);
        genCfg.setGroupedChildTask(Collections.singletonMap(WriterTemplate.TAB_customer_order_relation,
                Lists.newArrayList(WriterTemplate.TAB_customer_order_relation + "_0")));
        genCfg.write2GenFile(dataXCfgDir);
        EasyMock.expect(dataXProcessor.getDataxCfgDir(null)).andReturn(dataXCfgDir);
        // EasyMock.expect(dataXProcessor.getDataxCreateDDLDir(null)).andReturn(createDDLDir);
        DataxWriter.dataxWriterGetter = (dataXName) -> {
            return houseTest.writer;
        };
        DataxProcessor.processorGetter = (dataXName) -> {
            Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
            return dataXProcessor;
        };
        IExecChainContext execContext = EasyMock.mock("execContext", IExecChainContext.class);
        EasyMock.expect(execContext.getPartitionTimestamp()).andReturn(String.valueOf(timestamp));
        EasyMock.replay(dataXProcessor, execContext);
        // WriterTemplate.realExecuteDump(hudi_datax_writer_assert_without_optional, houseTest.writer, (cfg) -> {
        //     cfg.set(cfgPathParameter + "." + DataxUtils.EXEC_TIMESTAMP, timestamp);
        //     return cfg;
        // });
        // DataXHudiWriter hudiWriter = new DataXHudiWriter();
        // hudiWriter.dataXName = HdfsFileSystemFactoryTestUtils.testDataXName.getName();
        // hudiWriter.createPostTask(execContext, tab);
        HudiDumpPostTask postTask = (HudiDumpPostTask) houseTest.writer.createPostTask(execContext, houseTest.tab);
        Assert.assertNotNull("postTask can not be null", postTask);
        postTask.run();
        IHiveConnGetter hiveConnMeta = houseTest.writer.getHiveConnMeta();
        try (IHiveMetaStore metaStoreClient = hiveConnMeta.createMetaStoreClient()) {
            Assert.assertNotNull(metaStoreClient);
            HiveTable table = metaStoreClient.getTable(hiveConnMeta.getDbName(), WriterTemplate.TAB_customer_order_relation);
            Assert.assertNotNull(WriterTemplate.TAB_customer_order_relation + " can not be null", table);
        }
        EasyMock.verify(dataXProcessor, execContext);
    } finally {
        // FileUtils.deleteQuietly(createDDLFile);
    }
}
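The literal 20220311135455L looks like a yyyyMMddHHmmss partition timestamp, and the mocked execContext.getPartitionTimestamp() hands the same value back as a String so that it matches genCfg.setGenTime(timestamp). A minimal sketch of producing such a value with java.time; the format is inferred from the literal above, so treat it as an assumption rather than the project's documented convention.

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class PartitionTimestampSketch {
    public static void main(String[] args) {
        // Assumed format: yyyyMMddHHmmss, matching 2022-03-11 13:54:55 above.
        DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");
        String partitionTimestamp = LocalDateTime.now().format(fmt);
        System.out.println(partitionTimestamp);
        // The test pins a fixed value instead of "now" so that the generated
        // cfg metadata and the mocked exec context agree on one partition.
    }
}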
Use of com.qlangtech.tis.datax.impl.DataxProcessor in project plugins by qlangtech.
The class TestDataXDorisWriter, method testRealDump.
public void testRealDump() throws Exception {
    String targetTableName = "customer_order_relation";
    String testDataXName = "mysql_doris";
    CreateDorisWriter createDorisWriter = new CreateDorisWriter().invoke();
    createDorisWriter.dsFactory.password = "";
    createDorisWriter.dsFactory.nodeDesc = "192.168.28.201";
    createDorisWriter.writer.autoCreateTable = true;
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File createDDLDir = new File(".");
    File createDDLFile = null;
    try {
        createDDLFile = new File(createDDLDir, targetTableName + IDataxProcessor.DATAX_CREATE_DDL_FILE_NAME_SUFFIX);
        FileUtils.write(createDDLFile,
                com.qlangtech.tis.extension.impl.IOUtils.loadResourceFromClasspath(DataXDorisWriter.class, "create_ddl_customer_order_relation.sql"),
                TisUTF8.get());
        EasyMock.expect(dataXProcessor.getDataxCreateDDLDir(null)).andReturn(createDDLDir);
        DataxWriter.dataxWriterGetter = (dataXName) -> {
            return createDorisWriter.writer;
        };
        DataxProcessor.processorGetter = (dataXName) -> {
            assertEquals(testDataXName, dataXName);
            return dataXProcessor;
        };
        EasyMock.replay(dataXProcessor);
        // DataXDorisWriter writer = new DataXDorisWriter();
        WriterTemplate.realExecuteDump("doris_writer_real_dump.json", createDorisWriter.writer);
        EasyMock.verify(dataXProcessor);
    } finally {
        FileUtils.forceDelete(createDDLFile);
    }
}
Use of com.qlangtech.tis.datax.impl.DataxProcessor in project plugins by qlangtech.
The class TestDataXStarRocksWriter, method testRealDump.
public void testRealDump() throws Exception {
    String targetTableName = "customer_order_relation";
    String testDataXName = "mysql_doris";
    CreateDorisWriter createDorisWriter = new CreateDorisWriter().invoke();
    createDorisWriter.dsFactory.password = "";
    createDorisWriter.dsFactory.nodeDesc = "192.168.28.201";
    createDorisWriter.writer.autoCreateTable = true;
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File createDDLDir = new File(".");
    File createDDLFile = null;
    try {
        createDDLFile = new File(createDDLDir, targetTableName + IDataxProcessor.DATAX_CREATE_DDL_FILE_NAME_SUFFIX);
        FileUtils.write(createDDLFile,
                com.qlangtech.tis.extension.impl.IOUtils.loadResourceFromClasspath(DataXDorisWriter.class, "create_ddl_customer_order_relation.sql"),
                TisUTF8.get());
        EasyMock.expect(dataXProcessor.getDataxCreateDDLDir(null)).andReturn(createDDLDir);
        DataxWriter.dataxWriterGetter = (dataXName) -> {
            return createDorisWriter.writer;
        };
        DataxProcessor.processorGetter = (dataXName) -> {
            assertEquals(testDataXName, dataXName);
            return dataXProcessor;
        };
        EasyMock.replay(dataXProcessor);
        // DataXDorisWriter writer = new DataXDorisWriter();
        WriterTemplate.realExecuteDump("starrocks_writer_real_dump.json", createDorisWriter.writer);
        EasyMock.verify(dataXProcessor);
    } finally {
        FileUtils.forceDelete(createDDLFile);
    }
}
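The Doris and StarRocks examples differ only in the writer they build and the JSON template passed to WriterTemplate.realExecuteDump; both write the create-table DDL into the working directory and delete it in the finally block. One caveat: FileUtils.forceDelete throws if the file was never actually written (for instance when loadResourceFromClasspath fails), which can mask the original test failure. A small sketch of a more forgiving cleanup with Commons IO, offered as an alternative rather than the project's actual code; the placeholder DDL string and helper name are hypothetical.

import java.io.File;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.FileUtils;

public class DdlFileCleanupSketch {
    public static void runWithDdlFile(String targetTableName, String ddlSuffix) throws Exception {
        File createDDLFile = null;
        try {
            createDDLFile = new File(".", targetTableName + ddlSuffix);
            FileUtils.write(createDDLFile, "CREATE TABLE ...", StandardCharsets.UTF_8); // placeholder DDL
            // ... stub the processor, replay, run WriterTemplate.realExecuteDump(...), verify ...
        } finally {
            // deleteQuietly tolerates null and missing files, so a failure before the
            // write is not hidden behind a FileNotFoundException thrown during cleanup.
            FileUtils.deleteQuietly(createDDLFile);
        }
    }
}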
Use of com.qlangtech.tis.datax.impl.DataxProcessor in project plugins by qlangtech.
The class TestDistributedOverseerDataXJobSubmit, method testPushMsgToDistributeQueue.
public void testPushMsgToDistributeQueue() {
    DataXJobWorker dataxJobWorker = DataXJobWorker.getJobWorker(DataXJobWorker.K8S_DATAX_INSTANCE_NAME);
    assertEquals("/datax/jobs", dataxJobWorker.getZkQueuePath());
    assertEquals("192.168.28.200:2181/tis/cloud", dataxJobWorker.getZookeeperAddress());
    DataxProcessor dataxProcessor = IAppSource.load(DATAX_NAME);
    assertNotNull(dataxProcessor);
    // IDataxProcessor dataxProcessor = EasyMock.createMock("dataxProcessor", IDataxProcessor.class);
    // EasyMock.expect(dataxProcessor.getDataxCfgDir()).andReturn();
    IJoinTaskContext taskContext = EasyMock.createMock("joinTaskContext", IJoinTaskContext.class);
    EasyMock.expect(taskContext.getIndexName()).andReturn(DATAX_NAME);
    EasyMock.expect(taskContext.getTaskId()).andReturn(DATAX_TASK_ID);
    AtomicReference<ITISRpcService> ref = new AtomicReference<>();
    ref.set(StatusRpcClient.AssembleSvcCompsite.MOCK_PRC);
    RpcServiceReference svcRef = new RpcServiceReference(ref);
    Optional<DataXJobSubmit> jobSubmit = DataXJobSubmit.getDataXJobSubmit(DataXJobSubmit.InstanceType.DISTRIBUTE);
    assertTrue(jobSubmit.isPresent());
    DataXJobSubmit submit = jobSubmit.get();
    DataXJobSubmit.IDataXJobContext jobContext = submit.createJobContext(taskContext);
    EasyMock.replay(taskContext);
    // createDataXJob(IJoinTaskContext taskContext,
    //         RpcServiceReference statusRpc, IDataxProcessor dataxProcessor, String dataXfileName)
    IRemoteTaskTrigger dataXJob = submit.createDataXJob(jobContext, svcRef, dataxProcessor, DATAX_JOB_FILE_NAME);
    dataXJob.run();
    EasyMock.verify(taskContext);
}
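testPushMsgToDistributeQueue talks to a concrete ZooKeeper address (192.168.28.200:2181/tis/cloud) and expects a K8S DataX job worker to be registered, so it only passes against a live environment. A hedged sketch, not taken from the project, of skipping such tests with JUnit's Assume when the endpoint is unreachable; the host and port are simply the values from the example above.

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import org.junit.Assume;
import org.junit.Before;

public class RequiresZkEnvironmentSketch {

    @Before
    public void checkZkReachable() {
        // Skip (rather than fail) the distributed-queue tests when ZooKeeper is down.
        Assume.assumeTrue("ZooKeeper not reachable, skipping distributed-queue test",
                canConnect("192.168.28.200", 2181, 1000));
    }

    private static boolean canConnect(String host, int port, int timeoutMillis) {
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress(host, port), timeoutMillis);
            return true;
        } catch (IOException e) {
            return false;
        }
    }
}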