Use of com.qlangtech.tis.config.hive.meta.HiveTable in project plugins by qlangtech.
From the class TestDataXHudiWriter, method testRealDump:
@Test
public void testRealDump() throws Exception {
    // Bind the collection name and task id to the logging context (MDC).
    MDC.put(TISCollectionUtils.KEY_COLLECTION, HdfsFileSystemFactoryTestUtils.testDataXName.getName());
    MDC.put(IParamContext.KEY_TASK_ID, "123");
    HudiTest houseTest = createDataXWriter();
    long timestamp = 20220311135455L;
    // houseTest.writer.autoCreateTable = true;
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfgDir = folder.newFolder();
    File createDDLDir = folder.newFolder();
    File createDDLFile = null;
    try {
        // Write the CREATE TABLE DDL for the target table into the temporary DDL directory.
        createDDLFile = new File(createDDLDir, HudiWriter.targetTableName + IDataxProcessor.DATAX_CREATE_DDL_FILE_NAME_SUFFIX);
        FileUtils.write(createDDLFile,
            com.qlangtech.tis.extension.impl.IOUtils.loadResourceFromClasspath(DataXHudiWriter.class, "create_ddl_customer_order_relation.sql"),
            TisUTF8.get());
        // Generate the DataX config descriptor: one child task for the customer_order_relation table.
        DataXCfgGenerator.GenerateCfgs genCfg = new DataXCfgGenerator.GenerateCfgs();
        genCfg.setGenTime(timestamp);
        genCfg.setGroupedChildTask(Collections.singletonMap(WriterTemplate.TAB_customer_order_relation,
            Lists.newArrayList(WriterTemplate.TAB_customer_order_relation + "_0")));
        genCfg.write2GenFile(dataXCfgDir);
        EasyMock.expect(dataXProcessor.getDataxCfgDir(null)).andReturn(dataXCfgDir);
        // EasyMock.expect(dataXProcessor.getDataxCreateDDLDir(null)).andReturn(createDDLDir);
        // Route the writer/processor lookups to the fixtures prepared above.
        DataxWriter.dataxWriterGetter = (dataXName) -> {
            return houseTest.writer;
        };
        DataxProcessor.processorGetter = (dataXName) -> {
            Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
            return dataXProcessor;
        };
        IExecChainContext execContext = EasyMock.mock("execContext", IExecChainContext.class);
        EasyMock.expect(execContext.getPartitionTimestamp()).andReturn(String.valueOf(timestamp));
        EasyMock.replay(dataXProcessor, execContext);
        // WriterTemplate.realExecuteDump(hudi_datax_writer_assert_without_optional, houseTest.writer, (cfg) -> {
        //     cfg.set(cfgPathParameter + "." + DataxUtils.EXEC_TIMESTAMP, timestamp);
        //     return cfg;
        // });
        // DataXHudiWriter hudiWriter = new DataXHudiWriter();
        // hudiWriter.dataXName = HdfsFileSystemFactoryTestUtils.testDataXName.getName();
        // hudiWriter.createPostTask(execContext, tab);
        // Build and run the Hudi post task, then assert the table is registered in the Hive metastore.
        HudiDumpPostTask postTask = (HudiDumpPostTask) houseTest.writer.createPostTask(execContext, houseTest.tab);
        Assert.assertNotNull("postTask can not be null", postTask);
        postTask.run();
        IHiveConnGetter hiveConnMeta = houseTest.writer.getHiveConnMeta();
        try (IHiveMetaStore metaStoreClient = hiveConnMeta.createMetaStoreClient()) {
            Assert.assertNotNull(metaStoreClient);
            HiveTable table = metaStoreClient.getTable(hiveConnMeta.getDbName(), WriterTemplate.TAB_customer_order_relation);
            Assert.assertNotNull(WriterTemplate.TAB_customer_order_relation + " can not be null", table);
        }
        EasyMock.verify(dataXProcessor, execContext);
    } finally {
        // FileUtils.deleteQuietly(createDDLFile);
    }
}
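The test above relies on EasyMock's record-replay-verify lifecycle: expectations such as getDataxCfgDir(null) and getPartitionTimestamp() are recorded on the mocks, replay() switches them to playback mode, the post task exercises them, and verify() fails if a recorded expectation was never consumed. The following is a minimal sketch of that lifecycle; the GreetingService interface and every name in it are hypothetical and not part of the plugins project.

import org.easymock.EasyMock;

public class EasyMockLifecycleSketch {

    // Hypothetical collaborator, used only to illustrate the mock lifecycle.
    interface GreetingService {
        String greet(String name);
    }

    public static void main(String[] args) {
        // 1. Record: declare the calls the code under test is expected to make.
        GreetingService service = EasyMock.mock("greetingService", GreetingService.class);
        EasyMock.expect(service.greet("tis")).andReturn("hello tis");

        // 2. Replay: switch the mock from recording to playback mode.
        EasyMock.replay(service);

        // 3. Exercise: the code under test calls the mock like a real collaborator.
        System.out.println(service.greet("tis"));

        // 4. Verify: fail if any recorded expectation was not invoked.
        EasyMock.verify(service);
    }
}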
Use of com.qlangtech.tis.config.hive.meta.HiveTable in project plugins by qlangtech.
From the class DefaultHiveConnGetter, method createMetaStoreClient:
@Override
public IHiveMetaStore createMetaStoreClient() {
    try {
        // Point the HiveConf at the configured metastore URIs and obtain a metastore client.
        HiveConf c = new HiveConf();
        c.set(HiveConf.ConfVars.METASTOREURIS.varname, this.metaStoreUrls);
        final IMetaStoreClient storeClient = Hive.get(c, false).getMSC();
        // Wrap the raw IMetaStoreClient in the TIS IHiveMetaStore abstraction.
        return new IHiveMetaStore() {

            @Override
            public HiveTable getTable(String database, String tableName) {
                try {
                    Table table = storeClient.getTable(database, tableName);
                    return new HiveTable(table.getTableName());
                } catch (TException e) {
                    throw new RuntimeException(e);
                }
            }

            @Override
            public void close() throws IOException {
                storeClient.close();
            }
        };
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
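As a usage sketch of the getter above: the returned IHiveMetaStore is closeable (testRealDump opens it in try-with-resources), so a caller can resolve a HiveTable and let the underlying metastore connection be released automatically. The helper method lookupTable below is hypothetical and not part of the plugins project; it only mirrors the existence check performed in the test.

// Hypothetical helper, for illustration only: mirrors the metastore lookup in testRealDump.
static HiveTable lookupTable(IHiveConnGetter connGetter, String tableName) throws IOException {
    // try-with-resources closes the underlying IMetaStoreClient via IHiveMetaStore.close().
    try (IHiveMetaStore metaStore = connGetter.createMetaStoreClient()) {
        // getTable wraps metastore errors in a RuntimeException (see the implementation above).
        return metaStore.getTable(connGetter.getDbName(), tableName);
    }
}

Invoked as lookupTable(hiveConnMeta, WriterTemplate.TAB_customer_order_relation), this reproduces the assertion made at the end of testRealDump.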