Search in sources :

Example 1 with HiveTable

use of com.qlangtech.tis.config.hive.meta.HiveTable in project plugins by qlangtech.

In class TestDataXHudiWriter, the method testRealDump:

@Test
public void testRealDump() throws Exception {
    // Bind the collection/task identifiers into the MDC so downstream
    // components resolve the correct DataX pipeline context.
    MDC.put(TISCollectionUtils.KEY_COLLECTION, HdfsFileSystemFactoryTestUtils.testDataXName.getName());
    MDC.put(IParamContext.KEY_TASK_ID, "123");
    HudiTest houseTest = createDataXWriter();
    // Fixed partition timestamp in yyyyMMddHHmmss form. Uppercase 'L'
    // suffix — lowercase 'l' is easily misread as the digit '1'.
    long timestamp = 20220311135455L;
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfgDir = folder.newFolder();
    File createDDLDir = folder.newFolder();
    File createDDLFile = null;
    try {
        // Materialize the CREATE TABLE DDL fixture the writer consumes.
        createDDLFile = new File(createDDLDir, HudiWriter.targetTableName + IDataxProcessor.DATAX_CREATE_DDL_FILE_NAME_SUFFIX);
        FileUtils.write(createDDLFile, com.qlangtech.tis.extension.impl.IOUtils.loadResourceFromClasspath(DataXHudiWriter.class, "create_ddl_customer_order_relation.sql"), TisUTF8.get());
        // Generate the grouped child-task config the post task reads.
        DataXCfgGenerator.GenerateCfgs genCfg = new DataXCfgGenerator.GenerateCfgs();
        genCfg.setGenTime(timestamp);
        genCfg.setGroupedChildTask(Collections.singletonMap(WriterTemplate.TAB_customer_order_relation, Lists.newArrayList(WriterTemplate.TAB_customer_order_relation + "_0")));
        genCfg.write2GenFile(dataXCfgDir);
        EasyMock.expect(dataXProcessor.getDataxCfgDir(null)).andReturn(dataXCfgDir);
        // Route the static lookups to our test doubles.
        DataxWriter.dataxWriterGetter = (dataXName) -> houseTest.writer;
        DataxProcessor.processorGetter = (dataXName) -> {
            Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
            return dataXProcessor;
        };
        IExecChainContext execContext = EasyMock.mock("execContext", IExecChainContext.class);
        EasyMock.expect(execContext.getPartitionTimestamp()).andReturn(String.valueOf(timestamp));
        EasyMock.replay(dataXProcessor, execContext);
        // Run the post-dump task that is expected to register the Hudi
        // table in the Hive metastore.
        HudiDumpPostTask postTask = (HudiDumpPostTask) houseTest.writer.createPostTask(execContext, houseTest.tab);
        Assert.assertNotNull("postTask can not be null", postTask);
        postTask.run();
        // Verify the table is now visible through the Hive metastore.
        IHiveConnGetter hiveConnMeta = houseTest.writer.getHiveConnMeta();
        try (IHiveMetaStore metaStoreClient = hiveConnMeta.createMetaStoreClient()) {
            Assert.assertNotNull(metaStoreClient);
            HiveTable table = metaStoreClient.getTable(hiveConnMeta.getDbName(), WriterTemplate.TAB_customer_order_relation);
            Assert.assertNotNull(WriterTemplate.TAB_customer_order_relation + " can not be null", table);
        }
        EasyMock.verify(dataXProcessor, execContext);
    } finally {
        // Best-effort cleanup of the generated DDL fixture. deleteQuietly
        // never throws (and tolerates null), so it cannot mask an
        // assertion failure raised in the try block above.
        FileUtils.deleteQuietly(createDDLFile);
    }
}
Also used : IHiveMetaStore(com.qlangtech.tis.config.hive.meta.IHiveMetaStore) DataXCfgGenerator(com.qlangtech.tis.datax.impl.DataXCfgGenerator) IExecChainContext(com.qlangtech.tis.exec.IExecChainContext) DataxProcessor(com.qlangtech.tis.datax.impl.DataxProcessor) IDataxProcessor(com.qlangtech.tis.datax.IDataxProcessor) File(java.io.File) IHiveConnGetter(com.qlangtech.tis.config.hive.IHiveConnGetter) HiveTable(com.qlangtech.tis.config.hive.meta.HiveTable) Test(org.junit.Test)

Example 2 with HiveTable

use of com.qlangtech.tis.config.hive.meta.HiveTable in project plugins by qlangtech.

In class DefaultHiveConnGetter, the method createMetaStoreClient:

/**
 * Opens a connection to the Hive metastore configured via
 * {@code this.metaStoreUrls} and wraps it in the {@link IHiveMetaStore}
 * facade. Callers own the returned handle and must close it.
 */
@Override
public IHiveMetaStore createMetaStoreClient() {
    try {
        // Point the Hive configuration at the configured metastore endpoint(s).
        final HiveConf hiveConf = new HiveConf();
        hiveConf.set(HiveConf.ConfVars.METASTOREURIS.varname, this.metaStoreUrls);
        final IMetaStoreClient msc = Hive.get(hiveConf, false).getMSC();
        // Adapt the raw metastore client to the narrower IHiveMetaStore contract.
        return new IHiveMetaStore() {

            @Override
            public HiveTable getTable(String database, String tableName) {
                try {
                    final Table tbl = msc.getTable(database, tableName);
                    return new HiveTable(tbl.getTableName());
                } catch (TException e) {
                    // Surface thrift-level failures as unchecked, preserving the cause.
                    throw new RuntimeException(e);
                }
            }

            @Override
            public void close() throws IOException {
                msc.close();
            }
        };
    } catch (Exception e) {
        // Boundary wrap: Hive.get/getMSC throw checked exceptions callers cannot handle.
        throw new RuntimeException(e);
    }
}
Also used : TException(org.apache.thrift.TException) Table(org.apache.hadoop.hive.metastore.api.Table) HiveTable(com.qlangtech.tis.config.hive.meta.HiveTable) IHiveMetaStore(com.qlangtech.tis.config.hive.meta.IHiveMetaStore) HiveConf(org.apache.hadoop.hive.conf.HiveConf) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) HiveTable(com.qlangtech.tis.config.hive.meta.HiveTable) TException(org.apache.thrift.TException) IOException(java.io.IOException)

Aggregations

HiveTable (com.qlangtech.tis.config.hive.meta.HiveTable)2 IHiveMetaStore (com.qlangtech.tis.config.hive.meta.IHiveMetaStore)2 IHiveConnGetter (com.qlangtech.tis.config.hive.IHiveConnGetter)1 IDataxProcessor (com.qlangtech.tis.datax.IDataxProcessor)1 DataXCfgGenerator (com.qlangtech.tis.datax.impl.DataXCfgGenerator)1 DataxProcessor (com.qlangtech.tis.datax.impl.DataxProcessor)1 IExecChainContext (com.qlangtech.tis.exec.IExecChainContext)1 File (java.io.File)1 IOException (java.io.IOException)1 HiveConf (org.apache.hadoop.hive.conf.HiveConf)1 IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)1 Table (org.apache.hadoop.hive.metastore.api.Table)1 TException (org.apache.thrift.TException)1 Test (org.junit.Test)1