Example 1 with HdfsPath

Use of com.qlangtech.tis.hdfs.impl.HdfsPath in project plugins by qlangtech.

From class TestDataXHudiWriter, method testFlinkSqlTableDDLCreate:

@Test
public void testFlinkSqlTableDDLCreate() throws Exception {
    FileSystemFactory fsFactory = EasyMock.createMock("fsFactory", FileSystemFactory.class);
    ITISFileSystem fs = EasyMock.createMock("fileSystem", ITISFileSystem.class);
    // fs.getRootDir()
    String child = "default/customer_order_relation";
    String dataDir = "hudi";
    IPath rootPath = new HdfsPath(HdfsFileSystemFactoryTestUtils.DEFAULT_HDFS_ADDRESS + "/user/admin");
    IPath tabPath = new HdfsPath(rootPath, child);
    IPath hudiDataPath = new HdfsPath(tabPath, dataDir);
    EasyMock.expect(fs.getPath(rootPath, child)).andReturn(tabPath);
    EasyMock.expect(fs.getPath(tabPath, dataDir)).andReturn(hudiDataPath);
    EasyMock.expect(fs.getRootDir()).andReturn(rootPath);
    EasyMock.expect(fsFactory.getFileSystem()).andReturn(fs);
    HudiTest forTest = createDataXWriter(Optional.of(fsFactory));
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfg = folder.newFile();
    FileUtils.writeStringToFile(dataXCfg, "{job:{content:[{\"writer\":" + IOUtils.loadResourceFromClasspath(this.getClass(), hudi_datax_writer_assert_without_optional) + "}]}}", TisUTF8.get());
    List<File> dataXFiles = Lists.newArrayList(dataXCfg);
    EasyMock.expect(dataXProcessor.getDataxCfgFileNames(null)).andReturn(dataXFiles);
    DataxProcessor.processorGetter = (dataXName) -> {
        Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
        return dataXProcessor;
    };
    EasyMock.replay(dataXProcessor, fsFactory, fs);
    // IStreamTableCreator.IStreamTableMeta streamTableMeta =
    //         forTest.writer.getStreamTableMeta(HudiWriter.targetTableName);
    // Assert.assertNotNull("streamTableMeta can not be null", streamTableMeta);
    // streamTableMeta.getColsMeta();
    // System.out.println(streamTableMeta.createFlinkTableDDL());
    // DataXHudiWriter.HudiStreamTemplateData tplData =
    //         (DataXHudiWriter.HudiStreamTemplateData) forTest.writer.decorateMergeData(
    //                 new TestStreamTemplateData(HdfsFileSystemFactoryTestUtils.testDataXName, HudiWriter.targetTableName));
    // StringBuffer createTabDdl = tplData.getSinkFlinkTableDDL(HudiWriter.targetTableName);
    // Assert.assertNotNull(createTabDdl);
    // System.out.println(createTabDdl);
    EasyMock.verify(dataXProcessor, fsFactory, fs);
}
Also used: DataxProcessor (com.qlangtech.tis.datax.impl.DataxProcessor), IDataxProcessor (com.qlangtech.tis.datax.IDataxProcessor), HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath), IPath (com.qlangtech.tis.fs.IPath), ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem), File (java.io.File), FileSystemFactory (com.qlangtech.tis.offline.FileSystemFactory), Test (org.junit.Test)
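Below is a minimal, self-contained sketch of the pattern this test exercises: HdfsPath instances are chained from a parent IPath plus a child string, and ITISFileSystem is stubbed with EasyMock so path resolution can be checked without a live cluster. The namenode address and the class name are illustrative placeholders, not values taken from the project.

import com.qlangtech.tis.fs.IPath;
import com.qlangtech.tis.fs.ITISFileSystem;
import com.qlangtech.tis.hdfs.impl.HdfsPath;
import org.easymock.EasyMock;

public class HdfsPathMockingSketch {

    public static void main(String[] args) {
        // placeholder namenode address; the real test reads it from HdfsFileSystemFactoryTestUtils
        IPath rootPath = new HdfsPath("hdfs://namenode:8020/user/admin");
        IPath tabPath = new HdfsPath(rootPath, "default/customer_order_relation");

        // stub ITISFileSystem so getRootDir()/getPath() return the prepared paths
        ITISFileSystem fs = EasyMock.createMock("fileSystem", ITISFileSystem.class);
        EasyMock.expect(fs.getRootDir()).andReturn(rootPath);
        EasyMock.expect(fs.getPath(rootPath, "default/customer_order_relation")).andReturn(tabPath);
        EasyMock.replay(fs);

        // code under test would resolve table paths through the mocked file system
        IPath resolved = fs.getPath(fs.getRootDir(), "default/customer_order_relation");
        System.out.println(resolved);

        EasyMock.verify(fs);
    }
}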

Example 2 with HdfsPath

Use of com.qlangtech.tis.hdfs.impl.HdfsPath in project plugins by qlangtech.

From class BasicEngineJob, method createPath:

protected Path createPath() throws IOException {
    // SimpleDateFormat timeFormat = new SimpleDateFormat(this.cfg.getNecessaryValue("ptFormat", HdfsWriterErrorCode.REQUIRED_VALUE));
    this.dumpTable = this.createDumpTable();
    TT writerPlugin = this.getWriterPlugin();
    this.tabDumpParentPath = new Path(writerPlugin.getFs().getFileSystem().getRootDir().unwrap(Path.class), getHdfsSubPath());
    Path pmodPath = getPmodPath();
    // 将path创建
    HdfsFileSystemFactory hdfsFactory = (HdfsFileSystemFactory) writerPlugin.getFs();
    hdfsFactory.getFileSystem().mkdirs(new HdfsPath(pmodPath));
    return pmodPath;
}
Also used: HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath), Path (org.apache.hadoop.fs.Path), HdfsFileSystemFactory (com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory)
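As a reference, here is a minimal sketch of the same idea, assuming an already-configured HdfsFileSystemFactory is passed in: the TIS root IPath is unwrapped back to a raw org.apache.hadoop.fs.Path, a subdirectory is resolved under it, and the directory is created by wrapping that Path in an HdfsPath. The helper name and its hdfsSubPath parameter are illustrative stand-ins for getWriterPlugin().getFs() and getHdfsSubPath().

import com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory;
import com.qlangtech.tis.hdfs.impl.HdfsPath;
import org.apache.hadoop.fs.Path;

public class CreatePathSketch {

    // hypothetical helper mirroring createPath(): hdfsSubPath stands in for getHdfsSubPath()
    static Path ensureDumpDir(HdfsFileSystemFactory fsFactory, String hdfsSubPath) {
        // unwrap the TIS root IPath to the underlying Hadoop Path
        Path root = fsFactory.getFileSystem().getRootDir().unwrap(Path.class);
        Path dumpDir = new Path(root, hdfsSubPath);
        // HdfsPath can wrap a raw Hadoop Path, so mkdirs goes through ITISFileSystem
        fsFactory.getFileSystem().mkdirs(new HdfsPath(dumpDir));
        return dumpDir;
    }
}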

Example 3 with HdfsPath

Use of com.qlangtech.tis.hdfs.impl.HdfsPath in project plugins by qlangtech.

From class BasicEngineJob, method initializeHiveTable:

protected void initializeHiveTable(List<HiveColumn> cols) {
    try {
        TT writerPlugin = getWriterPlugin();
        try (Connection conn = writerPlugin.getConnection()) {
            Objects.requireNonNull(this.tabDumpParentPath, "tabDumpParentPath can not be null");
            ITISFileSystem fs = this.getFileSystem();
            JoinHiveTask.initializeHiveTable(fs, fs.getPath(new HdfsPath(this.tabDumpParentPath), ".."), writerPlugin.getEngineType(), parseFSFormat(), cols, colsExcludePartitionCols, conn, dumpTable, this.ptRetainNum);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used: HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath), ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem), Connection (java.sql.Connection), SQLException (java.sql.SQLException), IOException (java.io.IOException)
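A minimal sketch of the path handling above, with illustrative names: the partition dump directory (an org.apache.hadoop.fs.Path) is wrapped in an HdfsPath, and ITISFileSystem.getPath(parent, "..") steps up to the table directory that JoinHiveTask.initializeHiveTable operates on.

import com.qlangtech.tis.fs.IPath;
import com.qlangtech.tis.fs.ITISFileSystem;
import com.qlangtech.tis.hdfs.impl.HdfsPath;
import org.apache.hadoop.fs.Path;

public class TableRootSketch {

    // hypothetical helper: resolve the table directory one level above the dump path
    static IPath tableRootOf(ITISFileSystem fs, Path tabDumpParentPath) {
        // ".." walks from the partition dump directory up to the table directory
        return fs.getPath(new HdfsPath(tabDumpParentPath), "..");
    }
}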

Example 4 with HdfsPath

Use of com.qlangtech.tis.hdfs.impl.HdfsPath in project plugins by qlangtech.

From class TestDataXHdfsWriter, method testdataDump:

// @Test
public void testdataDump() throws Exception {
    // final DataxWriter dataxWriter = DataxWriter.load(null, mysql2hdfsDataXName);
    HdfsFileSystemFactory fsFactory = HdfsFileSystemFactoryTestUtils.getFileSystemFactory();
    ITISFileSystem fileSystem = fsFactory.getFileSystem();
    // assertNotNull("fileSystem can not be null", fileSystem);
    // new Path(fsFactory.rootDir, this.cfg.getNecessaryValue(Key.PATH, HdfsWriterErrorCode.REQUIRED_VALUE));
    // fileSystem.getPath("");
    HdfsPath p = new HdfsPath(fsFactory.rootDir + "/tis/order");
    HdfsPath subWriterPath = new HdfsPath(p, "test");
    try (TISFSDataOutputStream outputStream = fileSystem.create(subWriterPath, true)) {
        org.apache.commons.io.IOUtils.write(IOUtils.loadResourceFromClasspath(DataXHdfsWriter.class, "hdfs-datax-writer-assert-without-option-val.json"), outputStream, TisUTF8.get());
    }
    System.out.println("write file success");
    List<IPathInfo> iPathInfos = fileSystem.listChildren(p);
    for (IPathInfo child : iPathInfos) {
        fileSystem.delete(child.getPath(), true);
    }
    final DataXHdfsWriter hdfsWriter = new DataXHdfsWriter() {

        @Override
        public FileSystemFactory getFs() {
            return fsFactory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXHdfsWriter.class;
        }
    };
    DataxWriter.dataxWriterGetter = (name) -> {
        assertEquals("mysql2hdfs", name);
        return hdfsWriter;
    };
    // IPath path = fileSystem.getPath(fileSystem.getPath(fileSystem.getRootDir()), hdfsRelativePath);
    // System.out.println("clear path:" + path);
    // fileSystem.delete(path, true);
    WriterTemplate.realExecuteDump("hdfs-datax-writer-assert-without-option-val.json", hdfsWriter);
}
Also used: IPathInfo (com.qlangtech.tis.fs.IPathInfo), HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath), ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem), TISFSDataOutputStream (com.qlangtech.tis.fs.TISFSDataOutputStream), HdfsFileSystemFactory (com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory)
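For completeness, a minimal sketch of the write-then-clean-up flow, assuming a reachable HDFS behind an ITISFileSystem: create(path, overwrite) hands back a TISFSDataOutputStream to write through, and listChildren/delete remove everything under the parent directory again. The directory suffix and JSON payload below are illustrative.

import com.qlangtech.tis.fs.IPathInfo;
import com.qlangtech.tis.fs.ITISFileSystem;
import com.qlangtech.tis.fs.TISFSDataOutputStream;
import com.qlangtech.tis.hdfs.impl.HdfsPath;
import org.apache.commons.io.IOUtils;

import java.nio.charset.StandardCharsets;
import java.util.List;

public class HdfsWriteCleanupSketch {

    static void writeThenCleanup(ITISFileSystem fileSystem, String rootDir) throws Exception {
        // "/tis/order" and "test" are illustrative path segments
        HdfsPath parent = new HdfsPath(rootDir + "/tis/order");
        HdfsPath target = new HdfsPath(parent, "test");
        // create(path, overwrite) returns a TISFSDataOutputStream
        try (TISFSDataOutputStream out = fileSystem.create(target, true)) {
            IOUtils.write("{\"sample\":true}", out, StandardCharsets.UTF_8);
        }
        // remove everything under the parent directory again
        List<IPathInfo> children = fileSystem.listChildren(parent);
        for (IPathInfo child : children) {
            fileSystem.delete(child.getPath(), true);
        }
    }
}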

Aggregations

HdfsPath (com.qlangtech.tis.hdfs.impl.HdfsPath): 4 usages
ITISFileSystem (com.qlangtech.tis.fs.ITISFileSystem): 3 usages
HdfsFileSystemFactory (com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory): 2 usages
IDataxProcessor (com.qlangtech.tis.datax.IDataxProcessor): 1 usage
DataxProcessor (com.qlangtech.tis.datax.impl.DataxProcessor): 1 usage
IPath (com.qlangtech.tis.fs.IPath): 1 usage
IPathInfo (com.qlangtech.tis.fs.IPathInfo): 1 usage
TISFSDataOutputStream (com.qlangtech.tis.fs.TISFSDataOutputStream): 1 usage
FileSystemFactory (com.qlangtech.tis.offline.FileSystemFactory): 1 usage
File (java.io.File): 1 usage
IOException (java.io.IOException): 1 usage
Connection (java.sql.Connection): 1 usage
SQLException (java.sql.SQLException): 1 usage
Path (org.apache.hadoop.fs.Path): 1 usage
Test (org.junit.Test): 1 usage