use of com.qlangtech.tis.hdfs.impl.HdfsPath in project plugins by qlangtech.
the class TestDataXHudiWriter method testFlinkSqlTableDDLCreate.
@Test
public void testFlinkSqlTableDDLCreate() throws Exception {
    FileSystemFactory fsFactory = EasyMock.createMock("fsFactory", FileSystemFactory.class);
    ITISFileSystem fs = EasyMock.createMock("fileSystem", ITISFileSystem.class);
    // fs.getRootDir()
    String child = "default/customer_order_relation";
    String dataDir = "hudi";
    IPath rootPath = new HdfsPath(HdfsFileSystemFactoryTestUtils.DEFAULT_HDFS_ADDRESS + "/user/admin");
    IPath tabPath = new HdfsPath(rootPath, child);
    IPath hudiDataPath = new HdfsPath(tabPath, dataDir);
    EasyMock.expect(fs.getPath(rootPath, child)).andReturn(tabPath);
    EasyMock.expect(fs.getPath(tabPath, dataDir)).andReturn(hudiDataPath);
    EasyMock.expect(fs.getRootDir()).andReturn(rootPath);
    EasyMock.expect(fsFactory.getFileSystem()).andReturn(fs);
    HudiTest forTest = createDataXWriter(Optional.of(fsFactory));
    DataxProcessor dataXProcessor = EasyMock.mock("dataXProcessor", DataxProcessor.class);
    File dataXCfg = folder.newFile();
    FileUtils.writeStringToFile(dataXCfg,
            "{job:{content:[{\"writer\":"
                    + IOUtils.loadResourceFromClasspath(this.getClass(), hudi_datax_writer_assert_without_optional)
                    + "}]}}",
            TisUTF8.get());
    List<File> dataXFiles = Lists.newArrayList(dataXCfg);
    EasyMock.expect(dataXProcessor.getDataxCfgFileNames(null)).andReturn(dataXFiles);
    DataxProcessor.processorGetter = (dataXName) -> {
        Assert.assertEquals(HdfsFileSystemFactoryTestUtils.testDataXName.getName(), dataXName);
        return dataXProcessor;
    };
    EasyMock.replay(dataXProcessor, fsFactory, fs);
    // IStreamTableCreator.IStreamTableMeta streamTableMeta = forTest.writer.getStreamTableMeta(HudiWriter.targetTableName);
    // Assert.assertNotNull("streamTableMeta can not be null", streamTableMeta);
    // streamTableMeta.getColsMeta();
    // System.out.println(streamTableMeta.createFlinkTableDDL());
    // DataXHudiWriter.HudiStreamTemplateData tplData =
    //         (DataXHudiWriter.HudiStreamTemplateData) forTest.writer.decorateMergeData(
    //                 new TestStreamTemplateData(HdfsFileSystemFactoryTestUtils.testDataXName, HudiWriter.targetTableName));
    // StringBuffer createTabDdl = tplData.getSinkFlinkTableDDL(HudiWriter.targetTableName);
    // Assert.assertNotNull(createTabDdl);
    // System.out.println(createTabDdl);
    EasyMock.verify(dataXProcessor, fsFactory, fs);
}
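The mocks above exercise HdfsPath's parent/child chaining: each constructor appends one child segment to its parent's location. A minimal sketch of that pattern, using only the two constructors visible in the test (the HDFS address is a placeholder, not the actual value of DEFAULT_HDFS_ADDRESS):

IPath rootPath = new HdfsPath("hdfs://namenode:9000/user/admin"); // placeholder URI
IPath tabPath = new HdfsPath(rootPath, "default/customer_order_relation");
IPath hudiDataPath = new HdfsPath(tabPath, "hudi");
// hudiDataPath now resolves to .../user/admin/default/customer_order_relation/hudi

The EasyMock expectations then simply teach the ITISFileSystem mock to hand back these pre-built paths whenever getPath is called with the matching parent/child pair.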
use of com.qlangtech.tis.hdfs.impl.HdfsPath in project plugins by qlangtech.
the class BasicEngineJob method createPath.
protected Path createPath() throws IOException {
    // SimpleDateFormat timeFormat = new SimpleDateFormat(this.cfg.getNecessaryValue("ptFormat", HdfsWriterErrorCode.REQUIRED_VALUE));
    this.dumpTable = this.createDumpTable();
    TT writerPlugin = this.getWriterPlugin();
    this.tabDumpParentPath = new Path(writerPlugin.getFs().getFileSystem().getRootDir().unwrap(Path.class), getHdfsSubPath());
    Path pmodPath = getPmodPath();
    // create the path on HDFS
    HdfsFileSystemFactory hdfsFactory = (HdfsFileSystemFactory) writerPlugin.getFs();
    hdfsFactory.getFileSystem().mkdirs(new HdfsPath(pmodPath));
    return pmodPath;
}
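A condensed sketch of the same create-directory pattern, assuming the APIs shown above (getRootDir().unwrap(Path.class) to obtain the raw Hadoop Path, HdfsPath to wrap the result for mkdirs); the helper name and sub-path argument are illustrative:

// Hypothetical helper; factory and subPath stand in for the writer plugin's values.
Path mkSubDir(HdfsFileSystemFactory factory, String subPath) throws IOException {
    ITISFileSystem fs = factory.getFileSystem();
    // unwrap the TIS root dir into an org.apache.hadoop.fs.Path and append the sub path
    Path target = new Path(fs.getRootDir().unwrap(Path.class), subPath);
    // wrap back into an IPath implementation before handing it to the TIS API
    fs.mkdirs(new HdfsPath(target));
    return target;
}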
use of com.qlangtech.tis.hdfs.impl.HdfsPath in project plugins by qlangtech.
the class BasicEngineJob method initializeHiveTable.
protected void initializeHiveTable(List<HiveColumn> cols) {
    try {
        TT writerPlugin = getWriterPlugin();
        try (Connection conn = writerPlugin.getConnection()) {
            Objects.requireNonNull(this.tabDumpParentPath, "tabDumpParentPath can not be null");
            ITISFileSystem fs = this.getFileSystem();
            JoinHiveTask.initializeHiveTable(fs,
                    fs.getPath(new HdfsPath(this.tabDumpParentPath), ".."),
                    writerPlugin.getEngineType(), parseFSFormat(), cols,
                    colsExcludePartitionCols, conn, dumpTable, this.ptRetainNum);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
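Worth noting is the fs.getPath(new HdfsPath(this.tabDumpParentPath), "..") call: wrapping the dump directory and resolving ".." walks up one level, so the Hive table is initialized against the table's root directory rather than an individual partition dump. A hedged sketch of just that step (the helper name is hypothetical):

IPath resolveTableRoot(ITISFileSystem fs, Path tabDumpParentPath) {
    // ".." resolves to the parent of the dump directory, i.e. the table root
    return fs.getPath(new HdfsPath(tabDumpParentPath), "..");
}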
use of com.qlangtech.tis.hdfs.impl.HdfsPath in project plugins by qlangtech.
the class TestDataXHdfsWriter method testdataDump.
// @Test
public void testdataDump() throws Exception {
    // final DataxWriter dataxWriter = DataxWriter.load(null, mysql2hdfsDataXName);
    HdfsFileSystemFactory fsFactory = HdfsFileSystemFactoryTestUtils.getFileSystemFactory();
    ITISFileSystem fileSystem = fsFactory.getFileSystem();
    // assertNotNull("fileSystem can not be null", fileSystem);
    // new Path(fsFactory.rootDir, this.cfg.getNecessaryValue(Key.PATH, HdfsWriterErrorCode.REQUIRED_VALUE));
    // fileSystem.getPath("");
    HdfsPath p = new HdfsPath(fsFactory.rootDir + "/tis/order");
    HdfsPath subWriterPath = new HdfsPath(p, "test");
    try (TISFSDataOutputStream outputStream = fileSystem.create(subWriterPath, true)) {
        org.apache.commons.io.IOUtils.write(
                IOUtils.loadResourceFromClasspath(DataXHdfsWriter.class, "hdfs-datax-writer-assert-without-option-val.json"),
                outputStream, TisUTF8.get());
    }
    System.out.println("write file success");
    List<IPathInfo> iPathInfos = fileSystem.listChildren(p);
    for (IPathInfo child : iPathInfos) {
        fileSystem.delete(child.getPath(), true);
    }
    final DataXHdfsWriter hdfsWriter = new DataXHdfsWriter() {

        @Override
        public FileSystemFactory getFs() {
            return fsFactory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXHdfsWriter.class;
        }
    };
    DataxWriter.dataxWriterGetter = (name) -> {
        assertEquals("mysql2hdfs", name);
        return hdfsWriter;
    };
    // IPath path = fileSystem.getPath(fileSystem.getPath(fileSystem.getRootDir()), hdfsRelativePath);
    // System.out.println("clear path:" + path);
    // fileSystem.delete(path, true);
    WriterTemplate.realExecuteDump("hdfs-datax-writer-assert-without-option-val.json", hdfsWriter);
}
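The test's file lifecycle (create, write, list, delete) is the core ITISFileSystem round trip. A minimal sketch under the same APIs shown above, with a literal payload standing in for the classpath resource:

// Hypothetical helper illustrating the create/write/list/delete round trip.
void roundTrip(HdfsFileSystemFactory fsFactory) throws Exception {
    ITISFileSystem fs = fsFactory.getFileSystem();
    HdfsPath dir = new HdfsPath(fsFactory.rootDir + "/tis/order"); // illustrative directory
    // create(path, overwrite = true) returns a TISFSDataOutputStream
    try (TISFSDataOutputStream out = fs.create(new HdfsPath(dir, "test"), true)) {
        org.apache.commons.io.IOUtils.write("payload", out, TisUTF8.get());
    }
    // enumerate the directory and recursively delete each child
    for (IPathInfo child : fs.listChildren(dir)) {
        fs.delete(child.getPath(), true);
    }
}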