Use of com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory in project plugins by qlangtech.
Class TestDataXHdfsReader, method createHdfsReader:
protected DataXHdfsReader createHdfsReader(String dataXName) {
    final HdfsFileSystemFactory fsFactory = HdfsFileSystemFactoryTestUtils.getFileSystemFactory();
    DataXHdfsReader dataxReader = new DataXHdfsReader() {

        @Override
        public HdfsFileSystemFactory getFs() {
            return fsFactory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXHdfsReader.class;
        }
    };
    dataxReader.dataXName = dataXName;
    dataxReader.template = DataXHdfsReader.getDftTemplate();
    dataxReader.column = "[\n"
            + " {\n" + " \"index\": 0,\n" + " \"type\": \"string\"\n" + " },\n"
            + " {\n" + " \"index\": 1,\n" + " \"type\": \"string\"\n" + " },\n"
            + " {\n" + " \"index\": 2,\n" + " \"type\": \"string\"\n" + " },\n"
            + " {\n" + " \"index\": 3,\n" + " \"type\": \"string\"\n" + " },\n"
            + " {\n" + " \"index\": 4,\n" + " \"type\": \"string\"\n" + " }\n"
            + " ]";
    dataxReader.fsName = "default";
    dataxReader.compress = "gzip";
    dataxReader.csvReaderConfig = "{\n"
            + " \"safetySwitch\": false,\n"
            + " \"skipEmptyRecords\": false,\n"
            + " \"useTextQualifier\": false\n"
            + "}";
    dataxReader.fieldDelimiter = ",";
    dataxReader.encoding = "utf-8";
    dataxReader.nullFormat = "\\\\N";
    dataxReader.fileType = "text";
    dataxReader.path = "tis/order/*";
    return dataxReader;
}
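
A typical call site would pass a pipeline name and then exercise the populated fields. A minimal sketch, assuming a JUnit test harness; "test_order" is an illustrative dataXName, not a value from the original test:

// Hypothetical usage of the factory method above.
DataXHdfsReader reader = createHdfsReader("test_order");
// The fixture fields set in createHdfsReader should round-trip unchanged.
assertEquals("test_order", reader.dataXName);
assertEquals("tis/order/*", reader.path);
assertEquals("gzip", reader.compress);
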
Use of com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory in project plugins by qlangtech.
Class BasicEngineJob, method createPath:
protected Path createPath() throws IOException {
    // SimpleDateFormat timeFormat = new SimpleDateFormat(this.cfg.getNecessaryValue("ptFormat", HdfsWriterErrorCode.REQUIRED_VALUE));
    this.dumpTable = this.createDumpTable();
    TT writerPlugin = this.getWriterPlugin();
    this.tabDumpParentPath = new Path(writerPlugin.getFs().getFileSystem().getRootDir().unwrap(Path.class), getHdfsSubPath());
    Path pmodPath = getPmodPath();
    // create the target path on HDFS
    HdfsFileSystemFactory hdfsFactory = (HdfsFileSystemFactory) writerPlugin.getFs();
    hdfsFactory.getFileSystem().mkdirs(new HdfsPath(pmodPath));
    return pmodPath;
}
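
The create-if-missing idiom above (wrap the raw Hadoop Path in a TIS HdfsPath, then call mkdirs) can be factored into a small helper. A minimal sketch; ensureDir is a hypothetical name, and mkdirs is assumed to behave exactly as it does in createPath():

// Hypothetical helper mirroring the mkdirs call in createPath().
protected Path ensureDir(HdfsFileSystemFactory fsFactory, Path dir) throws IOException {
    // HdfsPath adapts a raw Hadoop Path to the TIS file-system API.
    fsFactory.getFileSystem().mkdirs(new HdfsPath(dir));
    return dir;
}
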
Use of com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory in project plugins by qlangtech.
Class TestDataXHiveWriter, method testDataDump:
public void testDataDump() throws Exception {
    HdfsFileSystemFactory hdfsFileSystemFactory = HdfsFileSystemFactoryTestUtils.getFileSystemFactory();
    final DefaultHiveConnGetter hiveConnGetter = new DefaultHiveConnGetter();
    hiveConnGetter.dbName = "tis";
    hiveConnGetter.hiveAddress = "192.168.28.200:10000";
    final DataXHiveWriter dataxWriter = new DataXHiveWriter() {

        @Override
        public IHiveConnGetter getHiveConnGetter() {
            return hiveConnGetter;
        }

        @Override
        public FileSystemFactory getFs() {
            return hdfsFileSystemFactory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXHiveWriter.class;
        }
    };
    DataxWriter.dataxWriterGetter = (name) -> {
        assertEquals("mysql2hive", name);
        return dataxWriter;
    };
    WriterTemplate.realExecuteDump("hive-datax-writer-assert-without-option-val.json", dataxWriter);
}
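
The anonymous subclass stubs getFs() and getHiveConnGetter() so the writer uses locally built fixtures instead of resolving plugins by name. A minimal sketch of assertions that could sit at the end of testDataDump, touching only values set above (assumes a JUnit harness with assertSame available):

// Verify the stub wiring before the dump runs.
assertSame(hdfsFileSystemFactory, dataxWriter.getFs());
assertEquals("tis", hiveConnGetter.dbName);
assertEquals("192.168.28.200:10000", hiveConnGetter.hiveAddress);
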
Use of com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory in project plugins by qlangtech.
Class TestDataXSparkWriter, method testDataDump:
public void testDataDump() throws Exception {
    // final DataxWriter dataxWriter = DataxWriter.load(null, mysql2hiveDataXName);
    HdfsFileSystemFactory hdfsFileSystemFactory = HdfsFileSystemFactoryTestUtils.getFileSystemFactory();
    ITISFileSystem fileSystem = hdfsFileSystemFactory.getFileSystem();
    final DefaultHiveConnGetter hiveConnGetter = new DefaultHiveConnGetter();
    hiveConnGetter.dbName = "tis";
    hiveConnGetter.hiveAddress = "192.168.28.200:10000";
    // HdfsPath historicalPath = new HdfsPath(hdfsFileSystemFactory.rootDir + "/" + hiveConnGetter.dbName + "/customer_order_relation");
    // fileSystem.delete(historicalPath, true);
    final DataXSparkWriter dataxWriter = new DataXSparkWriter() {

        @Override
        public IHiveConnGetter getHiveConnGetter() {
            return hiveConnGetter;
        }

        @Override
        public FileSystemFactory getFs() {
            return hdfsFileSystemFactory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXSparkWriter.class;
        }
    };
    DataxWriter.dataxWriterGetter = (name) -> {
        assertEquals(mysql2hiveDataXName, name);
        return dataxWriter;
    };
    WriterTemplate.realExecuteDump("spark-datax-writer-assert-without-option-val.json", dataxWriter);
}
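
This test and the Hive variant above configure an identical DefaultHiveConnGetter. A sketch of a shared fixture helper (hypothetical name; the values are the ones both tests use):

// Hypothetical shared fixture for the Hive and Spark writer tests.
private static DefaultHiveConnGetter testHiveConnGetter() {
    DefaultHiveConnGetter getter = new DefaultHiveConnGetter();
    getter.dbName = "tis";
    getter.hiveAddress = "192.168.28.200:10000";
    return getter;
}
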
Use of com.qlangtech.tis.hdfs.impl.HdfsFileSystemFactory in project plugins by qlangtech.
Class TestDataXHdfsWriter, method testdataDump:
// @Test
public void testdataDump() throws Exception {
    // final DataxWriter dataxWriter = DataxWriter.load(null, mysql2hdfsDataXName);
    HdfsFileSystemFactory fsFactory = HdfsFileSystemFactoryTestUtils.getFileSystemFactory();
    ITISFileSystem fileSystem = fsFactory.getFileSystem();
    // assertNotNull("fileSystem can not be null", fileSystem);
    // new Path(fsFactory.rootDir
    // , this.cfg.getNecessaryValue(Key.PATH, HdfsWriterErrorCode.REQUIRED_VALUE));
    //
    // fileSystem.getPath("");
    HdfsPath p = new HdfsPath(fsFactory.rootDir + "/tis/order");
    HdfsPath subWriterPath = new HdfsPath(p, "test");
    try (TISFSDataOutputStream outputStream = fileSystem.create(subWriterPath, true)) {
        org.apache.commons.io.IOUtils.write(IOUtils.loadResourceFromClasspath(DataXHdfsWriter.class, "hdfs-datax-writer-assert-without-option-val.json"), outputStream, TisUTF8.get());
    }
    System.out.println("write file success");
    List<IPathInfo> iPathInfos = fileSystem.listChildren(p);
    for (IPathInfo child : iPathInfos) {
        fileSystem.delete(child.getPath(), true);
    }
    final DataXHdfsWriter hdfsWriter = new DataXHdfsWriter() {

        @Override
        public FileSystemFactory getFs() {
            return fsFactory;
        }

        @Override
        public Class<?> getOwnerClass() {
            return DataXHdfsWriter.class;
        }
    };
    DataxWriter.dataxWriterGetter = (name) -> {
        assertEquals("mysql2hdfs", name);
        return hdfsWriter;
    };
    // IPath path = fileSystem.getPath(fileSystem.getPath(fileSystem.getRootDir()), hdfsRelativePath);
    // System.out.println("clear path:" + path);
    // fileSystem.delete(path, true);
    //
    WriterTemplate.realExecuteDump("hdfs-datax-writer-assert-without-option-val.json", hdfsWriter);
}
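
The listChildren/delete loop that clears everything under tis/order before the dump is a reusable cleanup pattern. A minimal sketch (hypothetical helper name; only ITISFileSystem calls that appear above are used):

// Hypothetical cleanup helper mirroring the loop in testdataDump.
private static void clearChildren(ITISFileSystem fileSystem, HdfsPath dir) throws Exception {
    // Delete each immediate child of dir, recursing into directories.
    for (IPathInfo child : fileSystem.listChildren(dir)) {
        fileSystem.delete(child.getPath(), true);
    }
}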