Use of com.dtstack.dtcenter.loader.client.IHdfsFile in project Taier by DTStack.
In class HdfsOperator, method uploadInputStreamToHdfs:
public static boolean uploadInputStreamToHdfs(Map<String, Object> conf, Map<String, Object> kerberos, byte[] bytes, String hdfsPath) {
    HdfsSourceDTO sourceDTO = getSourceDTO(conf, kerberos);
    IHdfsFile hdfsClient = ClientCache.getHdfs(DataSourceType.HDFS.getVal());
    return hdfsClient.uploadInputStreamToHdfs(sourceDTO, bytes, hdfsPath);
}
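A minimal caller sketch, assuming a non-Kerberos cluster; the configuration key, paths, and payload below are illustrative and not taken from Taier itself:

// Illustrative caller of HdfsOperator.uploadInputStreamToHdfs.
// Assumes java.nio.charset.StandardCharsets, java.util.HashMap and java.util.Map are imported.
Map<String, Object> conf = new HashMap<>();
conf.put("fs.defaultFS", "hdfs://ns1");   // assumed Hadoop setting, adjust to your cluster
Map<String, Object> kerberos = null;      // assume Kerberos is not enabled
byte[] payload = "hello taier".getBytes(StandardCharsets.UTF_8);
boolean uploaded = HdfsOperator.uploadInputStreamToHdfs(conf, kerberos, payload, "/tmp/demo/hello.txt");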
Use of com.dtstack.dtcenter.loader.client.IHdfsFile in project Taier by DTStack.
In class HdfsOperator, method createDir:
public static boolean createDir(Map<String, Object> conf, Map<String, Object> kerberos, String dir) {
    dir = uri(dir);
    HdfsSourceDTO sourceDTO = getSourceDTO(conf, kerberos);
    IHdfsFile hdfsClient = ClientCache.getHdfs(DataSourceType.HDFS.getVal());
    return hdfsClient.createDir(sourceDTO, dir, null);
}
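A matching sketch for directory creation; the target path and the reuse of the conf/kerberos maps from the previous example are assumptions:

// Illustrative caller of HdfsOperator.createDir, reusing conf/kerberos from above.
// The wrapper normalizes the path with uri(dir) and passes null as the permission
// argument of IHdfsFile.createDir, as shown in the snippet above.
boolean created = HdfsOperator.createDir(conf, kerberos, "/tmp/demo/output");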
Use of com.dtstack.dtcenter.loader.client.IHdfsFile in project Taier by DTStack.
In class HdfsOperator, method fileMerge:
/**
 * Merge small files under the given HDFS path into larger files.
 *
 * @param conf                     Hadoop configuration
 * @param kerberos                 Kerberos configuration
 * @param src                      source path
 * @param mergePath                temporary directory for the merged output
 * @param fileFormat               file storage format
 * @param maxCombinedFileSize      maximum size of a merged file
 * @param needCombineFileSizeLimit size threshold below which a file is eligible for merging
 */
public static void fileMerge(Map<String, Object> conf, Map<String, Object> kerberos, String src, String mergePath, FileFormat fileFormat, Long maxCombinedFileSize, Long needCombineFileSizeLimit) {
    try {
        HdfsSourceDTO sourceDTO = getSourceDTO(conf, kerberos);
        IHdfsFile hdfsClient = ClientCache.getHdfs(DataSourceType.HDFS.getVal());
        hdfsClient.fileMerge(sourceDTO, src, mergePath, fileFormat, maxCombinedFileSize, needCombineFileSizeLimit);
    } catch (Exception e) {
        // Message translates to: "File merge failed! Cause: %s"
        throw new DtCenterDefException(String.format("文件合并异常!原因是:%s", e.getMessage()));
    }
}
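A hedged sketch of how the two size thresholds might be chosen; the helper method name and size values are illustrative only, and the FileFormat value is left to the caller:

// Hypothetical helper around HdfsOperator.fileMerge: merge files smaller than
// needCombineFileSizeLimit under src into mergePath, capping each merged file
// at maxCombinedFileSize.
static void mergeSmallFiles(Map<String, Object> conf, Map<String, Object> kerberos,
                            String src, String mergePath, FileFormat fileFormat) {
    long maxCombinedFileSize = 128L * 1024 * 1024;      // illustrative: 128 MB per merged file
    long needCombineFileSizeLimit = 16L * 1024 * 1024;  // illustrative: only merge files under 16 MB
    HdfsOperator.fileMerge(conf, kerberos, src, mergePath, fileFormat,
            maxCombinedFileSize, needCombineFileSizeLimit);
}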
Use of com.dtstack.dtcenter.loader.client.IHdfsFile in project Taier by DTStack.
In class HdfsOperator, method getHdfsFileClient:
public static IHdfsFile getHdfsFileClient(Map<String, Object> conf, Map<String, Object> kerberos) {
    HdfsSourceDTO sourceDTO = getSourceDTO(conf, kerberos);
    IHdfsFile hdfsClient = ClientCache.getHdfs(DataSourceType.HDFS.getVal());
    return hdfsClient;
}
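A short sketch of fetching the client directly; note that direct IHdfsFile calls still need an HdfsSourceDTO built from the same conf/kerberos maps, which the wrappers on this page obtain via getSourceDTO:

// Illustrative: obtain the IHdfsFile client once and hold on to it.
// ClientCache looks the client up by data source type, so repeated fetches are expected to be cheap.
IHdfsFile hdfs = HdfsOperator.getHdfsFileClient(conf, kerberos);
// To invoke hdfs.* methods directly, build an HdfsSourceDTO from the same conf/kerberos,
// as the wrappers above do with getSourceDTO(conf, kerberos).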
Use of com.dtstack.dtcenter.loader.client.IHdfsFile in project Taier by DTStack.
In class HdfsOperator, method copyToLocal:
public static void copyToLocal(Map<String, Object> conf, Map<String, Object> kerberos, String srcPath, String dstPath) {
    HdfsSourceDTO sourceDTO = getSourceDTO(conf, kerberos);
    IHdfsFile hdfsClient = ClientCache.getHdfs(DataSourceType.HDFS.getVal());
    hdfsClient.copyToLocal(sourceDTO, srcPath, dstPath);
}
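A final sketch for pulling a file down from HDFS; both paths are placeholders:

// Illustrative caller of HdfsOperator.copyToLocal: copy srcPath from HDFS
// to dstPath on the local filesystem, reusing conf/kerberos from above.
HdfsOperator.copyToLocal(conf, kerberos, "/tmp/demo/hello.txt", "/data/local/hello.txt");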