use of org.hzero.boot.file.dto.FileDTO in project agile-service by open-hand.
the class StaticFileCompressServiceImpl method unCompressedByApache.
/**
 * Shared extraction flow for archive formats handled by Apache Commons Compress;
 * the concrete {@code ArchiveInputStream} is selected from the file suffix by
 * {@code getArchiveInputStream}. Each regular entry is uploaded to the AGILE
 * bucket and recorded as a {@code StaticFileLineDTO}; progress is pushed to the
 * operation history as bytes are consumed.
 *
 * @param staticFileCompress decompression parameters (input stream, header id, prefix path, history)
 * @param projectId project id
 * @param organizationId organization id
 * @param suffix archive file suffix; drives the choice of compression stream
 * @param staticFileCompressHistoryList decompression operation history records used for progress reporting
 * @throws IOException on I/O failure while reading the archive
 */
private void unCompressedByApache(StaticFileCompressDTO staticFileCompress, Long projectId, Long organizationId, String suffix, List<StaticFileOperationHistoryDTO> staticFileCompressHistoryList) throws IOException {
Long userId = DetailsHelper.getUserDetails().getUserId();
StaticFileHeaderDTO update = new StaticFileHeaderDTO();
update.setId(staticFileCompress.getId());
int size = staticFileCompress.getSize();
double process = 0.0;
List<StaticFileLineDTO> lineList = new ArrayList<>();
List<String> urls = new ArrayList<>();
String prefixPath = staticFileCompress.getPrefixPath();
try (BufferedInputStream bufferedInputStream = new BufferedInputStream(staticFileCompress.getIn());
ArchiveInputStream in = getArchiveInputStream(bufferedInputStream, suffix, staticFileCompress.getEncode())) {
ArchiveEntry entry;
while (Objects.nonNull(entry = in.getNextEntry())) {
// Remaining unread bytes; (size - availableSize) approximates bytes consumed so far.
// NOTE(review): available() is only an estimate per its contract — confirm this is acceptable for progress display.
int availableSize = bufferedInputStream.available();
// Skip directories and entries whose data cannot be read.
if (!entry.isDirectory() && in.canReadEntryData(entry)) {
byte[] bytes = inputToByte(in);
int newSize = bytes.length;
if (entry.getName().contains(MACOSX) || entry.getName().contains(DS_STORE) || newSize <= 0) {
// Skip macOS metadata entries (__MACOSX, .DS_Store) and zero-length entries.
// NOTE(review): this `continue` also bypasses the updateProcess call below for such entries.
continue;
}
// Upload the entry's bytes to the AGILE bucket.
String url = fileClient.uploadFile(organizationId, FileUploadBucket.AGILE_BUCKET.bucket(), null, getEntryFileName(entry.getName()), bytes);
urls.add(url);
String relativePath = filePathService.generateRelativePath(url);
StaticFileLineDTO staticFileLine = new StaticFileLineDTO(projectId, organizationId, staticFileCompress.getId(), relativePath, dealRelativePath(entry.getName(), prefixPath));
lineList.add(staticFileLine);
}
process = updateProcess(staticFileCompressHistoryList, staticFileCompress.getStaticFileCompressHistory(), size, (size - availableSize), process, staticFileCompress.getIssueId());
}
// Fetch metadata for the uploaded files and index it by relative path.
// NOTE(review): Collectors.toMap throws on duplicate keys — assumes relative paths are unique within the archive.
List<FileDTO> files = fileClient.getFiles(organizationId, FileUploadBucket.AGILE_BUCKET.bucket(), urls);
Map<String, FileDTO> fileMap = files.stream().collect(Collectors.toMap(file -> filePathService.generateRelativePath(file.getFileUrl()), file -> file));
lineList.forEach(line -> {
// Assign id, audit fields and the file type reported by the file service.
line.setId(snowflakeHelper.next());
line.setCreatedBy(userId);
line.setLastUpdatedBy(userId);
line.setFileType(fileMap.get(line.getUrl()) != null ? fileMap.get(line.getUrl()).getFileType() : null);
});
staticFileLineMapper.batchInsert(lineList);
updateHistoryStatus(staticFileCompress.getStaticFileCompressHistory(), SUCCESS);
staticFileCompress.setStatus(SUCCESS);
sendProcess(staticFileCompressHistoryList, staticFileCompress.getStaticFileCompressHistory().getUserId(), projectId, staticFileCompress.getIssueId());
}
}
use of org.hzero.boot.file.dto.FileDTO in project agile-service by open-hand.
the class StaticFileCompressServiceImpl method unRar.
/**
 * Extracts a RAR archive with junrar, uploads every regular file entry to the
 * AGILE bucket and records a {@code StaticFileLineDTO} per uploaded file;
 * progress is reported against the total archive size via packed entry sizes.
 *
 * @param staticFileCompress decompression parameters (input stream, header id, prefix path, history)
 * @param projectId project id
 * @param organizationId organization id
 * @param staticFileCompressHistoryList decompression operation history records used for progress reporting
 * @throws IOException on I/O failure while reading the archive
 * @throws CommonException with {@code RAR4_EXCEPTION_CODE} when junrar cannot parse the archive
 */
private void unRar(StaticFileCompressDTO staticFileCompress, Long projectId, Long organizationId, List<StaticFileOperationHistoryDTO> staticFileCompressHistoryList) throws IOException {
Long userId = DetailsHelper.getUserDetails().getUserId();
List<StaticFileLineDTO> lineList = new ArrayList<>();
List<String> urls = new ArrayList<>();
String prefixPath = staticFileCompress.getPrefixPath();
long nowSize = 0;
double process = 0.0;
int size = staticFileCompress.getSize();
// try-with-resources: Archive implements Closeable; the previous version leaked it
// on both the success and the exception path.
try (Archive archive = new Archive(staticFileCompress.getIn())) {
FileHeader fileHeader;
while (Objects.nonNull(fileHeader = archive.nextFileHeader())) {
// Packed size is accumulated for every header (including directories) so
// progress tracks consumption of the whole archive.
long newSize = fileHeader.getPackSize();
nowSize += newSize;
if (!fileHeader.isDirectory()) {
byte[] bytes = inputToByte(archive.getInputStream(fileHeader));
if (fileHeader.getFileName().contains(MACOSX) || fileHeader.getFileName().contains(DS_STORE) || newSize <= 0) {
// Skip macOS metadata entries (__MACOSX, .DS_Store) and empty entries.
continue;
}
// Upload the entry's bytes to the AGILE bucket.
String url = fileClient.uploadFile(organizationId, FileUploadBucket.AGILE_BUCKET.bucket(), null, getEntryFileName(fileHeader.getFileName()), bytes);
urls.add(url);
String relativePath = filePathService.generateRelativePath(url);
StaticFileLineDTO staticFileLine = new StaticFileLineDTO(projectId, organizationId, staticFileCompress.getId(), relativePath, dealRelativePathSlash(fileHeader.getFileName(), prefixPath));
lineList.add(staticFileLine);
process = updateProcess(staticFileCompressHistoryList, staticFileCompress.getStaticFileCompressHistory(), size, nowSize, process, staticFileCompress.getIssueId());
}
}
} catch (RarException e) {
// Preserve the cause instead of dropping it — the previous version threw
// CommonException(RAR4_EXCEPTION_CODE) without the underlying RarException.
throw new CommonException(RAR4_EXCEPTION_CODE, e);
}
// Fetch metadata for the uploaded files and index it by relative path.
// NOTE(review): Collectors.toMap throws on duplicate keys — assumes relative paths are unique within the archive.
List<FileDTO> files = fileClient.getFiles(organizationId, FileUploadBucket.AGILE_BUCKET.bucket(), urls);
Map<String, FileDTO> fileMap = files.stream().collect(Collectors.toMap(file -> filePathService.generateRelativePath(file.getFileUrl()), file -> file));
lineList.forEach(line -> {
// Assign id, audit fields and the file type reported by the file service.
line.setId(snowflakeHelper.next());
line.setCreatedBy(userId);
line.setLastUpdatedBy(userId);
line.setFileType(fileMap.get(line.getUrl()) != null ? fileMap.get(line.getUrl()).getFileType() : null);
});
staticFileLineMapper.batchInsert(lineList);
updateHistoryStatus(staticFileCompress.getStaticFileCompressHistory(), SUCCESS);
staticFileCompress.setStatus(SUCCESS);
sendProcess(staticFileCompressHistoryList, staticFileCompress.getStaticFileCompressHistory().getUserId(), projectId, staticFileCompress.getIssueId());
}
Aggregations