Example 1 with FileDTO

Use of org.hzero.boot.file.dto.FileDTO in project agile-service by open-hand.

Class StaticFileCompressServiceImpl, method unCompressedByApache.

/**
 * Shared flow for archives that the Apache Commons Compress tools can unpack;
 * the file suffix determines which archive input stream is used.
 *
 * @param staticFileCompress            decompression parameters
 * @param projectId                     project id
 * @param organizationId                organization id
 * @param suffix                        file name suffix
 * @param staticFileCompressHistoryList decompression operation history records
 * @throws IOException on I/O errors
 */
private void unCompressedByApache(StaticFileCompressDTO staticFileCompress, Long projectId, Long organizationId, String suffix, List<StaticFileOperationHistoryDTO> staticFileCompressHistoryList) throws IOException {
    Long userId = DetailsHelper.getUserDetails().getUserId();
    StaticFileHeaderDTO update = new StaticFileHeaderDTO();
    update.setId(staticFileCompress.getId());
    int size = staticFileCompress.getSize();
    double process = 0.0;
    List<StaticFileLineDTO> lineList = new ArrayList<>();
    List<String> urls = new ArrayList<>();
    String prefixPath = staticFileCompress.getPrefixPath();
    try (BufferedInputStream bufferedInputStream = new BufferedInputStream(staticFileCompress.getIn());
        ArchiveInputStream in = getArchiveInputStream(bufferedInputStream, suffix, staticFileCompress.getEncode())) {
        ArchiveEntry entry;
        while (Objects.nonNull(entry = in.getNextEntry())) {
            int availableSize = bufferedInputStream.available();
            if (!entry.isDirectory() && in.canReadEntryData(entry)) {
                byte[] bytes = inputToByte(in);
                int newSize = bytes.length;
                // skip redundant macOS metadata entries and empty entries
                if (entry.getName().contains(MACOSX) || entry.getName().contains(DS_STORE) || newSize <= 0) {
                    continue;
                }
                // upload the extracted file
                String url = fileClient.uploadFile(organizationId, FileUploadBucket.AGILE_BUCKET.bucket(), null, getEntryFileName(entry.getName()), bytes);
                urls.add(url);
                String relativePath = filePathService.generateRelativePath(url);
                StaticFileLineDTO staticFileLine = new StaticFileLineDTO(projectId, organizationId, staticFileCompress.getId(), relativePath, dealRelativePath(entry.getName(), prefixPath));
                lineList.add(staticFileLine);
            }
            process = updateProcess(staticFileCompressHistoryList, staticFileCompress.getStaticFileCompressHistory(), size, (size - availableSize), process, staticFileCompress.getIssueId());
        }
        // fetch the metadata of the uploaded files
        List<FileDTO> files = fileClient.getFiles(organizationId, FileUploadBucket.AGILE_BUCKET.bucket(), urls);
        Map<String, FileDTO> fileMap = files.stream().collect(Collectors.toMap(file -> filePathService.generateRelativePath(file.getFileUrl()), file -> file));
        lineList.forEach(line -> {
            // set each line's file type and audit fields
            line.setId(snowflakeHelper.next());
            line.setCreatedBy(userId);
            line.setLastUpdatedBy(userId);
            line.setFileType(fileMap.get(line.getUrl()) != null ? fileMap.get(line.getUrl()).getFileType() : null);
        });
        staticFileLineMapper.batchInsert(lineList);
        updateHistoryStatus(staticFileCompress.getStaticFileCompressHistory(), SUCCESS);
        staticFileCompress.setStatus(SUCCESS);
        sendProcess(staticFileCompressHistoryList, staticFileCompress.getStaticFileCompressHistory().getUserId(), projectId, staticFileCompress.getIssueId());
    }
}
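
The getArchiveInputStream helper called above is not part of this example. Below is a minimal sketch of what a suffix-based dispatcher could look like, using only the Apache Commons Compress stream types imported by this class; the supported suffixes and the error code are assumptions, not the project's actual implementation:

// Hypothetical sketch: choose an archive stream by file suffix.
// The suffix values and the error code are illustrative assumptions.
private ArchiveInputStream getArchiveInputStream(InputStream in, String suffix, String encode) throws IOException {
    switch (suffix) {
        case ".zip":
            // ZIP entry names may use a non-UTF-8 encoding, so pass it through
            return new ZipArchiveInputStream(in, encode);
        case ".tar":
            return new TarArchiveInputStream(in);
        case ".tar.gz":
        case ".tgz":
            // gzip only compresses a single stream, so the tar reader wraps it
            return new TarArchiveInputStream(new GzipCompressorInputStream(in));
        default:
            throw new CommonException("error.static.file.suffix.not.support");
    }
}

A .tar.gz archive has to be routed through GzipCompressorInputStream first, since gzip itself carries no entry structure.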

Example 2 with FileDTO

Use of org.hzero.boot.file.dto.FileDTO in project agile-service by open-hand.

Class StaticFileCompressServiceImpl, method unRar.

/**
 * Unpacks a RAR archive with the junrar library (com.github.junrar.Archive)
 * and uploads each extracted entry.
 */
private void unRar(StaticFileCompressDTO staticFileCompress, Long projectId, Long organizationId, List<StaticFileOperationHistoryDTO> staticFileCompressHistoryList) throws IOException {
    Long userId = DetailsHelper.getUserDetails().getUserId();
    List<StaticFileLineDTO> lineList = new ArrayList<>();
    List<String> urls = new ArrayList<>();
    String prefixPath = staticFileCompress.getPrefixPath();
    long nowSize = 0;
    double process = 0.0;
    int size = staticFileCompress.getSize();
    try {
        Archive archive = new Archive(staticFileCompress.getIn());
        FileHeader fileHeader;
        while (Objects.nonNull(fileHeader = archive.nextFileHeader())) {
            long newSize = fileHeader.getPackSize();
            nowSize += newSize;
            if (!fileHeader.isDirectory()) {
                byte[] bytes = inputToByte(archive.getInputStream(fileHeader));
                // skip redundant macOS metadata entries and empty entries
                if (fileHeader.getFileName().contains(MACOSX) || fileHeader.getFileName().contains(DS_STORE) || newSize <= 0) {
                    continue;
                }
                String url = fileClient.uploadFile(organizationId, FileUploadBucket.AGILE_BUCKET.bucket(), null, getEntryFileName(fileHeader.getFileName()), bytes);
                urls.add(url);
                String relativePath = filePathService.generateRelativePath(url);
                StaticFileLineDTO staticFileLine = new StaticFileLineDTO(projectId, organizationId, staticFileCompress.getId(), relativePath, dealRelativePathSlash(fileHeader.getFileName(), prefixPath));
                lineList.add(staticFileLine);
                process = updateProcess(staticFileCompressHistoryList, staticFileCompress.getStaticFileCompressHistory(), size, nowSize, process, staticFileCompress.getIssueId());
            }
        }
    } catch (RarException e) {
        throw new CommonException(RAR4_EXCEPTION_CODE);
    }
    // fetch the metadata of the uploaded files
    List<FileDTO> files = fileClient.getFiles(organizationId, FileUploadBucket.AGILE_BUCKET.bucket(), urls);
    Map<String, FileDTO> fileMap = files.stream().collect(Collectors.toMap(file -> filePathService.generateRelativePath(file.getFileUrl()), file -> file));
    lineList.forEach(line -> {
        // set each line's file type and audit fields
        line.setId(snowflakeHelper.next());
        line.setCreatedBy(userId);
        line.setLastUpdatedBy(userId);
        line.setFileType(fileMap.get(line.getUrl()) != null ? fileMap.get(line.getUrl()).getFileType() : null);
    });
    staticFileLineMapper.batchInsert(lineList);
    updateHistoryStatus(staticFileCompress.getStaticFileCompressHistory(), SUCCESS);
    staticFileCompress.setStatus(SUCCESS);
    sendProcess(staticFileCompressHistoryList, staticFileCompress.getStaticFileCompressHistory().getUserId(), projectId, staticFileCompress.getIssueId());
}
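
Both methods read each entry into memory through an inputToByte helper that is not shown here. A minimal sketch, assuming it simply copies the entry's bytes into the imported ByteArrayOutputStream without closing the source stream:

// Hypothetical sketch: read the current entry fully into a byte array.
// Deliberately does not close the stream, since the surrounding loop keeps
// reading further entries from the same archive stream.
private byte[] inputToByte(InputStream in) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    byte[] buffer = new byte[4096];
    int length;
    while ((length = in.read(buffer)) != -1) {
        out.write(buffer, 0, length);
    }
    return out.toByteArray();
}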

Aggregations

JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException) 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) 2
Archive (com.github.junrar.Archive) 2
RarException (com.github.junrar.exception.RarException) 2
FileHeader (com.github.junrar.rarfile.FileHeader) 2
StaticFileOperationHistorySocketVO (io.choerodon.agile.api.vo.StaticFileOperationHistorySocketVO) 2
FilePathService (io.choerodon.agile.app.service.FilePathService) 2
StaticFileCompressService (io.choerodon.agile.app.service.StaticFileCompressService) 2
StaticFileCompressDTO (io.choerodon.agile.infra.dto.StaticFileCompressDTO) 2
StaticFileHeaderDTO (io.choerodon.agile.infra.dto.StaticFileHeaderDTO) 2
StaticFileLineDTO (io.choerodon.agile.infra.dto.StaticFileLineDTO) 2
StaticFileOperationHistoryDTO (io.choerodon.agile.infra.dto.StaticFileOperationHistoryDTO) 2
FileUploadBucket (io.choerodon.agile.infra.enums.FileUploadBucket) 2
io.choerodon.agile.infra.mapper (io.choerodon.agile.infra.mapper) 2
MessageClientC7n (io.choerodon.core.client.MessageClientC7n) 2
CommonException (io.choerodon.core.exception.CommonException) 2
DetailsHelper (io.choerodon.core.oauth.DetailsHelper) 2
SnowflakeHelper (io.choerodon.mybatis.helper.snowflake.SnowflakeHelper) 2
BufferedInputStream (java.io.BufferedInputStream) 2
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 2